hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6a88b3358c960a5a7ffc39b6ec023d9ec0cadf95 | 20,252 | py | Python | libcst/codemod/visitors/tests/test_add_imports.py | jschavesr/LibCST | e5ab7b90b4c9cd1f46e5b875ad317411abf48298 | [
"Apache-2.0"
] | 1 | 2022-02-10T10:59:22.000Z | 2022-02-10T10:59:22.000Z | libcst/codemod/visitors/tests/test_add_imports.py | jschavesr/LibCST | e5ab7b90b4c9cd1f46e5b875ad317411abf48298 | [
"Apache-2.0"
] | null | null | null | libcst/codemod/visitors/tests/test_add_imports.py | jschavesr/LibCST | e5ab7b90b4c9cd1f46e5b875ad317411abf48298 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
from libcst.codemod import CodemodContext, CodemodTest
from libcst.codemod.visitors import AddImportsVisitor, ImportItem
class TestAddImportsCodemod(CodemodTest):
TRANSFORM = AddImportsVisitor
def test_noop(self) -> None:
"""
Should do nothing.
"""
before = """
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [])
def test_add_module_simple(self) -> None:
"""
Should add module as an import.
"""
before = """
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
import a.b.c
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", None, None)])
def test_dont_add_module_simple(self) -> None:
"""
Should not add module as an import since it exists
"""
before = """
import a.b.c
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
import a.b.c
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", None, None)])
def test_add_module_alias_simple(self) -> None:
"""
Should add module with alias as an import.
"""
before = """
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
import a.b.c as d
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", None, "d")])
def test_dont_add_module_alias_simple(self) -> None:
"""
Should not add module with alias as an import since it exists
"""
before = """
import a.b.c as d
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
import a.b.c as d
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", None, "d")])
def test_add_module_complex(self) -> None:
"""
Should add some modules as an import.
"""
before = """
import argparse
import sys
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
import argparse
import sys
import a.b.c
import defg.hi
import jkl as h
import i.j as k
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[
ImportItem("a.b.c", None, None),
ImportItem("defg.hi", None, None),
ImportItem("argparse", None, None),
ImportItem("jkl", None, "h"),
ImportItem("i.j", None, "k"),
],
)
def test_add_object_simple(self) -> None:
"""
Should add object as an import.
"""
before = """
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from a.b.c import D
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", "D", None)])
def test_add_object_alias_simple(self) -> None:
"""
Should add object with alias as an import.
"""
before = """
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from a.b.c import D as E
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", "D", "E")])
def test_add_future(self) -> None:
"""
Should add future import before any other imports.
"""
before = """
import unittest
import abc
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from __future__ import dummy_feature
import unittest
import abc
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before, after, [ImportItem("__future__", "dummy_feature", None)]
)
def test_dont_add_object_simple(self) -> None:
"""
Should not add object as an import since it exists.
"""
before = """
from a.b.c import D
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from a.b.c import D
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", "D", None)])
def test_dont_add_object_alias_simple(self) -> None:
"""
Should not add object as an import since it exists.
"""
before = """
from a.b.c import D as E
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from a.b.c import D as E
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", "D", "E")])
def test_add_object_modify_simple(self) -> None:
"""
Should modify existing import to add new object
"""
before = """
from a.b.c import E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from a.b.c import D, E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", "D", None)])
def test_add_object_alias_modify_simple(self) -> None:
"""
Should modify existing import with alias to add new object
"""
before = """
from a.b.c import E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from a.b.c import D as _, E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", "D", "_")])
def test_add_object_modify_complex(self) -> None:
"""
Should modify existing import to add new object
"""
before = """
from a.b.c import E, F, G as H
from d.e.f import Foo, Bar
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from a.b.c import D, E, F, G as H
from d.e.f import Baz as Qux, Foo, Bar
from g.h.i import V as W, X, Y, Z
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[
ImportItem("a.b.c", "D", None),
ImportItem("a.b.c", "F", None),
ImportItem("a.b.c", "G", "H"),
ImportItem("d.e.f", "Foo", None),
ImportItem("g.h.i", "Z", None),
ImportItem("g.h.i", "X", None),
ImportItem("d.e.f", "Bar", None),
ImportItem("d.e.f", "Baz", "Qux"),
ImportItem("g.h.i", "Y", None),
ImportItem("g.h.i", "V", "W"),
ImportItem("a.b.c", "F", None),
],
)
def test_add_and_modify_complex(self) -> None:
"""
Should correctly add both module and object imports
"""
before = """
import argparse
import sys
from a.b.c import E, F
from d.e.f import Foo, Bar
import bar as baz
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
import argparse
import sys
from a.b.c import D, E, F
from d.e.f import Foo, Bar
import bar as baz
import foo
import qux as quux
from g.h.i import X, Y, Z
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[
ImportItem("a.b.c", "D", None),
ImportItem("a.b.c", "F", None),
ImportItem("d.e.f", "Foo", None),
ImportItem("sys", None, None),
ImportItem("g.h.i", "Z", None),
ImportItem("g.h.i", "X", None),
ImportItem("d.e.f", "Bar", None),
ImportItem("g.h.i", "Y", None),
ImportItem("foo", None, None),
ImportItem("a.b.c", "F", None),
ImportItem("bar", None, "baz"),
ImportItem("qux", None, "quux"),
],
)
def test_add_import_preserve_doctring_simple(self) -> None:
"""
Should preserve any doctring if adding to the beginning.
"""
before = """
# This is some docstring
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
# This is some docstring
from a.b.c import D
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(before, after, [ImportItem("a.b.c", "D", None)])
def test_add_import_preserve_doctring_multiples(self) -> None:
"""
Should preserve any doctring if adding to the beginning.
"""
before = """
# This is some docstring
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
# This is some docstring
import argparse
from a.b.c import D
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[ImportItem("a.b.c", "D", None), ImportItem("argparse", None, None)],
)
def test_strict_module_no_imports(self) -> None:
"""
First added import in strict module should go after __strict__ flag.
"""
before = """
__strict__ = True
class Foo:
pass
"""
after = """
__strict__ = True
import argparse
class Foo:
pass
"""
self.assertCodemod(before, after, [ImportItem("argparse", None, None)])
def test_strict_module_with_imports(self) -> None:
"""
First added import in strict module should go after __strict__ flag.
"""
before = """
__strict__ = True
import unittest
class Foo:
pass
"""
after = """
__strict__ = True
import unittest
import argparse
class Foo:
pass
"""
self.assertCodemod(before, after, [ImportItem("argparse", None, None)])
def test_dont_add_relative_object_simple(self) -> None:
"""
Should not add object as an import since it exists.
"""
before = """
from .c import D
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from .c import D
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[ImportItem("a.b.c", "D", None)],
context_override=CodemodContext(full_module_name="a.b.foobar"),
)
def test_add_object_relative_modify_simple(self) -> None:
"""
Should modify existing import to add new object
"""
before = """
from .c import E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from .c import D, E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[ImportItem("a.b.c", "D", None)],
context_override=CodemodContext(full_module_name="a.b.foobar"),
)
def test_import_order(self) -> None:
"""
The imports should be in alphabetic order of added imports, added import alias, original imports.
"""
before = """
from a import b, e, h
"""
after = """
from a import c, f, d as x, g as y, b, e, h
"""
self.assertCodemod(
before,
after,
[
ImportItem("a", "f", None),
ImportItem("a", "g", "y"),
ImportItem("a", "c", None),
ImportItem("a", "d", "x"),
],
context_override=CodemodContext(full_module_name="a.b.foobar"),
)
def test_add_explicit_relative(self) -> None:
"""
Should add a relative import from .. .
"""
before = """
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from .. import a
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[ImportItem("a", None, None, 2)],
)
def test_add_explicit_relative_alias(self) -> None:
"""
Should add a relative import from .. .
"""
before = """
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from .. import a as foo
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[ImportItem("a", None, "foo", 2)],
)
def test_add_explicit_relative_object_simple(self) -> None:
"""
Should add a relative import.
"""
before = """
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from ..a import B
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[ImportItem("a", "B", None, 2)],
)
def test_dont_add_explicit_relative_object_simple(self) -> None:
"""
Should not add object as an import since it exists.
"""
before = """
from ..c import D
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from ..c import D
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[ImportItem("c", "D", None, 2)],
context_override=CodemodContext(full_module_name="a.b.foobar"),
)
def test_add_object_explicit_relative_modify_simple(self) -> None:
"""
Should modify existing import to add new object.
"""
before = """
from ..c import E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from ..c import D, E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[ImportItem("c", "D", None, 2)],
context_override=CodemodContext(full_module_name="a.b.foobar"),
)
def test_add_object_resolve_explicit_relative_modify_simple(self) -> None:
"""
Should merge a relative new module with an absolute existing one.
"""
before = """
from ..c import E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from ..c import D, E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[ImportItem("c", "D", None, 2)],
context_override=CodemodContext(full_module_name="a.b.foobar"),
)
def test_add_object_resolve_dotted_relative_modify_simple(self) -> None:
"""
Should merge a relative new module with an absolute existing one.
"""
before = """
from ..c import E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
after = """
from ..c import D, E, F
def foo() -> None:
pass
def bar() -> int:
return 5
"""
self.assertCodemod(
before,
after,
[ImportItem("..c", "D", None)],
context_override=CodemodContext(full_module_name="a.b.foobar"),
)
def test_import_in_docstring_module(self) -> None:
"""
The import should be added after module docstring.
"""
before = """
'''Docstring.'''
import typing
"""
after = """
'''Docstring.'''
from __future__ import annotations
import typing
"""
self.assertCodemod(
before,
after,
[ImportItem("__future__", "annotations", None)],
context_override=CodemodContext(full_module_name="a.b.foobar"),
)
| 23.066059 | 105 | 0.412552 | 1,990 | 20,252 | 4.100503 | 0.069347 | 0.047181 | 0.063725 | 0.089216 | 0.851593 | 0.829289 | 0.789706 | 0.748039 | 0.725735 | 0.71973 | 0 | 0.005423 | 0.471855 | 20,252 | 877 | 106 | 23.09236 | 0.757479 | 0.083695 | 0 | 0.832203 | 0 | 0 | 0.534039 | 0 | 0 | 0 | 0 | 0 | 0.050847 | 1 | 0.050847 | false | 0.094915 | 0.238983 | 0 | 0.381356 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
6a952b06f9123696d87cc6cead756a459f2f1552 | 11,834 | py | Python | Lidar_DLG/src/GraphMatchByLineIntersectionPoints/all_Ransac.py | Anonymous772066235/GraduationDesignProgram | 3337c48005def8515a4660fcaa004dcfbe4766be | [
"MIT"
] | null | null | null | Lidar_DLG/src/GraphMatchByLineIntersectionPoints/all_Ransac.py | Anonymous772066235/GraduationDesignProgram | 3337c48005def8515a4660fcaa004dcfbe4766be | [
"MIT"
] | null | null | null | Lidar_DLG/src/GraphMatchByLineIntersectionPoints/all_Ransac.py | Anonymous772066235/GraduationDesignProgram | 3337c48005def8515a4660fcaa004dcfbe4766be | [
"MIT"
] | null | null | null | # File :all_02.py
# Author :WJ
# Function :
# Time :2021/02/21
# Version :
# Amend :
import time
import numpy as np
from scipy.optimize import linear_sum_assignment
try:
import GraphMatchByLineIntersectionPoints.LineProcess as lp
import GraphMatchByLineIntersectionPoints.LaplacianMatrice as lm
import GraphMatchByLineIntersectionPoints.TransformationMatrix as tf
import GraphMatchByLineIntersectionPoints.Visualization as vs
import GraphMatchByLineIntersectionPoints.LineFit_Ransac as lf
except:
import LineProcess as lp
import LaplacianMatrice as lm
import TransformationMatrix as tf
import Visualization as vs
import LineFit_Ransac as lf
def run(data_dlg, data_dopp, name='ABC'):
    """Register a DOPP point cloud against a DLG point cloud.

    Fits lines to both clouds with RANSAC, matches the line intersection
    points via Laplacian spectral matching plus bipartite assignment, and
    estimates the rigid transform that maps *data_dopp* onto *data_dlg*.
    The two original branches were byte-for-byte identical except for a
    handful of tuning constants; they are folded into one pipeline driven
    by a per-dataset preset table.

    :param data_dlg: (N, >=2) array; only columns 0-1 (x, y) are used.
    :param data_dopp: (M, >=2) array; only columns 0-1 (x, y) are used.
    :param name: dataset preset, 'PCB' or 'ABC', selecting the tuning
        parameters below.
    :returns: ``(R, T, transformed_dopp)`` for a known preset, ``None``
        for any other *name* (made explicit; the original fell through
        and returned ``None`` implicitly).
    """
    # Per-dataset tuning:
    #   (dopp merge distance or None for the helper's default,
    #    dlg searchLines args, dopp searchLines args,
    #    dlg merge intercept, dopp merge intercept,
    #    dissimilarity cut-off for DeleteLargeValue, output-name suffix)
    presets = {
        'PCB': (1.8, (17, 2000, 6, 0.5), (15, 2000, 6, 0.5), 17, 12, 0.8, ''),
        'ABC': (None, (7, 3000, 4, 0.1), (7, 3000, 4, 0.1), 15, 15, 0.9,
                '_abc'),
    }
    if name not in presets:
        return None
    (dopp_merge, dlg_search, dopp_search, dlg_intercept, dopp_intercept,
     cutoff, suffix) = presets[name]

    # Keep only the planar (x, y) coordinates.
    data_dlg0 = data_dlg[:, 0:2]
    data_dopp0 = data_dopp[:, 0:2]
    # Merge duplicate and near-duplicate points.
    data_dlg = lf.mergeClosePoints(data_dlg0)
    if dopp_merge is None:
        data_dopp = lf.mergeClosePoints(data_dopp0)
    else:
        data_dopp = lf.mergeClosePoints(data_dopp0, dopp_merge)
    # RANSAC line search.
    Lines_dlg = lf.searchLines(data_dlg, *dlg_search)
    Lines_dopp = lf.searchLines(data_dopp, *dopp_search)
    # Normalise the line parameterisation, then merge near-identical lines.
    Lines_dlg = lf.processLines(Lines_dlg)
    Lines_dlg = lf.mergeLines_ABC(Lines_dlg, X0=np.mean(data_dlg[:, 0]),
                                  Y0=np.mean(data_dlg[:, 1]), slope=0.1,
                                  intercept=dlg_intercept)
    Lines_dopp = lf.processLines(Lines_dopp)
    Lines_dopp = lf.mergeLines_ABC(Lines_dopp, X0=np.mean(data_dopp[:, 0]),
                                   Y0=np.mean(data_dopp[:, 1]), slope=0.1,
                                   intercept=dopp_intercept)
    # Visualise the fitted lines.
    lf.showLines(data_dlg, Lines_dlg, np.mean(data_dlg[:, 0]),
                 np.mean(data_dlg[:, 1]),
                 name='Lines_dlg_ransac' + suffix + str(len(Lines_dlg)))
    lf.showLines(data_dopp, Lines_dopp, np.mean(data_dopp[:, 0]),
                 np.mean(data_dopp[:, 1]),
                 name='Lines_dopp_ransac' + suffix + str(len(Lines_dopp)))
    # Line intersection points and their graph Laplacians.
    P_dopp = lp.GetIntersectPointofLines(Lines_dopp)
    P_dlg = lp.GetIntersectPointofLines(Lines_dlg)
    L_dopp = lm.LaplacianMatrice(P_dopp, sigma=200)
    L_dlg = lm.LaplacianMatrice(P_dlg, sigma=200)
    U_dopp, Lambda_dopp = lm.LaplacianMatrice_decomposed(L_dopp)
    U_dlg, Lambda_dlg = lm.LaplacianMatrice_decomposed(L_dlg)
    # Spectral dissimilarity matrix between the two point sets.
    k = min(len(P_dlg), len(P_dopp))
    A = lm.corrlation(U_dopp, U_dlg, k)
    # Bipartite matching; drop pairs whose dissimilarity exceeds the cutoff.
    row_ind, col_ind = linear_sum_assignment(A)
    row, col = lm.DeleteLargeValue(A, row_ind, col_ind, cutoff)
    # Re-order both point sets according to the matching.
    P_dlg_new = lm.resort_clouds(P_dlg, row)
    P_dopp_new = lm.resort_clouds(P_dopp, col)
    # Visualise the intersection-point matches.
    vs.VisualizeMatch(P_dopp, P_dlg, row, col, '直线交点_ransac' + suffix)
    # Estimate the rigid transform and apply it to the raw DOPP cloud.
    R, T = tf.ca_rt(P_dopp_new, P_dlg_new)
    data_dopp = tf.transformation(data_dopp0, R, T)
    # Visualise the registration result on the original clouds.
    vs.Visualize2PointClouds(data_dopp, data_dlg0,
                             'Macth_dlg&dopp_ransac' + suffix,
                             feature1=['blue', 'dopp', '.'],
                             feature2=['red', 'dlg', '.'])
    return R, T, data_dopp
if __name__ == "__main__":
# name='PCB'
name = 'ABC'
if name == 'PCB':
start0 = time.time()
# 导入数据
data_dlg = np.loadtxt('..\\data\\Polyline_PCB02_500.txt', delimiter=',')
data_dopp = np.loadtxt('..\\data\\PCB_c0.5_z1_t10.txt', delimiter='\t')
data_dlg0 = data_dlg[:, 0:2]
data_dopp0 = data_dopp[:, 0:2]
# 合并重叠点及邻近点
data_dlg = lf.mergeClosePoints(data_dlg0)
data_dopp = lf.mergeClosePoints(data_dopp0, 1.8)
# 搜索直线
Lines_dlg = lf.searchLines(data_dlg, 17, 2000, 6, 0.5)
Lines_dopp = lf.searchLines(data_dopp, 15, 2000, 6, 0.5)
# 处理直线
Lines_dlg = lf.processLines(Lines_dlg)
Lines_dlg = lf.mergeLines_ABC(Lines_dlg, X0=np.mean(data_dlg[:, 0]), Y0=np.mean(data_dlg[:, 1]), slope=0.1,
intercept=12)
Lines_dopp = lf.processLines(Lines_dopp)
Lines_dopp = lf.mergeLines_ABC(Lines_dopp, X0=np.mean(data_dopp[:, 0]), Y0=np.mean(data_dopp[:, 1]), slope=0.1,
intercept=12)
# 可视化直线
lf.showLines(data_dlg, Lines_dlg, np.mean(data_dlg[:, 0]), np.mean(data_dlg[:, 1]),
name='Lines_dlg_ransac' + str(len(Lines_dlg)))
lf.showLines(data_dopp, Lines_dopp, np.mean(data_dopp[:, 0]), np.mean(data_dopp[:, 1]),
name='Lines_dopp_ransac' + str(len(Lines_dopp)))
P_dopp = lp.GetIntersectPointofLines(Lines_dopp)
P_dlg = lp.GetIntersectPointofLines(Lines_dlg)
L_dopp = lm.LaplacianMatrice(P_dopp, sigma=200)
L_dlg = lm.LaplacianMatrice(P_dlg, sigma=200)
U_dopp, Lambda_dopp = lm.LaplacianMatrice_decomposed(L_dopp)
U_dlg, Lambda_dlg = lm.LaplacianMatrice_decomposed(L_dlg)
# 计算相异度矩阵
k = min(len(P_dlg), len(P_dopp))
A = lm.corrlation(U_dopp, U_dlg, k)
# 对相似度矩阵进行二分匹配(删除相异度过大的结果)
row_ind, col_ind = linear_sum_assignment(A)
row, col = lm.DeleteLargeValue(A, row_ind, col_ind, 0.9)
# 根据匹配结果对点云重新排序
P_dlg_new = lm.resort_clouds(P_dlg, row)
P_dopp_new = lm.resort_clouds(P_dopp, col)
# 可视化直线交点匹配结果
vs.VisualizeMatch(P_dopp, P_dlg, row, col, '直线交点_ransac')
# 计算变换矩阵(并对dopp进行变换)
R, T = tf.ca_rt(P_dopp_new, P_dlg_new, 'MatchingByLineIntersectionPoints_result_ransac.txt')
data_dopp = tf.transformation(data_dopp0, R, T, 'dopp_transformed_ransac.txt')
# 可视化原始点云配准结果
vs.Visualize2PointClouds(data_dopp, data_dlg0, 'Macth_dlg&dopp_ransac', feature1=['blue', 'dopp', '.'],
feature2=['red', 'dlg', '.'])
TIME = time.time() - start0
print('\n总耗时:{:.0f} hours {:.0f} minutes {:.0f} seconds'.format(TIME // 3600, TIME % 3600 // 60,
TIME % 3600 % 60))
elif name == 'ABC':
start0 = time.time()
# 导入数据
data_dlg = np.loadtxt('..\\data\\Polyline_ABC.txt', delimiter=',')
data_dopp = np.loadtxt('..\\data\\ABC_c1_z10_t10.txt', delimiter='\t')
data_dlg0 = data_dlg[:, 0:2]
data_dopp0 = data_dopp[:, 0:2]
# 合并重叠点及邻近点
data_dlg = lf.mergeClosePoints(data_dlg0)
data_dopp = lf.mergeClosePoints(data_dopp0)
# 搜索直线
Lines_dlg = lf.searchLines(data_dlg, 7, 3000, 4, 0.1)
Lines_dopp = lf.searchLines(data_dopp, 7, 3000, 4, 0.1)
# Lines_dlg = lf.searchLines(data_dlg, 12, 4000, 4, 0.2)
# Lines_dopp = lf.searchLines(data_dopp,12, 4000, 4,0.2)
# 处理直线
Lines_dlg = lf.processLines(Lines_dlg)
Lines_dlg = lf.mergeLines_ABC(Lines_dlg, X0=np.mean(data_dlg[:, 0]), Y0=np.mean(data_dlg[:, 1]), slope=0.1,
intercept=15)
Lines_dopp = lf.processLines(Lines_dopp)
Lines_dopp = lf.mergeLines_ABC(Lines_dopp, X0=np.mean(data_dopp[:, 0]), Y0=np.mean(data_dopp[:, 1]), slope=0.1,
intercept=15)
# 可视化直线
lf.showLines(data_dlg, Lines_dlg, np.mean(data_dlg[:, 0]), np.mean(data_dlg[:, 1]),
name='Lines_dlg_ransac_abc' + str(len(Lines_dlg)))
lf.showLines(data_dopp, Lines_dopp, np.mean(data_dopp[:, 0]), np.mean(data_dopp[:, 1]),
name='Lines_dopp_ransac_abc' + str(len(Lines_dopp)))
P_dopp = lp.GetIntersectPointofLines(Lines_dopp)
P_dlg = lp.GetIntersectPointofLines(Lines_dlg)
L_dopp = lm.LaplacianMatrice(P_dopp, sigma=200)
L_dlg = lm.LaplacianMatrice(P_dlg, sigma=200)
U_dopp, Lambda_dopp = lm.LaplacianMatrice_decomposed(L_dopp)
U_dlg, Lambda_dlg = lm.LaplacianMatrice_decomposed(L_dlg)
# 计算相异度矩阵
k = min(len(P_dlg), len(P_dopp))
A = lm.corrlation(U_dopp, U_dlg, k)
# 对相似度矩阵进行二分匹配(删除相异度过大的结果)
row_ind, col_ind = linear_sum_assignment(A)
row, col = lm.DeleteLargeValue(A, row_ind, col_ind, 0.9)
# 根据匹配结果对点云重新排序
P_dlg_new = lm.resort_clouds(P_dlg, row)
P_dopp_new = lm.resort_clouds(P_dopp, col)
# 可视化直线交点匹配结果
vs.VisualizeMatch(P_dopp, P_dlg, row, col, '直线交点_ransac_abc')
# 计算变换矩阵(并对dopp进行变换)
R, T = tf.ca_rt(P_dopp_new, P_dlg_new, 'MatchingByLineIntersectionPoints_result_ransac_abc.txt')
data_dopp = tf.transformation(data_dopp0, R, T, 'dopp_transformed_ransac_abc.txt')
# 可视化原始点云配准结果
vs.Visualize2PointClouds(data_dopp, data_dlg0, 'Macth_dlg&dopp_ransac_abc', feature1=['blue', 'dopp', '.'],
feature2=['red', 'dlg', '.'])
TIME = time.time() - start0
print('\n总耗时:{:.0f} hours {:.0f} minutes {:.0f} seconds'.format(TIME // 3600, TIME % 3600 // 60,
TIME % 3600 % 60))
| 41.090278 | 120 | 0.586446 | 1,522 | 11,834 | 4.291721 | 0.096583 | 0.057563 | 0.04899 | 0.031843 | 0.902174 | 0.897275 | 0.896969 | 0.886865 | 0.886865 | 0.886865 | 0 | 0.040367 | 0.29035 | 11,834 | 287 | 121 | 41.233449 | 0.737437 | 0.068193 | 0 | 0.833333 | 0 | 0 | 0.071228 | 0.038469 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005952 | false | 0 | 0.077381 | 0 | 0.095238 | 0.011905 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6aa5da5462e3a5b9e23622852fef26bceace1dd5 | 29,294 | py | Python | ironic/tests/unit/common/test_image_service.py | yanndegat/ironic | 8857ec76443dea7778bb9c0d66568304e52495e5 | [
"Apache-2.0"
] | 350 | 2015-01-02T09:35:49.000Z | 2022-03-28T09:25:59.000Z | ironic/tests/unit/common/test_image_service.py | yanndegat/ironic | 8857ec76443dea7778bb9c0d66568304e52495e5 | [
"Apache-2.0"
] | 7 | 2015-05-04T16:12:41.000Z | 2021-08-31T12:27:27.000Z | ironic/tests/unit/common/test_image_service.py | yanndegat/ironic | 8857ec76443dea7778bb9c0d66568304e52495e5 | [
"Apache-2.0"
] | 333 | 2015-01-06T09:09:22.000Z | 2022-02-20T08:11:40.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import builtins
import datetime
from http import client as http_client
import io
import os
import shutil
from unittest import mock
from oslo_config import cfg
from oslo_utils import uuidutils
import requests
from ironic.common import exception
from ironic.common.glance_service import image_service as glance_v2_service
from ironic.common import image_service
from ironic.tests import base
class HttpImageServiceTestCase(base.TestCase):
    """Tests for image_service.HttpImageService (http(s):// image refs)."""

    def setUp(self):
        super(HttpImageServiceTestCase, self).setUp()
        self.service = image_service.HttpImageService()
        self.href = 'https://127.0.0.1:12345/fedora.qcow2'

    def _assert_validate_fails(self, response):
        # validate_href must reject both an empty response and a client
        # error; reuses the already-patched HEAD response mock.
        for status_code in (http_client.NO_CONTENT, http_client.BAD_REQUEST):
            response.status_code = status_code
            self.assertRaises(exception.ImageRefValidationFailed,
                              self.service.validate_href,
                              self.href)

    def _assert_download_success(self, req_get_mock, shutil_mock,
                                 verify=True, timeout=60):
        # Drive a successful streaming download and check that the body is
        # copied from the raw response in IMAGE_CHUNK_SIZE chunks.
        response_mock = req_get_mock.return_value
        response_mock.status_code = http_client.OK
        response_mock.raw = mock.MagicMock(spec=io.BytesIO)
        file_mock = mock.Mock(spec=io.BytesIO)
        self.service.download(self.href, file_mock)
        shutil_mock.assert_called_once_with(
            response_mock.raw.__enter__(), file_mock,
            image_service.IMAGE_CHUNK_SIZE
        )
        req_get_mock.assert_called_once_with(self.href, stream=True,
                                             verify=verify,
                                             timeout=timeout)

    @mock.patch.object(os.path, 'exists', autospec=True)
    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_http_scheme(self, head_mock, path_mock):
        self.href = 'http://127.0.0.1:12345/fedora.qcow2'
        response = head_mock.return_value
        response.status_code = http_client.OK
        self.service.validate_href(self.href)
        # An HTTP href must never be probed as a local filesystem path.
        path_mock.assert_not_called()
        head_mock.assert_called_once_with(self.href, verify=True,
                                          timeout=60)
        self._assert_validate_fails(response)

    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_verify_false(self, head_mock):
        cfg.CONF.set_override('webserver_verify_ca', 'False')
        response = head_mock.return_value
        response.status_code = http_client.OK
        self.service.validate_href(self.href)
        head_mock.assert_called_once_with(self.href, verify=False,
                                          timeout=60)
        self._assert_validate_fails(response)

    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_verify_false_error(self, head_mock):
        cfg.CONF.set_override('webserver_verify_ca', 'False')
        head_mock.side_effect = requests.ConnectionError()
        self.assertRaises(exception.ImageRefValidationFailed,
                          self.service.validate_href, self.href)
        head_mock.assert_called_once_with(self.href, verify=False,
                                          timeout=60)
        head_mock.side_effect = requests.RequestException()
        self.assertRaises(exception.ImageRefValidationFailed,
                          self.service.validate_href, self.href)

    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_verify_true(self, head_mock):
        cfg.CONF.set_override('webserver_verify_ca', 'True')
        response = head_mock.return_value
        response.status_code = http_client.OK
        self.service.validate_href(self.href)
        head_mock.assert_called_once_with(self.href, verify=True,
                                          timeout=60)
        self._assert_validate_fails(response)

    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_verify_true_error(self, head_mock):
        cfg.CONF.set_override('webserver_verify_ca', 'True')
        head_mock.side_effect = requests.ConnectionError()
        self.assertRaises(exception.ImageRefValidationFailed,
                          self.service.validate_href, self.href)
        head_mock.assert_called_once_with(self.href, verify=True,
                                          timeout=60)
        head_mock.side_effect = requests.RequestException()
        self.assertRaises(exception.ImageRefValidationFailed,
                          self.service.validate_href, self.href)

    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_verify_valid_path(self, head_mock):
        cfg.CONF.set_override('webserver_verify_ca', '/some/path')
        response = head_mock.return_value
        response.status_code = http_client.OK
        self.service.validate_href(self.href)
        head_mock.assert_called_once_with(self.href, verify='/some/path',
                                          timeout=60)
        self._assert_validate_fails(response)

    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_custom_timeout(self, head_mock):
        cfg.CONF.set_override('webserver_connection_timeout', 15)
        response = head_mock.return_value
        response.status_code = http_client.OK
        self.service.validate_href(self.href)
        head_mock.assert_called_once_with(self.href, verify=True,
                                          timeout=15)
        self._assert_validate_fails(response)

    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_verify_connect_error(self, head_mock):
        cfg.CONF.set_override('webserver_verify_ca', '/some/path')
        # No response object is needed: the request never completes.
        head_mock.side_effect = requests.ConnectionError()
        self.assertRaises(exception.ImageRefValidationFailed,
                          self.service.validate_href, self.href)
        head_mock.assert_called_once_with(self.href, verify='/some/path',
                                          timeout=60)

    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_verify_error(self, head_mock):
        cfg.CONF.set_override('webserver_verify_ca', '/some/path')
        head_mock.side_effect = requests.RequestException()
        self.assertRaises(exception.ImageRefValidationFailed,
                          self.service.validate_href, self.href)
        head_mock.assert_called_once_with(self.href, verify='/some/path',
                                          timeout=60)

    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_verify_os_error(self, head_mock):
        cfg.CONF.set_override('webserver_verify_ca', '/some/path')
        head_mock.side_effect = OSError()
        self.assertRaises(exception.ImageRefValidationFailed,
                          self.service.validate_href, self.href)
        head_mock.assert_called_once_with(self.href, verify='/some/path',
                                          timeout=60)

    @mock.patch.object(requests, 'head', autospec=True)
    def test_validate_href_error_with_secret_parameter(self, head_mock):
        cfg.CONF.set_override('webserver_verify_ca', 'False')
        head_mock.return_value.status_code = 204
        e = self.assertRaises(exception.ImageRefValidationFailed,
                              self.service.validate_href,
                              self.href,
                              True)
        # With the secret flag set, the real URL must be masked in the
        # resulting error text.
        self.assertIn('secreturl', str(e))
        self.assertNotIn(self.href, str(e))
        head_mock.assert_called_once_with(self.href, verify=False,
                                          timeout=60)

    @mock.patch.object(requests, 'head', autospec=True)
    def _test_show(self, head_mock, mtime, mtime_date):
        # Parameterized by Last-Modified format: show() must parse all
        # three HTTP date formats into the same datetime.
        head_mock.return_value.status_code = http_client.OK
        head_mock.return_value.headers = {
            'Content-Length': 100,
            'Last-Modified': mtime
        }
        result = self.service.show(self.href)
        head_mock.assert_called_once_with(self.href, verify=True,
                                          timeout=60)
        self.assertEqual({'size': 100, 'updated_at': mtime_date,
                          'properties': {}}, result)

    def test_show_rfc_822(self):
        self._test_show(mtime='Tue, 15 Nov 2014 08:12:31 GMT',
                        mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))

    def test_show_rfc_850(self):
        self._test_show(mtime='Tuesday, 15-Nov-14 08:12:31 GMT',
                        mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))

    def test_show_ansi_c(self):
        self._test_show(mtime='Tue Nov 15 08:12:31 2014',
                        mtime_date=datetime.datetime(2014, 11, 15, 8, 12, 31))

    @mock.patch.object(requests, 'head', autospec=True)
    def test_show_no_content_length(self, head_mock):
        head_mock.return_value.status_code = http_client.OK
        head_mock.return_value.headers = {}
        self.assertRaises(exception.ImageRefValidationFailed,
                          self.service.show, self.href)
        head_mock.assert_called_with(self.href, verify=True,
                                     timeout=60)

    @mock.patch.object(shutil, 'copyfileobj', autospec=True)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_download_success_http_scheme(self, req_get_mock, shutil_mock):
        self.href = 'http://127.0.0.1:12345/fedora.qcow2'
        self._assert_download_success(req_get_mock, shutil_mock)

    @mock.patch.object(shutil, 'copyfileobj', autospec=True)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_download_success_verify_false(
            self, req_get_mock, shutil_mock):
        cfg.CONF.set_override('webserver_verify_ca', 'False')
        self._assert_download_success(req_get_mock, shutil_mock,
                                      verify=False)

    @mock.patch.object(shutil, 'copyfileobj', autospec=True)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_download_success_verify_true(
            self, req_get_mock, shutil_mock):
        cfg.CONF.set_override('webserver_verify_ca', 'True')
        self._assert_download_success(req_get_mock, shutil_mock,
                                      verify=True)

    @mock.patch.object(shutil, 'copyfileobj', autospec=True)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_download_success_verify_path(
            self, req_get_mock, shutil_mock):
        cfg.CONF.set_override('webserver_verify_ca', '/some/path')
        self._assert_download_success(req_get_mock, shutil_mock,
                                      verify='/some/path')

    @mock.patch.object(shutil, 'copyfileobj', autospec=True)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_download_fail_verify_false_connerror(
            self, req_get_mock, shutil_mock):
        cfg.CONF.set_override('webserver_verify_ca', False)
        req_get_mock.side_effect = requests.ConnectionError()
        file_mock = mock.Mock(spec=io.BytesIO)
        self.assertRaises(exception.ImageDownloadFailed,
                          self.service.download, self.href, file_mock)

    @mock.patch.object(shutil, 'copyfileobj', autospec=True)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_download_fail_verify_false_ioerror(
            self, req_get_mock, shutil_mock):
        cfg.CONF.set_override('webserver_verify_ca', False)
        response_mock = req_get_mock.return_value
        response_mock.status_code = http_client.OK
        response_mock.raw = mock.MagicMock(spec=io.BytesIO)
        file_mock = mock.Mock(spec=io.BytesIO)
        shutil_mock.side_effect = IOError
        self.assertRaises(exception.ImageDownloadFailed,
                          self.service.download, self.href, file_mock)
        req_get_mock.assert_called_once_with(self.href, stream=True,
                                             verify=False,
                                             timeout=60)

    @mock.patch.object(shutil, 'copyfileobj', autospec=True)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_download_success_verify_true_connerror(
            self, req_get_mock, shutil_mock):
        cfg.CONF.set_override('webserver_verify_ca', '/some/path')
        # The GET fails before any response exists, so no response mock
        # is required; the download must surface ImageDownloadFailed.
        req_get_mock.side_effect = requests.ConnectionError
        file_mock = mock.Mock(spec=io.BytesIO)
        self.assertRaises(exception.ImageDownloadFailed,
                          self.service.download, self.href, file_mock)
        req_get_mock.assert_called_once_with(self.href, stream=True,
                                             verify='/some/path',
                                             timeout=60)

    @mock.patch.object(shutil, 'copyfileobj', autospec=True)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_download_fail_verify_true_ioerror(
            self, req_get_mock, shutil_mock):
        cfg.CONF.set_override('webserver_verify_ca', '/some/path')
        response_mock = req_get_mock.return_value
        response_mock.status_code = http_client.OK
        response_mock.raw = mock.MagicMock(spec=io.BytesIO)
        file_mock = mock.Mock(spec=io.BytesIO)
        shutil_mock.side_effect = IOError
        self.assertRaises(exception.ImageDownloadFailed,
                          self.service.download, self.href, file_mock)
        req_get_mock.assert_called_once_with(self.href, stream=True,
                                             verify='/some/path',
                                             timeout=60)

    @mock.patch.object(shutil, 'copyfileobj', autospec=True)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_download_fail_verify_true_oserror(
            self, req_get_mock, shutil_mock):
        cfg.CONF.set_override('webserver_verify_ca', '/some/path')
        response_mock = req_get_mock.return_value
        response_mock.status_code = http_client.OK
        response_mock.raw = mock.MagicMock(spec=io.BytesIO)
        file_mock = mock.Mock(spec=io.BytesIO)
        shutil_mock.side_effect = OSError()
        self.assertRaises(exception.ImageDownloadFailed,
                          self.service.download, self.href, file_mock)
        req_get_mock.assert_called_once_with(self.href, stream=True,
                                             verify='/some/path',
                                             timeout=60)

    @mock.patch.object(shutil, 'copyfileobj', autospec=True)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_download_success_custom_timeout(
            self, req_get_mock, shutil_mock):
        cfg.CONF.set_override('webserver_connection_timeout', 15)
        self._assert_download_success(req_get_mock, shutil_mock,
                                      timeout=15)
class FileImageServiceTestCase(base.TestCase):
    """Tests for image_service.FileImageService (file:// image refs)."""

    def setUp(self):
        super(FileImageServiceTestCase, self).setUp()
        self.service = image_service.FileImageService()
        self.href = 'file:///home/user/image.qcow2'
        # Plain filesystem path the service is expected to derive from href.
        self.href_path = '/home/user/image.qcow2'

    @mock.patch.object(os.path, 'isfile', return_value=True, autospec=True)
    def test_validate_href(self, path_exists_mock):
        # A href pointing at an existing regular file validates cleanly.
        self.service.validate_href(self.href)
        path_exists_mock.assert_called_once_with(self.href_path)

    @mock.patch.object(os.path, 'isfile', return_value=False, autospec=True)
    def test_validate_href_path_not_found_or_not_file(self, path_exists_mock):
        self.assertRaises(exception.ImageRefValidationFailed,
                          self.service.validate_href, self.href)
        path_exists_mock.assert_called_once_with(self.href_path)

    @mock.patch.object(os.path, 'getmtime', return_value=1431087909.1641912,
                       autospec=True)
    @mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
    @mock.patch.object(image_service.FileImageService, 'validate_href',
                       autospec=True)
    def test_show(self, _validate_mock, getsize_mock, getmtime_mock):
        _validate_mock.return_value = self.href_path
        result = self.service.show(self.href)
        getsize_mock.assert_called_once_with(self.href_path)
        getmtime_mock.assert_called_once_with(self.href_path)
        # validate_href is autospec'd, so its first argument is the service
        # instance; mock.ANY matches it.
        _validate_mock.assert_called_once_with(mock.ANY, self.href)
        # Epoch 1431087909.1641912 -> 2015-05-08T12:25:09.164191
        # (presumably interpreted as UTC by show() -- TODO confirm).
        self.assertEqual({'size': 42,
                          'updated_at': datetime.datetime(2015, 5, 8,
                                                          12, 25, 9, 164191),
                          'properties': {}}, result)

    @mock.patch.object(os, 'link', autospec=True)
    @mock.patch.object(os, 'remove', autospec=True)
    @mock.patch.object(os, 'access', return_value=True, autospec=True)
    @mock.patch.object(os, 'stat', autospec=True)
    @mock.patch.object(image_service.FileImageService, 'validate_href',
                       autospec=True)
    def test_download_hard_link(self, _validate_mock, stat_mock, access_mock,
                                remove_mock, link_mock):
        _validate_mock.return_value = self.href_path
        # Both stat() calls report the same device, so the service can take
        # the hard-link fast path instead of copying bytes.
        stat_mock.return_value.st_dev = 'dev1'
        file_mock = mock.Mock(spec=io.BytesIO)
        file_mock.name = 'file'
        self.service.download(self.href, file_mock)
        _validate_mock.assert_called_once_with(mock.ANY, self.href)
        self.assertEqual(2, stat_mock.call_count)
        access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
        # Destination is removed first, then hard-linked to the source.
        remove_mock.assert_called_once_with('file')
        link_mock.assert_called_once_with(self.href_path, 'file')

    @mock.patch.object(os, 'sendfile', return_value=42, autospec=True)
    @mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
    @mock.patch.object(builtins, 'open', autospec=True)
    @mock.patch.object(os, 'access', return_value=False, autospec=True)
    @mock.patch.object(os, 'stat', autospec=True)
    @mock.patch.object(image_service.FileImageService, 'validate_href',
                       autospec=True)
    def test_download_copy(self, _validate_mock, stat_mock, access_mock,
                           open_mock, size_mock, copy_mock):
        # access() returns False, forcing the sendfile copy path instead of
        # the hard-link path.
        _validate_mock.return_value = self.href_path
        stat_mock.return_value.st_dev = 'dev1'
        file_mock = mock.MagicMock(spec=io.BytesIO)
        file_mock.name = 'file'
        input_mock = mock.MagicMock(spec=io.BytesIO)
        open_mock.return_value = input_mock
        self.service.download(self.href, file_mock)
        _validate_mock.assert_called_once_with(mock.ANY, self.href)
        self.assertEqual(2, stat_mock.call_count)
        access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
        # Whole 42-byte image copied in a single sendfile() call from
        # offset 0.
        copy_mock.assert_called_once_with(file_mock.fileno(),
                                          input_mock.__enter__().fileno(),
                                          0, 42)

    @mock.patch.object(os, 'sendfile', autospec=True)
    @mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
    @mock.patch.object(builtins, 'open', autospec=True)
    @mock.patch.object(os, 'access', return_value=False, autospec=True)
    @mock.patch.object(os, 'stat', autospec=True)
    @mock.patch.object(image_service.FileImageService, 'validate_href',
                       autospec=True)
    def test_download_copy_segmented(self, _validate_mock, stat_mock,
                                     access_mock, open_mock, size_mock,
                                     copy_mock):
        # Fake a 3G + 1k image
        chunk_size = image_service.SENDFILE_CHUNK_SIZE
        fake_image_size = chunk_size * 3 + 1024
        fake_chunk_seq = [chunk_size, chunk_size, chunk_size, 1024]
        _validate_mock.return_value = self.href_path
        stat_mock.return_value.st_dev = 'dev1'
        file_mock = mock.MagicMock(spec=io.BytesIO)
        file_mock.name = 'file'
        input_mock = mock.MagicMock(spec=io.BytesIO)
        open_mock.return_value = input_mock
        size_mock.return_value = fake_image_size
        # Each sendfile() call reports how many bytes it transferred.
        copy_mock.side_effect = fake_chunk_seq
        self.service.download(self.href, file_mock)
        _validate_mock.assert_called_once_with(mock.ANY, self.href)
        self.assertEqual(2, stat_mock.call_count)
        access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
        # Expect four sendfile() calls with advancing offsets: three full
        # chunks followed by the 1k remainder.
        copy_calls = [mock.call(file_mock.fileno(),
                                input_mock.__enter__().fileno(),
                                chunk_size * i,
                                fake_chunk_seq[i]) for i in range(4)]
        copy_mock.assert_has_calls(copy_calls)
        size_mock.assert_called_once_with(self.href_path)

    @mock.patch.object(os, 'remove', side_effect=OSError, autospec=True)
    @mock.patch.object(os, 'access', return_value=True, autospec=True)
    @mock.patch.object(os, 'stat', autospec=True)
    @mock.patch.object(image_service.FileImageService, 'validate_href',
                       autospec=True)
    def test_download_hard_link_fail(self, _validate_mock, stat_mock,
                                     access_mock, remove_mock):
        # os.remove failing on the destination must surface as
        # ImageDownloadFailed rather than a raw OSError.
        _validate_mock.return_value = self.href_path
        stat_mock.return_value.st_dev = 'dev1'
        file_mock = mock.MagicMock(spec=io.BytesIO)
        file_mock.name = 'file'
        self.assertRaises(exception.ImageDownloadFailed,
                          self.service.download, self.href, file_mock)
        _validate_mock.assert_called_once_with(mock.ANY, self.href)
        self.assertEqual(2, stat_mock.call_count)
        access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)

    @mock.patch.object(os, 'sendfile', side_effect=OSError, autospec=True)
    @mock.patch.object(os.path, 'getsize', return_value=42, autospec=True)
    @mock.patch.object(builtins, 'open', autospec=True)
    @mock.patch.object(os, 'access', return_value=False, autospec=True)
    @mock.patch.object(os, 'stat', autospec=True)
    @mock.patch.object(image_service.FileImageService, 'validate_href',
                       autospec=True)
    def test_download_copy_fail(self, _validate_mock, stat_mock, access_mock,
                                open_mock, size_mock, copy_mock):
        # sendfile() failing mid-copy must surface as ImageDownloadFailed.
        _validate_mock.return_value = self.href_path
        stat_mock.return_value.st_dev = 'dev1'
        file_mock = mock.MagicMock(spec=io.BytesIO)
        file_mock.name = 'file'
        input_mock = mock.MagicMock(spec=io.BytesIO)
        open_mock.return_value = input_mock
        self.assertRaises(exception.ImageDownloadFailed,
                          self.service.download, self.href, file_mock)
        _validate_mock.assert_called_once_with(mock.ANY, self.href)
        self.assertEqual(2, stat_mock.call_count)
        access_mock.assert_called_once_with(self.href_path, os.R_OK | os.W_OK)
        size_mock.assert_called_once_with(self.href_path)
class ServiceGetterTestCase(base.TestCase):
    """Tests that get_image_service dispatches on the image-ref scheme."""

    @mock.patch.object(glance_v2_service.GlanceImageService, '__init__',
                       return_value=None, autospec=True)
    def test_get_glance_image_service(self, glance_service_mock):
        # A bare UUID (no scheme) is treated as a glance image id.
        ref = uuidutils.generate_uuid()
        image_service.get_image_service(ref, context=self.context)
        glance_service_mock.assert_called_once_with(mock.ANY, None,
                                                    self.context)

    @mock.patch.object(glance_v2_service.GlanceImageService, '__init__',
                       return_value=None, autospec=True)
    def test_get_glance_image_service_url(self, glance_service_mock):
        # An explicit glance:// URL also routes to the glance service.
        ref = f'glance://{uuidutils.generate_uuid()}'
        image_service.get_image_service(ref, context=self.context)
        glance_service_mock.assert_called_once_with(mock.ANY, None,
                                                    self.context)

    @mock.patch.object(image_service.HttpImageService, '__init__',
                       return_value=None, autospec=True)
    def test_get_http_image_service(self, http_service_mock):
        image_service.get_image_service('http://127.0.0.1/image.qcow2')
        http_service_mock.assert_called_once_with()

    @mock.patch.object(image_service.HttpImageService, '__init__',
                       return_value=None, autospec=True)
    def test_get_https_image_service(self, http_service_mock):
        image_service.get_image_service('https://127.0.0.1/image.qcow2')
        http_service_mock.assert_called_once_with()

    @mock.patch.object(image_service.FileImageService, '__init__',
                       return_value=None, autospec=True)
    def test_get_file_image_service(self, local_service_mock):
        image_service.get_image_service('file:///home/user/image.qcow2')
        local_service_mock.assert_called_once_with()

    def test_get_image_service_invalid_image_ref(self):
        # Unknown schemes and non-UUID plain strings are both rejected.
        for bad_ref in ('usenet://alt.binaries.dvd/image.qcow2',
                        'no scheme, no uuid'):
            self.assertRaises(exception.ImageRefValidationFailed,
                              image_service.get_image_service, bad_ref)
| 49.566836 | 78 | 0.644057 | 3,465 | 29,294 | 5.140837 | 0.075036 | 0.042665 | 0.059788 | 0.057262 | 0.882726 | 0.874137 | 0.83512 | 0.826924 | 0.807837 | 0.790602 | 0 | 0.012392 | 0.261726 | 29,294 | 590 | 79 | 49.650847 | 0.811254 | 0.019356 | 0 | 0.737044 | 0 | 0 | 0.054404 | 0.006026 | 0 | 0 | 0 | 0 | 0.174664 | 1 | 0.080614 | false | 0 | 0.026871 | 0 | 0.113244 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6aa862415c50801f0f8df033de9251bf096a0c33 | 2,258 | py | Python | asyncexec/workers/inout_worker.py | arshadansari27/async-exec-core | 4cac04e0c901014a9fd2ba0815d3a29c3c6b6c3a | [
"MIT"
] | 4 | 2017-02-15T09:25:49.000Z | 2020-08-02T17:51:27.000Z | asyncexec/workers/inout_worker.py | arshadansari27/async-exec-core | 4cac04e0c901014a9fd2ba0815d3a29c3c6b6c3a | [
"MIT"
] | 7 | 2017-06-08T18:35:14.000Z | 2021-06-01T21:54:13.000Z | asyncexec/workers/inout_worker.py | arshadansari27/async-exec-core | 4cac04e0c901014a9fd2ba0815d3a29c3c6b6c3a | [
"MIT"
] | 2 | 2017-11-23T11:39:03.000Z | 2019-12-16T06:57:27.000Z | from . import Actor
import traceback
import logging
logger = logging.getLogger(__name__)
class InOutWorker(object):
    """Pipes published messages through an actor-backed handler.

    Each message obtained from ``publisher`` is passed to ``func`` (wrapped
    in an :class:`Actor`) and the single response is forwarded to
    ``consumer``.
    """

    def __init__(self, loop, pool, func, publisher, consumer, ready_event, terminate_event):
        self.loop = loop
        self.actor = Actor(pool, func, is_generator=True)
        # Actor.start() yields the client used to invoke the handler.
        self.client = loop.run_until_complete(self.actor.start())
        self.publisher = publisher
        self.consumer = consumer
        self.ready_event = ready_event
        self.terminate_event = terminate_event

    async def start(self):
        """Run the publish -> handle -> consume loop until terminated."""
        self.ready_event.data = 'InOutWorker'
        self.ready_event.set()
        try:
            while True:
                # Stop only once the queue is drained AND termination was
                # requested, so queued work is never dropped.
                if self.publisher.empty() and self.terminate_event.is_set():
                    break
                data = await self.publisher.publish()
                response = await self.client.call.handler(data)
                await self.consumer.consume(response)
            if not self.terminate_event.is_set():
                self.terminate_event.data = 'DONE'
                self.terminate_event.set()
        except Exception:
            # NOTE: the original bare ``except:`` also swallowed
            # SystemExit/KeyboardInterrupt and asyncio.CancelledError, so
            # cancelling this task killed the whole process. Catch only
            # Exception and log the traceback before exiting.
            logger.exception("InOutWorker failed")
            exit(1)
class InOutManyWorker(object):
    """Pipes published messages through a fan-out actor-backed handler.

    Like :class:`InOutWorker`, but the handler returns an iterable of
    responses and each one is forwarded to ``consumer`` individually.
    """

    def __init__(self, loop, pool, func, publisher, consumer, ready_event, terminate_event):
        self.loop = loop
        self.actor = Actor(pool, func, is_generator=True)
        # Actor.start() yields the client used to invoke the handler.
        self.client = loop.run_until_complete(self.actor.start())
        self.publisher = publisher
        self.consumer = consumer
        self.ready_event = ready_event
        self.terminate_event = terminate_event

    async def start(self):
        """Run the publish -> handle -> consume-many loop until terminated."""
        self.ready_event.data = 'InOutWorker'
        self.ready_event.set()
        try:
            while True:
                # Stop only once the queue is drained AND termination was
                # requested, so queued work is never dropped.
                if self.publisher.empty() and self.terminate_event.is_set():
                    break
                data = await self.publisher.publish()
                responses = await self.client.call.handler(data)
                for response in responses:
                    await self.consumer.consume(response)
            if not self.terminate_event.is_set():
                self.terminate_event.data = 'DONE'
                self.terminate_event.set()
        except Exception:
            # NOTE: the original bare ``except:`` also swallowed
            # SystemExit/KeyboardInterrupt and asyncio.CancelledError, so
            # cancelling this task killed the whole process. Catch only
            # Exception and log the traceback before exiting.
            logger.exception("InOutManyWorker failed")
            exit(1)
| 35.84127 | 92 | 0.599646 | 249 | 2,258 | 5.24498 | 0.2249 | 0.150077 | 0.137825 | 0.061256 | 0.88974 | 0.88974 | 0.843798 | 0.843798 | 0.843798 | 0.843798 | 0 | 0.001287 | 0.31178 | 2,258 | 62 | 93 | 36.419355 | 0.839125 | 0 | 0 | 0.836364 | 0 | 0 | 0.013286 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036364 | false | 0 | 0.054545 | 0 | 0.127273 | 0.036364 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0a8e2352a522090f6524ee3153d4d5258bef9331 | 18,302 | py | Python | eagerx_interbotix/safety/node.py | eager-dev/eagerx_interbotix | 963ded1f69f6caae2d8f27a9fa53e734e683dd31 | [
"Apache-2.0"
] | null | null | null | eagerx_interbotix/safety/node.py | eager-dev/eagerx_interbotix | 963ded1f69f6caae2d8f27a9fa53e734e683dd31 | [
"Apache-2.0"
] | 1 | 2022-03-29T14:40:56.000Z | 2022-03-29T14:40:56.000Z | eagerx_interbotix/safety/node.py | eager-dev/eagerx_interbotix | 963ded1f69f6caae2d8f27a9fa53e734e683dd31 | [
"Apache-2.0"
] | null | null | null | from typing import Optional, List, Dict
from collections import deque
from urdf_parser_py.urdf import URDF
import numpy as np
# IMPORT ROS
from std_msgs.msg import Float32MultiArray, UInt64
# IMPORT EAGERX
import eagerx.core.register as register
from eagerx.utils.utils import Msg, get_attribute_from_module
from eagerx.core.entities import Node, SpaceConverter
from eagerx.core.constants import process as p
from eagerx_interbotix.safety import collision as col
class SafePositionControl(Node):
    @staticmethod
    @register.spec("SafePositionControl", Node)
    def spec(
        spec,
        name: str,
        rate: float,
        joints: List[int],
        upper: List[float],
        lower: List[float],
        vel_limit: List[float],
        duration: Optional[float] = None,
        checks: int = 2,
        collision: Dict = None,
        process: Optional[int] = p.NEW_PROCESS,
        color: Optional[str] = "grey",
    ):
        """
        Filters goal joint positions that cause self-collisions or are below a certain height.
        Also check velocity limits.

        :param spec: Not provided by user.
        :param name: Node name
        :param rate: Rate at which callback is called.
        :param joints: joint names
        :param upper: upper joint limits
        :param lower: lower joint limits
        :param vel_limit: absolute velocity joint limits
        :param duration: time (seconds) it takes to reach the commanded positions from the current position.
        :param checks: collision checks performed over the duration.
        :param collision: A dict with the robot & workspace specification.
        :param process: {0: NEW_PROCESS, 1: ENVIRONMENT, 2: ENGINE, 3: EXTERNAL}
        :param color: console color of logged messages. {'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white', 'grey'}
        :return:
        """
        # Modify default node params
        spec.config.name = name
        spec.config.rate = rate
        spec.config.process = process
        spec.config.color = color
        spec.config.inputs = ["goal", "current"]
        spec.config.outputs = ["filtered", "in_collision"]

        # Modify custom node params
        spec.config.joints = joints
        spec.config.upper = upper
        spec.config.lower = lower
        spec.config.vel_limit = vel_limit
        # Default duration: two callback periods.
        spec.config.duration = duration if isinstance(duration, float) else 2.0 / rate
        spec.config.checks = checks

        # Collision detector
        spec.config.collision = collision if isinstance(collision, dict) else None

        # Add converter & space_converter
        spec.inputs.goal.space_converter = SpaceConverter.make(
            "Space_Float32MultiArray", "$(config lower)", "$(config upper)", dtype="float32"
        )
        spec.inputs.current.space_converter = SpaceConverter.make(
            "Space_Float32MultiArray", "$(config lower)", "$(config upper)", dtype="float32"
        )
        spec.outputs.filtered.space_converter = SpaceConverter.make(
            "Space_Float32MultiArray", "$(config lower)", "$(config upper)", dtype="float32"
        )

    def initialize(
        self,
        joints: List[str],
        upper: List[float],
        lower: List[float],
        vel_limit: List[float],
        duration: float,
        checks: int,
        collision: dict,
    ):
        """Store joint limits and (optionally) set up a pybullet-based
        collision checker. See :meth:`spec` for parameter semantics."""
        self.joints = joints
        self.upper = np.array(upper, dtype="float")
        self.lower = np.array(lower, dtype="float")
        self.vel_limit = np.array(vel_limit, dtype="float")
        self.duration = duration
        self.checks = checks
        self.dt = 1 / self.rate

        # Setup collision detector
        self.collision = collision
        if collision is not None:
            self.collision_check = True
            # Setup physics server for collision checking
            import pybullet

            if collision.get("gui", False):
                self.col_id = pybullet.connect(pybullet.GUI)
            else:
                self.col_id = pybullet.connect(pybullet.DIRECT)
            # Load workspace
            bodies = get_attribute_from_module(collision["workspace"])(self.col_id)
            # Generate robot urdf (if not a path but a text file)
            r = collision["robot"]
            if r["urdf"].endswith(".urdf"):  # Full path specified
                fileName = r["urdf"]
            else:  # First write to /tmp file (else pybullet cannot load urdf)
                import uuid  # Use this to generate a unique filename

                fileName = f"/tmp/{str(uuid.uuid4())}.urdf"
                with open(fileName, "w") as file:
                    file.write(r["urdf"])
            # Load robot
            bodies["robot"] = pybullet.loadURDF(
                fileName,
                basePosition=r.get("basePosition", None),
                baseOrientation=r.get("baseOrientation", None),
                useFixedBase=r.get("useFixedBase", True),
                flags=r.get("flags", 0),
                physicsClientId=self.col_id,
            )
            urdf = URDF.from_xml_file(fileName)
            # Determine collision pairs
            self_collision_pairs, workspace_pairs = col.get_named_collision_pairs(bodies, urdf, joints)
            # Create collision detector
            self.self_collision = col.CollisionDetector(self.col_id, bodies, joints, self_collision_pairs)
            self.workspace = col.CollisionDetector(self.col_id, bodies, joints, workspace_pairs)
            # Set distance at which objects are considered in collision.
            self.margin = collision.get("margin", 0.0)
            # self._test_collision_tester(joints)
        else:
            self.collision_check = False

    def _test_collision_tester(self, joints):
        # Manual debug helper (infinite loop, blocks on stdin); only invoked
        # by uncommenting the call in initialize().
        while True:
            # compute shortest distances for a random configuration
            q = np.pi * (np.random.random(len(joints)) - 0.5)
            in_col = self.self_collision.in_collision(q)
            print(f"[self_collision] In collision = {in_col}")
            in_col = self.workspace.in_collision(q)
            print(f"[workspace] In collision = {in_col}")

            # wait for user to press enter to continue
            input("Press <enter>!")

    @register.states()
    def reset(self):
        # Ring buffer of recently verified collision-free joint positions.
        self.safe_poses = deque(maxlen=10)
        self.consecutive_unsafe = 0

    @register.inputs(goal=Float32MultiArray, current=Float32MultiArray)
    @register.outputs(filtered=Float32MultiArray, in_collision=UInt64)
    def callback(self, t_n: float, goal: Msg = None, current: Msg = None):
        """Clip the goal to joint/velocity limits and reject goals whose
        interpolated path collides; returns the filtered goal and a
        collision code (0=none, 1=self-collision, 2=workspace)."""
        goal = np.array(goal.msgs[-1].data, dtype="float32")
        current = np.array(current.msgs[-1].data, dtype="float32")

        # Setpoint last safe position
        if self.collision_check:
            if (
                self.self_collision.in_collision(q=current, margin=self.margin)
                and self.self_collision.get_distance().min() < 0
            ):
                # rospy.loginfo(f"[self_collision]: margin = {self.margin} | ds = {self.self_collision.get_distance().min()}")
                in_collision = UInt64(data=1)
            elif self.workspace.in_collision(margin=self.margin) and self.workspace.get_distance().min() < 0:
                # rospy.loginfo(f"[workspace]: margin = {self.margin} | ds = {self.workspace.get_distance().min()}")
                in_collision = UInt64(data=2)
            else:
                in_collision = UInt64(data=0)
                # Only record the pose as safe if the previous cycle was
                # also collision-free.
                if self.consecutive_unsafe == 0:
                    self.safe_poses.append(current)
        else:
            in_collision = UInt64(data=0)

        # Clip to joint limits
        filtered = np.clip(goal, self.lower, self.upper, dtype="float32")

        # Reduce goal to vel_limit
        # NOTE(review): the limit test divides by `duration` but the clamp
        # steps by `dt * vel_limit` -- presumably intentional, but confirm.
        diff = filtered - current
        too_fast = np.abs(diff / (self.duration)) > self.vel_limit
        if np.any(too_fast):
            filtered[too_fast] = current[too_fast] + np.sign(diff[too_fast]) * self.dt * self.vel_limit[too_fast]

        if self.collision_check:
            # Linearly interpolate for intermediate joint configurations
            t = np.linspace(self.dt / self.checks, self.dt, self.checks)
            interp = np.empty((current.shape[0], self.checks), dtype="float32")
            diff = filtered - current
            for i in range(current.shape[0]):
                # 1.02 overshoot adds a small safety buffer past the goal.
                interp[i][:] = np.interp(t, [0, self.duration], [current[i], current[i] + diff[i] * 1.02])
            for i in range(self.checks):
                if self.self_collision.in_collision(q=interp[:, i], margin=self.margin):
                    # Fall back to the last known safe pose (or hold).
                    filtered = self.safe_poses[-1] if len(self.safe_poses) > 0 else current
                    self.consecutive_unsafe += 1
                    break
                elif self.workspace.in_collision(margin=self.margin):
                    filtered = self.safe_poses[-1] if len(self.safe_poses) > 0 else current
                    self.consecutive_unsafe += 1
                    break
            if i + 1 == self.checks:  # If all checks were successful (i.e. we did not break for-loop).
                self.consecutive_unsafe = 0
        else:
            self.consecutive_unsafe = 0
            in_collision = UInt64(data=0)
        return dict(filtered=Float32MultiArray(data=filtered), in_collision=in_collision)
class SafeVelocityControl(Node):
    """Safety filter for joint *velocity* commands.

    Clips the commanded joint velocities to the velocity limits, rescales them so
    that the integrated position stays within the joint position limits and, when
    a collision model is configured, overrides commands that would lead to a
    self-collision or workspace collision with a velocity towards the last known
    safe pose.
    """

    @staticmethod
    @register.spec("SafeVelocityControl", Node)
    def spec(
        spec,
        name: str,
        rate: float,
        joints: List[int],
        upper: List[float],
        lower: List[float],
        vel_limit: List[float],
        duration: Optional[float] = None,
        checks: int = 2,
        collision: Dict = None,
        process: Optional[int] = p.NEW_PROCESS,
        color: Optional[str] = "grey",
    ):
        """
        Filters goal joint positions that cause self-collisions or are below a certain height.
        Also check velocity limits.

        :param spec: Not provided by user.
        :param name: Node name
        :param rate: Rate at which callback is called.
        :param joints: joint names
        :param upper: upper joint limits
        :param lower: lower joint limits
        :param vel_limit: absolute velocity joint limits
        :param duration: time (seconds) it takes to reach the commanded positions from the current position.
        :param checks: collision checks performed over the duration.
        :param collision: A dict with the robot & workspace specification.
        :param process: {0: NEW_PROCESS, 1: ENVIRONMENT, 2: ENGINE, 3: EXTERNAL}
        :param color: console color of logged messages. {'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white', 'grey'}
        :return:
        """
        # Modify default node params
        spec.config.name = name
        spec.config.rate = rate
        spec.config.process = process
        spec.config.color = color
        spec.config.inputs = ["goal", "position", "velocity"]
        spec.config.outputs = ["filtered", "in_collision"]

        # Modify custom node params
        spec.config.joints = joints
        spec.config.upper = upper
        spec.config.lower = lower
        spec.config.vel_limit = vel_limit
        # Default duration spans two callback periods.
        spec.config.duration = duration if isinstance(duration, float) else 2.0 / rate
        spec.config.checks = checks

        # Collision detector
        spec.config.collision = collision if isinstance(collision, dict) else None

        # Add converter & space_converter
        spec.inputs.goal.space_converter = SpaceConverter.make(
            "Space_Float32MultiArray", [-v for v in vel_limit], vel_limit, dtype="float32"
        )
        spec.inputs.position.space_converter = SpaceConverter.make(
            "Space_Float32MultiArray", "$(config lower)", "$(config upper)", dtype="float32"
        )
        # NOTE(review): `filtered` is a velocity signal, yet its space is bounded by the
        # joint *position* limits below — confirm whether ±vel_limit was intended.
        spec.outputs.filtered.space_converter = SpaceConverter.make(
            "Space_Float32MultiArray", "$(config lower)", "$(config upper)", dtype="float32"
        )

    def initialize(
        self,
        joints: List[str],
        upper: List[float],
        lower: List[float],
        vel_limit: List[float],
        duration: float,
        checks: int,
        collision: dict,
    ):
        """Store the joint limits and (optionally) build the pybullet collision detectors.

        When ``collision`` is None, all collision checking in the callback is skipped.
        """
        self.joints = joints
        self.upper = np.array(upper, dtype="float")
        self.lower = np.array(lower, dtype="float")
        self.vel_limit = np.array(vel_limit, dtype="float")
        self.duration = duration
        self.checks = checks
        self.dt = 1 / self.rate

        # Setup collision detector
        self.collision = collision
        if collision is not None:
            self.collision_check = True

            # Setup physics server for collision checking
            import pybullet

            if collision.get("gui", False):
                self.col_id = pybullet.connect(pybullet.GUI)
            else:
                self.col_id = pybullet.connect(pybullet.DIRECT)

            # Load workspace
            bodies = get_attribute_from_module(collision["workspace"])(self.col_id)

            # Generate robot urdf (if not a path but a text file)
            r = collision["robot"]
            if r["urdf"].endswith(".urdf"):  # Full path specified
                fileName = r["urdf"]
            else:  # First write to /tmp file (else pybullet cannot load urdf)
                import uuid  # Use this to generate a unique filename

                fileName = f"/tmp/{str(uuid.uuid4())}.urdf"
                with open(fileName, "w") as file:
                    file.write(r["urdf"])

            # Load robot
            bodies["robot"] = pybullet.loadURDF(
                fileName,
                basePosition=r.get("basePosition", None),
                baseOrientation=r.get("baseOrientation", None),
                useFixedBase=r.get("useFixedBase", True),
                flags=r.get("flags", 0),
                physicsClientId=self.col_id,
            )
            urdf = URDF.from_xml_file(fileName)

            # Determine collision pairs
            self_collision_pairs, workspace_pairs = col.get_named_collision_pairs(bodies, urdf, joints)

            # Create collision detector
            self.self_collision = col.CollisionDetector(self.col_id, bodies, joints, self_collision_pairs)
            self.workspace = col.CollisionDetector(self.col_id, bodies, joints, workspace_pairs)

            # Set distance at which objects are considered in collision.
            self.margin = collision.get("margin", 0.0)
            # self._test_collision_tester(joints)
        else:
            self.collision_check = False

    def _test_collision_tester(self, joints):
        """Interactive sanity check: sample random configurations and print collision flags."""
        while True:
            # compute shortest distances for a random configuration
            q = np.pi * (np.random.random(len(joints)) - 0.5)
            in_col = self.self_collision.in_collision(q)
            print(f"[self_collision] In collision = {in_col}")
            in_col = self.workspace.in_collision(q)
            print(f"[workspace] In collision = {in_col}")
            # wait for user to press enter to continue
            input("Press <enter>!")

    @register.states()
    def reset(self):
        """Reset the safety state: clear the safe-pose history and the unsafe counter."""
        self.safe_poses = deque(maxlen=10)
        self.consecutive_unsafe = 0

    @register.inputs(goal=Float32MultiArray, position=Float32MultiArray, velocity=Float32MultiArray)
    @register.outputs(filtered=Float32MultiArray, in_collision=UInt64)
    def callback(self, t_n: float, goal: Msg = None, position: Msg = None, velocity: Msg = None):
        """Filter the latest velocity goal.

        :param t_n: Node time (unused by the filtering logic).
        :param goal: Commanded joint velocities (latest message is used).
        :param position: Current joint positions.
        :param velocity: Current joint velocities.
        :return: dict with `filtered` (possibly overridden velocity command) and
                 `in_collision` (0: free, 1: self-collision, 2: workspace collision).
        """
        goal = np.array(goal.msgs[-1].data, dtype="float32")
        position = np.array(position.msgs[-1].data, dtype="float32")
        velocity = np.array(velocity.msgs[-1].data, dtype="float32")

        # Setpoint last safe position
        if self.collision_check:
            if (
                self.self_collision.in_collision(q=position, margin=self.margin)
                and self.self_collision.get_distance().min() < 0
            ):
                in_collision = UInt64(data=1)
            elif self.workspace.in_collision(margin=self.margin) and self.workspace.get_distance().min() < 0:
                in_collision = UInt64(data=2)
            else:
                # Current pose is collision free: remember it as a fallback target.
                in_collision = UInt64(data=0)
                self.consecutive_unsafe = 0
                self.safe_poses.append(position)
        else:
            in_collision = UInt64(data=0)

        # Clip to velocity limits
        filtered = np.clip(goal, -self.vel_limit, self.vel_limit, dtype="float32")

        # Clip to joint limits: rescale the velocity so the integrated position stays in range.
        max_position = position + filtered * self.dt
        clip_position = np.clip(max_position, self.lower, self.upper, dtype="float32")
        filtered = (clip_position - position) / self.dt

        if self.collision_check:
            # Linearly interpolate for intermediate joint configurations
            lin_vel = np.linspace(velocity, filtered, self.checks)
            interp = np.empty((position.shape[0], self.checks + 1), dtype="float32")
            interp[:, 0] = position
            interp[:, -1] = clip_position
            for i in range(self.checks):
                # Integrate the interpolated velocity; column i (including column 0)
                # is advanced in place before being propagated to column i + 1.
                interp[:, i] += lin_vel[i] * self.dt / self.checks
                if i + 1 < self.checks:
                    interp[:, i + 1] = interp[:, i]
            for i in range(self.checks + 1):
                flag = self.self_collision.in_collision(q=interp[:, i], margin=self.margin)
                # `|` (not `or`) so the workspace check always runs on the same q.
                flag = flag | self.workspace.in_collision(margin=self.margin)
                if flag:
                    self.consecutive_unsafe += 1
                    if len(self.safe_poses) > 0:
                        # Back up further into the safe-pose history the longer we stay unsafe.
                        idx = min(len(self.safe_poses), self.consecutive_unsafe + 1)
                        filtered = (self.safe_poses[-idx] - position) / self.dt
                        filtered = 0.5 * np.clip(filtered, -self.vel_limit, self.vel_limit, dtype="float32")
                    else:
                        # BUGFIX: the original `filtered * 0` was a no-op expression statement;
                        # assign the result so we actually command zero velocity when no safe
                        # pose is available.
                        filtered = filtered * 0
                    break
                # NOTE(review): the loop runs self.checks + 1 times, so this resets the unsafe
                # counter one iteration *before* the last interpolated pose — confirm intent.
                elif i + 1 == self.checks:
                    self.consecutive_unsafe = 0
        else:
            self.consecutive_unsafe = 0
            in_collision = UInt64(data=0)
        return dict(filtered=Float32MultiArray(data=filtered), in_collision=in_collision)
| 42.365741 | 135 | 0.595673 | 2,093 | 18,302 | 5.111323 | 0.129001 | 0.037016 | 0.010095 | 0.01963 | 0.847261 | 0.833801 | 0.816227 | 0.805384 | 0.782389 | 0.782389 | 0 | 0.01391 | 0.300787 | 18,302 | 431 | 136 | 42.464037 | 0.822068 | 0.185007 | 0 | 0.767974 | 0 | 0 | 0.066452 | 0.013427 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03268 | false | 0 | 0.045752 | 0 | 0.091503 | 0.013072 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0a9cb9552028af547cf96aa0b9d8c075f11dbda2 | 24,441 | py | Python | gnews/utils/constants.py | valorien/GNews | a488f37dd3fe9d13e469738432dcbf9cac750853 | [
"MIT"
] | 155 | 2021-02-19T11:12:27.000Z | 2022-03-25T09:24:08.000Z | gnews/utils/constants.py | valorien/GNews | a488f37dd3fe9d13e469738432dcbf9cac750853 | [
"MIT"
] | 13 | 2021-03-04T05:45:17.000Z | 2022-03-25T10:21:12.000Z | gnews/utils/constants.py | valorien/GNews | a488f37dd3fe9d13e469738432dcbf9cac750853 | [
"MIT"
] | 24 | 2021-03-19T18:01:24.000Z | 2022-03-25T09:26:15.000Z | import random
# Pool of (dated) desktop-browser User-Agent strings, one per line; the triple-quoted
# literal is split on newlines into a list so requests can masquerade as browser traffic.
USER_AGENTS = '''Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36
Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36
Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36
Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36
Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1944.0 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.3319.102 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2117.157 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1866.237 Safari/537.36
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/4E423F
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36 Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.517 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1664.3 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.16 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1623.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.17 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.62 Safari/537.36
Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.2 Safari/537.36
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1468.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1467.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1464.0 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1500.55 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.93 Safari/537.36
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.22 (KHTML, like Gecko) Chrome/19.0.1047.0 Safari/535.22
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.21 (KHTML, like Gecko) Chrome/19.0.1042.0 Safari/535.21
Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.21 (KHTML, like Gecko) Chrome/19.0.1041.0 Safari/535.21
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/18.6.872.0 Safari/535.2 UNTRUSTED/1.0 3gpp-gba UNTRUSTED/1.0
Mozilla/5.0 (Macintosh; AMD Mac OS X 10_8_2) AppleWebKit/535.22 (KHTML, like Gecko) Chrome/18.6.872
Mozilla/5.0 (X11; CrOS i686 1660.57.0) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.46 Safari/535.19
Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.45 Safari/535.19
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.45 Safari/535.19
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.45 Safari/535.19
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.166 Safari/535.19
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.151 Safari/535.19
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.19 (KHTML, like Gecko) Ubuntu/11.10 Chromium/18.0.1025.142 Chrome/18.0.1025.142 Safari/535.19
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.11 Safari/535.19
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.2) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 6.0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_8) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.66 Safari/535.11
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/11.10 Chromium/17.0.963.65 Chrome/17.0.963.65 Safari/535.11
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/11.04 Chromium/17.0.963.65 Chrome/17.0.963.65 Safari/535.11
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/10.10 Chromium/17.0.963.65 Chrome/17.0.963.65 Safari/535.11
Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/11.10 Chromium/17.0.963.65 Chrome/17.0.963.65 Safari/535.11
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.700.3 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.699.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.699.0 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_6) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.698.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.697.0 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.71 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.68 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_7) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.68 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_8) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.68 Safari/534.24
Mozilla/5.0 Slackware/13.37 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/11.0.696.50
Mozilla/5.0 (Windows NT 5.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.43 Safari/534.24
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.34 Safari/534.24
Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.34 Safari/534.24
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.3 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.3 Safari/534.24
Mozilla/5.0 (Windows NT 6.0) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.3 Safari/534.24
Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.14 Safari/534.24
Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.12 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_6) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.12 Safari/534.24
Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Ubuntu/10.04 Chromium/11.0.696.0 Chrome/11.0.696.0 Safari/534.24
Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.0 Safari/534.24
Mozilla/5.0 (Windows NT 6.1) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.694.0 Safari/534.24
Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.23 (KHTML, like Gecko) Chrome/11.0.686.3 Safari/534.23
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.682.0 Safari/534.21
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.678.0 Safari/534.21
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_7_0; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.678.0 Safari/534.21
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.672.2 Safari/534.20
Mozilla/5.0 (Windows NT) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.672.2 Safari/534.20
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-US) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.672.2 Safari/534.20
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.20 (KHTML, like Gecko) Chrome/11.0.669.0 Safari/534.20
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.19 (KHTML, like Gecko) Chrome/11.0.661.0 Safari/534.19
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.18 (KHTML, like Gecko) Chrome/11.0.661.0 Safari/534.18
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-US) AppleWebKit/534.18 (KHTML, like Gecko) Chrome/11.0.660.0 Safari/534.18
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.655.0 Safari/534.17
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.655.0 Safari/534.17
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.654.0 Safari/534.17
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/11.0.652.0 Safari/534.17
Mozilla/4.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/11.0.1245.0 Safari/537.36
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/10.0.649.0 Safari/534.17
Mozilla/5.0 (Windows; U; Windows NT 6.1; de-DE) AppleWebKit/534.17 (KHTML, like Gecko) Chrome/10.0.649.0 Safari/534.17
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.82 Safari/534.16
Mozilla/5.0 (X11; U; Linux armv7l; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204 Safari/534.16
Mozilla/5.0 (X11; U; FreeBSD x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204 Safari/534.16
Mozilla/5.0 (X11; U; FreeBSD i386; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.204
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_6; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.134 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.133 Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.133 Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_2; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.127 Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.127 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; ru-RU; AppleWebKit/534.16; KHTML; like Gecko; Chrome/10.0.648.11;Safari/534.16)
Mozilla/5.0 (Windows; U; Windows NT 6.1; ru-RU) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.11 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.0 Chrome/10.0.648.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.648.0 Chrome/10.0.648.0 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.642.0 Chrome/10.0.642.0 Safari/534.16
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.639.0 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.638.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux i686 (x86_64); en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.634.0 Safari/534.16
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.634.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.16 SUSE/10.0.626.0 (KHTML, like Gecko) Chrome/10.0.626.0 Safari/534.16
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Chrome/10.0.613.0 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.613.0 Chrome/10.0.613.0 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Ubuntu/10.04 Chromium/10.0.612.3 Chrome/10.0.612.3 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Chrome/10.0.612.1 Safari/534.15
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.15 (KHTML, like Gecko) Ubuntu/10.10 Chromium/10.0.611.0 Chrome/10.0.611.0 Safari/534.15
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/10.0.602.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/10.0.601.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/10.0.601.0 Safari/534.14
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/540.0 (KHTML,like Gecko) Chrome/9.1.0.0 Safari/540.0
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/540.0 (KHTML, like Gecko) Ubuntu/10.10 Chrome/9.1.0.0 Safari/540.0
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/9.0.601.0 Safari/534.14
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Ubuntu/10.10 Chromium/9.0.600.0 Chrome/9.0.600.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.14 (KHTML, like Gecko) Chrome/9.0.600.0 Safari/534.14
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.599.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-CA) AppleWebKit/534.13 (KHTML like Gecko) Chrome/9.0.597.98 Safari/534.13
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.84 Safari/534.13
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.44 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.19 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.15 Safari/534.13
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.15 Safari/534.13
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.107 Safari/534.13 v1416758524.9051
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.107 Safari/534.13 v1416748405.3871
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.107 Safari/534.13 v1416670950.695
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.107 Safari/534.13 v1416664997.4379
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.107 Safari/534.13 v1333515017.9196
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_5; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_4; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.597.0 Safari/534.13
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Chrome/9.0.596.0 Safari/534.13
Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Ubuntu/10.04 Chromium/9.0.595.0 Chrome/9.0.595.0 Safari/534.13
Mozilla/5.0 (X11; U; Linux i686; en-US) AppleWebKit/534.13 (KHTML, like Gecko) Ubuntu/9.10 Chromium/9.0.592.0 Chrome/9.0.592.0 Safari/534.13
Mozilla/5.0 (X11; U; Windows NT 6; en-US) AppleWebKit/534.12 (KHTML, like Gecko) Chrome/9.0.587.0 Safari/534.12
Mozilla/5.0 (Windows U Windows NT 5.1 en-US) AppleWebKit/534.12 (KHTML, like Gecko) Chrome/9.0.583.0 Safari/534.12'''.split('\n')
# Chosen once at import time; every request made by this process reuses the same UA.
USER_AGENT = random.choice(USER_AGENTS)
# Mapping from human-readable language name to the language code used by Google News.
AVAILABLE_LANGUAGES = {
    "english": "en",
    "indonesian": "id",
    "czech": "cs",
    "german": "de",
    "spanish": "es-419",
    "french": "fr",
    "italian": "it",
    "latvian": "lv",
    "lithuanian": "lt",
    "hungarian": "hu",
    "dutch": "nl",
    "norwegian": "no",
    "polish": "pl",
    "portuguese brasil": "pt-419",
    "portuguese portugal": "pt-150",
    "romanian": "ro",
    "slovak": "sk",
    "slovenian": "sl",
    "swedish": "sv",
    "vietnamese": "vi",
    "turkish": "tr",
    "greek": "el",
    "bulgarian": "bg",
    "russian": "ru",
    "serbian": "sr",
    "ukrainian": "uk",
    "hebrew": "he",
    "arabic": "ar",
    "marathi": "mr",
    "hindi": "hi",
    "bengali": "bn",
    "tamil": "ta",
    "telugu": "te",
    # "malyalam" is a historical typo; kept so existing callers keep working,
    # with the correct spelling added alongside it.
    "malyalam": "ml",
    "malayalam": "ml",
    "thai": "th",
    "chinese simplified": "zh-Hans",
    "chinese traditional": "zh-Hant",
    "japanese": "ja",
    "korean": "ko"
}
# Mapping from country name to the ISO 3166-1 alpha-2 code used by Google News.
# NOTE: some historical keys carry a trailing space (e.g. "Canada "); trimmed
# aliases are added below so canonical-name lookups also succeed.
AVAILABLE_COUNTRIES = {
    "Australia": "AU",
    "Botswana": "BW",
    "Canada ": "CA",
    "Ethiopia": "ET",
    "Ghana": "GH",
    "India ": "IN",
    "Indonesia": "ID",
    "Ireland": "IE",
    "Israel ": "IL",
    "Kenya": "KE",
    "Latvia": "LV",
    "Malaysia": "MY",
    "Namibia": "NA",
    "New Zealand": "NZ",
    "Nigeria": "NG",
    "Pakistan": "PK",
    "Philippines": "PH",
    "Singapore": "SG",
    "South Africa": "ZA",
    "Tanzania": "TZ",
    "Uganda": "UG",
    "United Kingdom": "GB",
    "United States": "US",
    "Zimbabwe": "ZW",
    "Czech Republic": "CZ",
    "Germany": "DE",
    "Austria": "AT",
    "Switzerland": "CH",
    "Argentina": "AR",
    "Chile": "CL",
    "Colombia": "CO",
    "Cuba": "CU",
    "Mexico": "MX",
    "Peru": "PE",
    "Venezuela": "VE",
    "Belgium ": "BE",
    "France": "FR",
    "Morocco": "MA",
    "Senegal": "SN",
    "Italy": "IT",
    "Lithuania": "LT",
    "Hungary": "HU",
    "Netherlands": "NL",
    "Norway": "NO",
    "Poland": "PL",
    "Brazil": "BR",
    "Portugal": "PT",
    "Romania": "RO",
    "Slovakia": "SK",
    "Slovenia": "SI",
    "Sweden": "SE",
    "Vietnam": "VN",
    "Turkey": "TR",
    "Greece": "GR",
    "Bulgaria": "BG",
    "Russia": "RU",
    "Ukraine ": "UA",
    "Serbia": "RS",
    "United Arab Emirates": "AE",
    "Saudi Arabia": "SA",
    "Lebanon": "LB",
    "Egypt": "EG",
    "Bangladesh": "BD",
    "Thailand": "TH",
    "China": "CN",
    "Taiwan": "TW",
    "Hong Kong": "HK",
    "Japan": "JP",
    "Republic of Korea": "KR"
}
# Backward-compatible fix: add whitespace-trimmed aliases for the keys that were
# accidentally defined with a trailing space, so both spellings resolve.
AVAILABLE_COUNTRIES.update({k.strip(): v for k, v in AVAILABLE_COUNTRIES.items() if k != k.strip()})
# Base endpoints for Google News and its RSS feed.
GOOGLE_NEWS_URL = 'https://news.google.com'
BASE_URL = "{0}/rss".format(GOOGLE_NEWS_URL)
# Pattern that recognises Google News URLs.
# BUGFIX: the previous value was an f-string with no placeholders whose dots were
# unescaped (matching any character) and which ended in 'com*' (zero or more 'm'),
# e.g. it also matched 'news.googleXcom'. Use a raw string with escaped dots.
GOOGLE_NEWS_REGEX = r'^http(s)?://(www\.)?news\.google\.com'
TOPICS = ["WORLD", "NATION", "BUSINESS", "TECHNOLOGY", "ENTERTAINMENT", "SPORTS", "SCIENCE", "HEALTH"]
| 78.336538 | 244 | 0.709259 | 4,807 | 24,441 | 3.579155 | 0.093197 | 0.096774 | 0.150538 | 0.191805 | 0.877652 | 0.875443 | 0.866725 | 0.85109 | 0.84551 | 0.824179 | 0 | 0.218118 | 0.121558 | 24,441 | 311 | 245 | 78.588424 | 0.583232 | 0 | 0 | 0 | 0 | 0.609272 | 0.935968 | 0.007119 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.003311 | 0 | 0.003311 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
0ad3ce5970ff223024e8ca98d9d025d93d140614 | 9,650 | py | Python | venv/Lib/site-packages/fdutil/unittests/test_file_utils.py | avim2809/CameraSiteBlocker | bfc0434e75e8f3f95c459a4adc86b7673200816e | [
"Apache-2.0"
] | null | null | null | venv/Lib/site-packages/fdutil/unittests/test_file_utils.py | avim2809/CameraSiteBlocker | bfc0434e75e8f3f95c459a4adc86b7673200816e | [
"Apache-2.0"
] | null | null | null | venv/Lib/site-packages/fdutil/unittests/test_file_utils.py | avim2809/CameraSiteBlocker | bfc0434e75e8f3f95c459a4adc86b7673200816e | [
"Apache-2.0"
] | null | null | null | # encoding: utf-8
import os
import unittest
from future.utils import iteritems
from fdutil import file_utils
def normalise_path(self,
                   path):
    """Turn a '/'-separated relative *path* into an absolute path rooted at
    ``self.test_base_dir``, using the platform's path separator."""
    relative = os.sep.join(path.split(u'/'))
    return os.path.join(self.test_base_dir, relative)
class TestGetFiles(unittest.TestCase):
    """Exercise ``file_utils.get_files`` against the bundled test_data tree,
    covering the default pattern, an explicit pattern and recursion."""

    # Reuse the module-level helper as a method.
    normalise_path = normalise_path

    def setUp(self):
        self.test_base_dir = os.path.dirname(os.path.realpath(__file__))
        self.output = []
        self.file_dir = self.normalise_path(u'test_data/file_utils/get_files/')

    def tearDown(self):
        pass

    def test_default_file_type(self):
        # Default pattern, non-recursive: top-level entries only.
        self.output = sorted(file_utils.get_files(directory=self.file_dir))
        expected = list(map(self.normalise_path,
                            (u'test_data/file_utils/get_files/file1.json',
                             u'test_data/file_utils/get_files/file1.txt',
                             u'test_data/file_utils/get_files/file2.json',
                             u'test_data/file_utils/get_files/file3.json',
                             u'test_data/file_utils/get_files/file4.txt',
                             u'test_data/file_utils/get_files/folder1')))
        self.assertEqual(expected,
                         self.output,
                         msg=u'Test get_files: Default extension: files do not match!\n'
                             u'expected:{expected}\n'
                             u'actual:{actual}'
                             .format(expected=expected,
                                     actual=self.output))

    def test_default_file_type_recursive(self):
        # Default pattern, recursive: files from nested folders included.
        self.output = sorted(file_utils.get_files(directory=self.file_dir,
                                                  recursive=True))
        expected = list(map(self.normalise_path,
                            (u'test_data/file_utils/get_files/file1.json',
                             u'test_data/file_utils/get_files/file1.txt',
                             u'test_data/file_utils/get_files/file2.json',
                             u'test_data/file_utils/get_files/file3.json',
                             u'test_data/file_utils/get_files/file4.txt',
                             u'test_data/file_utils/get_files/folder1/file5.txt',
                             u'test_data/file_utils/get_files/folder1/file6.json',
                             u'test_data/file_utils/get_files/folder1/folder2/file7.txt',
                             u'test_data/file_utils/get_files/folder1/folder2/file8.json')))
        self.assertEqual(expected, self.output,
                         msg=u'Test get_files: Default extension recursive: files do not match!!\n'
                             u'expected:{expected}\n'
                             u'actual:{actual}'
                             .format(expected=expected,
                                     actual=self.output))

    def test_non_default_file_type(self):
        # Explicit '*.txt' pattern, non-recursive.
        self.output = sorted(file_utils.get_files(directory=self.file_dir,
                                                  pattern=u'*.txt'))
        expected = list(map(self.normalise_path,
                            (u'test_data/file_utils/get_files/file1.txt',
                             u'test_data/file_utils/get_files/file4.txt')))
        self.assertEqual(expected, self.output,
                         msg=u'Test get_files: Non default extension: files do not match!\n'
                             u'expected:{expected}\n'
                             u'actual:{actual}'
                             .format(expected=expected,
                                     actual=self.output))

    def test_non_default_file_type_recursive(self):
        # Explicit '*.txt' pattern, recursive.
        self.output = sorted(file_utils.get_files(directory=self.file_dir,
                                                  pattern=u'*.txt',
                                                  recursive=True))
        expected = list(map(self.normalise_path,
                            (u'test_data/file_utils/get_files/file1.txt',
                             u'test_data/file_utils/get_files/file4.txt',
                             u'test_data/file_utils/get_files/folder1/file5.txt',
                             u'test_data/file_utils/get_files/folder1/folder2/file7.txt')))
        self.assertEqual(expected, self.output,
                         msg=u'Test get_files: Non default extension recursive: files do not match!\n'
                             u'expected:{expected}\n'
                             u'actual:{actual}'
                             .format(expected=expected,
                                     actual=self.output))

    def test_all_files(self):
        # '*.*' pattern: every top-level file (directories excluded).
        self.output = sorted(file_utils.get_files(directory=self.file_dir,
                                                  pattern=u'*.*'))
        expected = list(map(self.normalise_path,
                            (u'test_data/file_utils/get_files/file1.json',
                             u'test_data/file_utils/get_files/file1.txt',
                             u'test_data/file_utils/get_files/file2.json',
                             u'test_data/file_utils/get_files/file3.json',
                             u'test_data/file_utils/get_files/file4.txt')))
        self.assertEqual(expected,
                         self.output,
                         msg=u'Test get_files: All: files do not match!\n'
                             u'expected:{expected}\n'
                             u'actual:{actual}'
                             .format(expected=expected,
                                     actual=self.output))

    def test_all_files_recursive(self):
        # '*.*' pattern, recursive: every file at every depth.
        self.output = sorted(file_utils.get_files(directory=self.file_dir,
                                                  pattern=u'*.*',
                                                  recursive=True))
        expected = list(map(self.normalise_path,
                            (u'test_data/file_utils/get_files/file1.json',
                             u'test_data/file_utils/get_files/file1.txt',
                             u'test_data/file_utils/get_files/file2.json',
                             u'test_data/file_utils/get_files/file3.json',
                             u'test_data/file_utils/get_files/file4.txt',
                             u'test_data/file_utils/get_files/folder1/file5.txt',
                             u'test_data/file_utils/get_files/folder1/file6.json',
                             u'test_data/file_utils/get_files/folder1/folder2/file7.txt',
                             u'test_data/file_utils/get_files/folder1/folder2/file8.json')))
        self.assertEqual(expected,
                         self.output,
                         msg=u'Test get_files: All recursive: files do not match!\n'
                             u'expected:{expected}\n'
                             u'actual:{actual}'
                             .format(expected=expected,
                                     actual=self.output))
class TestGetFilesDict(unittest.TestCase):
    """Exercise ``file_utils.get_files_dict`` with the default and an
    explicit file type against the bundled test_data tree."""

    # Reuse the module-level helper as a method.
    normalise_path = normalise_path

    def setUp(self):
        self.test_base_dir = os.path.dirname(os.path.realpath(__file__))
        self.output = {}
        self.file_dir = self.normalise_path(u'test_data/file_utils/get_files/')

    def tearDown(self):
        pass

    def test_default_file_type(self):
        # Default file type: basenames map to the matching .json paths.
        file_utils.get_files_dict(directory=self.file_dir,
                                  output_dict=self.output)
        expected = {
            u'file1': self.normalise_path(u'test_data/file_utils/get_files/file1.json'),
            u'file2': self.normalise_path(u'test_data/file_utils/get_files/file2.json'),
            u'file3': self.normalise_path(u'test_data/file_utils/get_files/file3.json'),
        }
        self.assertEqual(expected, self.output,
                         msg=u'Test get_files_dict: Default extension: files do not match!\n'
                             u'expected:{expected}\n'
                             u'actual:{actual}'
                             .format(expected=expected,
                                     actual=self.output))

    def test_non_default_file_type(self):
        # Explicit '.txt' file type: basenames map to the matching .txt paths.
        file_utils.get_files_dict(directory=self.file_dir,
                                  output_dict=self.output,
                                  file_type=u'.txt')
        expected = {
            u'file1': self.normalise_path(u'test_data/file_utils/get_files/file1.txt'),
            u'file4': self.normalise_path(u'test_data/file_utils/get_files/file4.txt'),
        }
        self.assertEqual(expected, self.output,
                         msg=u'Test get_files_dict: Non default extension: files do not match!\n'
                             u'expected:{expected}\n'
                             u'actual:{actual}'
                             .format(expected=expected,
                                     actual=self.output))
| 48.984772 | 112 | 0.500725 | 1,018 | 9,650 | 4.501965 | 0.070727 | 0.101244 | 0.130919 | 0.185468 | 0.95396 | 0.950251 | 0.950251 | 0.950251 | 0.950251 | 0.943487 | 0 | 0.010719 | 0.410259 | 9,650 | 196 | 113 | 49.234694 | 0.794588 | 0.001554 | 0 | 0.733766 | 0 | 0 | 0.270528 | 0.204194 | 0 | 0 | 0 | 0 | 0.051948 | 1 | 0.084416 | false | 0.012987 | 0.025974 | 0.006494 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0ad915ab7a920eaa5862e1dc760518997fe46c1f | 3,663 | py | Python | app.py | SousaPedro11/workshop-refatoracao | 1182ca97af6d22c97b2c9adaa8c6cf85b78257a5 | [
"MIT"
] | null | null | null | app.py | SousaPedro11/workshop-refatoracao | 1182ca97af6d22c97b2c9adaa8c6cf85b78257a5 | [
"MIT"
] | null | null | null | app.py | SousaPedro11/workshop-refatoracao | 1182ca97af6d22c97b2c9adaa8c6cf85b78257a5 | [
"MIT"
] | null | null | null | from flask import Flask, jsonify, Response, request
from http import HTTPStatus
import db
app = Flask(__name__)
@app.route('/leiloes/<id_leilao>/', methods=['GET'])
def get_detalhes_do_leilao(id_leilao):
    """Return the auction identified by *id_leilao* as JSON, together with
    its most recent bid (``ultimo_lance`` is null when there are no bids)."""
    with db.abrir_conexao() as conexao, conexao.cursor() as cur:
        cur.execute("""
            SELECT id, descricao, criador, data, diferenca_minima
            FROM leiloes
            WHERE id = %s
        """, (id_leilao, ))
        leilao = cur.fetchone()
        cur.execute("""
            SELECT id, valor, comprador, data
            FROM lances
            WHERE id_leilao = %s
            ORDER BY data DESC
            LIMIT 1
        """, (id_leilao, ))
        lance = cur.fetchone()
    # Both rows are fully fetched, so the payload is built after the
    # connection scope closes, exactly as before.
    if lance is None:
        ultimo_lance = None
    else:
        ultimo_lance = {
            'id': lance[0],
            'valor': lance[1],
            'comprador': lance[2],
            'data': lance[3].isoformat(),
        }
    return jsonify({
        'id': leilao[0],
        'descricao': leilao[1],
        'criador': leilao[2],
        'data': leilao[3].isoformat(),
        'diferenca_minima': leilao[4],
        'ultimo_lance': ultimo_lance,
    })
@app.route('/leiloes/<id_leilao>/lances/', methods=['POST'])
def registrar_lance(id_leilao):
    """Record a new bid for the auction; reject it with 400 when it does not
    strictly exceed the latest existing bid."""
    dados = request.get_json()
    # crude stand-in for real authentication
    id_usuario = request.headers['X-Id-Usuario']
    with db.abrir_conexao() as conexao, conexao.cursor() as cur:
        cur.execute("""
            SELECT valor
            FROM lances
            WHERE id_leilao = %s
            ORDER BY data DESC
            LIMIT 1
        """, (id_leilao, ))
        ultimo_lance = cur.fetchone()
        # NOTE(review): only a strict increase is enforced; the auction's
        # diferenca_minima column is never consulted — confirm this is intended.
        if ultimo_lance is not None and dados['valor'] <= ultimo_lance[0]:
            return 'Lance deve ser maior que o último.', HTTPStatus.BAD_REQUEST
        cur.execute("""
            INSERT INTO lances (id_leilao, valor, comprador, data)
            VALUES (%s, %s, %s, now())
        """, (id_leilao, dados['valor'], id_usuario))
    return '', HTTPStatus.NO_CONTENT
@app.route('/leiloes/<id_leilao>/lances/minimo/', methods=['POST'])
def registrar_lance_minimo(id_leilao):
    """Record the minimum possible bid for the auction: latest bid + 1, or 1
    when no bid exists yet."""
    # crude stand-in for real authentication
    id_usuario = request.headers['X-Id-Usuario']
    with db.abrir_conexao() as conexao, conexao.cursor() as cur:
        cur.execute("""
            SELECT valor
            FROM lances
            WHERE id_leilao = %s
            ORDER BY data DESC
            LIMIT 1
        """, (id_leilao, ))
        ultimo_lance = cur.fetchone()
        if ultimo_lance is None:
            valor = 1
        else:
            valor = ultimo_lance[0] + 1
        cur.execute("""
            INSERT INTO lances (id_leilao, valor, comprador, data)
            VALUES (%s, %s, %s, now())
        """, (id_leilao, valor, id_usuario))
    return '', HTTPStatus.NO_CONTENT
@app.route('/leiloes/proximo/', methods=['GET'])
def get_detalhes_do_proximo_leilao():
    """Return the next auction (earliest by date) as JSON, together with its
    most recent bid (``ultimo_lance`` is null when there are no bids)."""
    with db.abrir_conexao() as conexao, conexao.cursor() as cur:
        cur.execute("""
            SELECT id, descricao, criador, data, diferenca_minima
            FROM leiloes
            ORDER BY data
            LIMIT 1
        """)
        leilao = cur.fetchone()
        id_leilao = leilao[0]
        cur.execute("""
            SELECT id, valor, comprador, data
            FROM lances
            WHERE id_leilao = %s
            ORDER BY data DESC
            LIMIT 1
        """, (id_leilao, ))
        lance = cur.fetchone()
    # Rows are fetched eagerly, so the response is assembled after the
    # connection scope closes, exactly as before.
    if lance is None:
        ultimo_lance = None
    else:
        ultimo_lance = {
            'id': lance[0],
            'valor': lance[1],
            'comprador': lance[2],
            'data': lance[3].isoformat(),
        }
    return jsonify({
        'id': leilao[0],
        'descricao': leilao[1],
        'criador': leilao[2],
        'data': leilao[3].isoformat(),
        'diferenca_minima': leilao[4],
        'ultimo_lance': ultimo_lance,
    })
| 31.042373 | 79 | 0.568387 | 438 | 3,663 | 4.621005 | 0.194064 | 0.086957 | 0.047431 | 0.035573 | 0.810277 | 0.771245 | 0.724308 | 0.724308 | 0.724308 | 0.724308 | 0 | 0.010886 | 0.297843 | 3,663 | 117 | 80 | 31.307692 | 0.77605 | 0.019383 | 0 | 0.788462 | 0 | 0 | 0.339092 | 0.023405 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038462 | false | 0 | 0.028846 | 0 | 0.115385 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e400868e282903612ad9e3d75a757c4a6d77f8d0 | 143 | py | Python | app/main/views/vue.py | by46/coffee | f12e1e95f12da7e322a432a6386a1147c5549c3b | [
"MIT"
] | null | null | null | app/main/views/vue.py | by46/coffee | f12e1e95f12da7e322a432a6386a1147c5549c3b | [
"MIT"
] | null | null | null | app/main/views/vue.py | by46/coffee | f12e1e95f12da7e322a432a6386a1147c5549c3b | [
"MIT"
] | null | null | null | from flask import render_template
from app.main import main
@main.route('/vue')
def vue():
return render_template('index.html')
| 15.888889 | 41 | 0.692308 | 20 | 143 | 4.85 | 0.65 | 0.28866 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.195804 | 143 | 8 | 42 | 17.875 | 0.843478 | 0 | 0 | 0 | 0 | 0 | 0.103704 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | true | 0 | 0.4 | 0.2 | 0.8 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 7 |
e403ce3ed6d4281b5463ea21fafb2d74ec790f93 | 47,497 | py | Python | src/genie/libs/parser/junos/tests/ShowInterfacesStatistics/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/junos/tests/ShowInterfacesStatistics/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/junos/tests/ShowInterfacesStatistics/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
"interface-information": {
"physical-interface": [
{
"active-alarms": {"interface-alarms": {"alarm-not-present": True}},
"active-defects": {"interface-alarms": {"alarm-not-present": True}},
"admin-status": "Enabled",
"bpdu-error": "None",
"current-physical-address": "5e:00:40:ff:00:00",
"hardware-physical-address": "5e:00:40:ff:00:00",
"if-auto-negotiation": "Enabled",
"if-config-flags": {"iff-snmp-traps": True, "internal-flags": "0x4000"},
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-flow-control": "Enabled",
"if-media-flags": {"ifmf-none": True},
"if-remote-fault": "Online",
"input-error-count": "1568",
"interface-flapped": "2020-06-22 22:33:51 EST (1w1d 00:22 ago)",
"interface-transmit-statistics": "Disabled",
"l2pt-error": "None",
"link-level-type": "Ethernet",
"link-mode": "Full-duplex",
"local-index": "133",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-is-primary": True,
"ifff-sendbcast-pkt-to-re": True,
},
"address-family-name": "inet",
"interface-address": [
{
"ifa-broadcast": "172.16.1.255",
"ifa-destination": "172.16.1/24",
"ifa-flags": {
"ifaf-current-preferred": True,
"ifaf-current-primary": True,
},
"ifa-local": "172.16.1.55",
}
],
"mtu": "1500",
}
],
"allowed-host-inbound-traffic": {
"inbound-dhcp": True,
"inbound-http": True,
"inbound-https": True,
"inbound-ssh": True,
"inbound-telnet": True,
},
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000",
},
"local-index": "70",
"logical-interface-zone-name": "trust",
"name": "ge-0/0/0.0",
"snmp-index": "507",
"traffic-statistics": {
"input-packets": "4685",
"output-packets": "144",
},
}
],
"loopback": "Disabled",
"mtu": "1514",
"name": "ge-0/0/0",
"oper-status": "Up",
"output-error-count": "0",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8",
},
"snmp-index": "506",
"source-filtering": "Disabled",
"speed": "1000mbps",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "712",
"input-pps": "1",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "143",
"name": "gr-0/0/0",
"oper-status": "Up",
"snmp-index": "519",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "144",
"name": "ip-0/0/0",
"oper-status": "Up",
"snmp-index": "520",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"interface-flapped": "2020-06-22 22:33:52 EST (1w1d 00:22 ago)",
"local-index": "145",
"name": "lsq-0/0/0",
"oper-status": "Up",
"snmp-index": "521",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"current-physical-address": "02:96:14:ff:11:43",
"hardware-physical-address": "02:96:14:ff:11:43",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-media-flags": {"ifmf-none": True},
"input-error-count": "0",
"interface-flapped": "Never",
"local-index": "147",
"name": "lt-0/0/0",
"oper-status": "Up",
"output-error-count": "0",
"snmp-index": "523",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "146",
"name": "mt-0/0/0",
"oper-status": "Up",
"snmp-index": "522",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-media-flags": {"ifmf-none": True},
"input-error-count": "0",
"interface-flapped": "2020-06-22 22:33:52 EST (1w1d 00:22 ago)",
"local-index": "142",
"logical-interface": [
{
"address-family": [
{"address-family-name": "inet", "mtu": "9192"},
{"address-family-name": "inet6", "mtu": "9192"},
],
"local-index": "75",
"logical-interface-zone-name": "Null",
"name": "sp-0/0/0.0",
"snmp-index": "518",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0",
},
},
{
"address-family": [
{
"address-family-name": "inet",
"interface-address": [
{
"ifa-destination": "10.0.0.16",
"ifa-flags": {
"ifaf-current-preferred": True,
"ifaf-current-primary": True,
},
"ifa-local": "10.0.0.1",
},
{"ifa-flags": {}, "ifa-local": "10.0.0.6"},
{
"ifa-destination": "172.16.66.16",
"ifa-flags": {"ifaf-current-preferred": True},
"ifa-local": "172.16.64.1",
},
{"ifa-flags": {}, "ifa-local": "172.16.64.6"},
],
"mtu": "9192",
}
],
"local-index": "76",
"logical-interface-zone-name": "Null",
"name": "sp-0/0/0.16383",
"snmp-index": "524",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0",
},
},
],
"name": "sp-0/0/0",
"oper-status": "Up",
"output-error-count": "0",
"snmp-index": "517",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"active-alarms": {"interface-alarms": {"alarm-not-present": True}},
"active-defects": {"interface-alarms": {"alarm-not-present": True}},
"admin-status": "Enabled",
"bpdu-error": "None",
"current-physical-address": "fa:16:3e:ff:7f:fd",
"hardware-physical-address": "fa:16:3e:ff:7f:fd",
"if-auto-negotiation": "Enabled",
"if-config-flags": {"iff-snmp-traps": True, "internal-flags": "0x4000"},
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-flow-control": "Enabled",
"if-media-flags": {"ifmf-none": True},
"if-remote-fault": "Online",
"input-error-count": "8",
"interface-flapped": "2020-06-22 22:34:01 EST (1w1d 00:22 ago)",
"interface-transmit-statistics": "Disabled",
"l2pt-error": "None",
"link-level-type": "Ethernet",
"link-mode": "Full-duplex",
"local-index": "134",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": [
{
"ifa-broadcast": "10.70.0.255",
"ifa-destination": "40.0.0/24",
"ifa-flags": {
"ifaf-current-preferred": True,
"ifaf-current-primary": True,
},
"ifa-local": "10.70.0.4",
}
],
"mtu": "1500",
},
{
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "2001:40::/64",
"ifa-flags": {
"ifaf-current-preferred": True,
"ifaf-current-primary": True,
},
"ifa-local": "2001:40::4",
},
{
"ifa-destination": "fe80::/64",
"ifa-flags": {"ifaf-current-preferred": True},
"ifa-local": "fe80::fa16:3eff:feff:7ffd",
},
],
"mtu": "1500",
},
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000",
},
"local-index": "71",
"logical-interface-zone-name": "trust",
"name": "ge-0/0/1.0",
"snmp-index": "516",
"traffic-statistics": {
"input-packets": "555",
"output-packets": "546",
},
}
],
"loopback": "Disabled",
"mtu": "1514",
"name": "ge-0/0/1",
"oper-status": "Up",
"output-error-count": "0",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8",
},
"snmp-index": "508",
"source-filtering": "Disabled",
"speed": "1000mbps",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "312",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"active-alarms": {"interface-alarms": {"alarm-not-present": True}},
"active-defects": {"interface-alarms": {"alarm-not-present": True}},
"admin-status": "Enabled",
"bpdu-error": "None",
"current-physical-address": "fa:16:3e:ff:0e:52",
"hardware-physical-address": "fa:16:3e:ff:0e:52",
"if-auto-negotiation": "Enabled",
"if-config-flags": {"iff-snmp-traps": True, "internal-flags": "0x4000"},
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-flow-control": "Enabled",
"if-media-flags": {"ifmf-none": True},
"if-remote-fault": "Online",
"input-error-count": "2",
"interface-flapped": "2020-06-22 22:34:01 EST (1w1d 00:22 ago)",
"interface-transmit-statistics": "Disabled",
"l2pt-error": "None",
"link-level-type": "Ethernet",
"link-mode": "Full-duplex",
"local-index": "135",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": [
{
"ifa-broadcast": "10.205.0.255",
"ifa-destination": "50.0.0/24",
"ifa-flags": {
"ifaf-current-preferred": True,
"ifaf-current-primary": True,
},
"ifa-local": "10.205.0.4",
}
],
"mtu": "1500",
},
{
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "2001:50::/64",
"ifa-flags": {
"ifaf-current-preferred": True,
"ifaf-current-primary": True,
},
"ifa-local": "2001:50::4",
},
{
"ifa-destination": "fe80::/64",
"ifa-flags": {"ifaf-current-preferred": True},
"ifa-local": "fe80::fa16:3eff:feff:e52",
},
],
"mtu": "1500",
},
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000",
},
"local-index": "72",
"logical-interface-zone-name": "trust",
"name": "ge-0/0/2.0",
"snmp-index": "525",
"traffic-statistics": {
"input-packets": "450",
"output-packets": "465",
},
}
],
"loopback": "Disabled",
"mtu": "1514",
"name": "ge-0/0/2",
"oper-status": "Up",
"output-error-count": "0",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8",
},
"snmp-index": "509",
"source-filtering": "Disabled",
"speed": "1000mbps",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "528",
"output-pps": "0",
},
},
{
"active-alarms": {"interface-alarms": {"alarm-not-present": True}},
"active-defects": {"interface-alarms": {"alarm-not-present": True}},
"admin-status": "Enabled",
"bpdu-error": "None",
"current-physical-address": "fa:16:3e:ff:12:8e",
"hardware-physical-address": "fa:16:3e:ff:12:8e",
"if-auto-negotiation": "Enabled",
"if-config-flags": {"iff-snmp-traps": True, "internal-flags": "0x4000"},
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-flow-control": "Enabled",
"if-media-flags": {"ifmf-none": True},
"if-remote-fault": "Online",
"input-error-count": "0",
"interface-flapped": "2020-06-22 22:34:01 EST (1w1d 00:22 ago)",
"interface-transmit-statistics": "Disabled",
"l2pt-error": "None",
"link-level-type": "Ethernet",
"link-mode": "Full-duplex",
"local-index": "136",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"mtu": "1500",
},
{
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "fe80::/64",
"ifa-flags": {"ifaf-current-preferred": True},
"ifa-local": "fe80::fa16:3eff:feff:128e",
}
],
"mtu": "1500",
},
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000",
},
"local-index": "73",
"logical-interface-zone-name": "trust",
"name": "ge-0/0/3.0",
"snmp-index": "526",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0",
},
}
],
"loopback": "Disabled",
"mtu": "1514",
"name": "ge-0/0/3",
"oper-status": "Up",
"output-error-count": "0",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8",
},
"snmp-index": "510",
"source-filtering": "Disabled",
"speed": "1000mbps",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"active-alarms": {"interface-alarms": {"alarm-not-present": True}},
"active-defects": {"interface-alarms": {"alarm-not-present": True}},
"admin-status": "Enabled",
"bpdu-error": "None",
"current-physical-address": "fa:16:3e:ff:37:bd",
"hardware-physical-address": "fa:16:3e:ff:37:bd",
"if-auto-negotiation": "Enabled",
"if-config-flags": {"iff-snmp-traps": True, "internal-flags": "0x4000"},
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-flow-control": "Enabled",
"if-media-flags": {"ifmf-none": True},
"if-remote-fault": "Online",
"input-error-count": "0",
"interface-flapped": "2020-06-22 22:34:02 EST (1w1d 00:22 ago)",
"interface-transmit-statistics": "Disabled",
"l2pt-error": "None",
"link-level-type": "Ethernet",
"link-mode": "Full-duplex",
"local-index": "137",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"mtu": "1500",
},
{
"address-family-name": "inet6",
"interface-address": [
{
"ifa-destination": "fe80::/64",
"ifa-flags": {"ifaf-current-preferred": True},
"ifa-local": "fe80::fa16:3eff:feff:37bd",
}
],
"mtu": "1500",
},
],
"encapsulation": "ENET2",
"if-config-flags": {
"iff-snmp-traps": True,
"internal-flags": "0x4000",
},
"local-index": "74",
"logical-interface-zone-name": "trust",
"name": "ge-0/0/4.0",
"snmp-index": "527",
"traffic-statistics": {
"input-packets": "787",
"output-packets": "787",
},
}
],
"loopback": "Disabled",
"mtu": "1514",
"name": "ge-0/0/4",
"oper-status": "Up",
"output-error-count": "0",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8",
},
"snmp-index": "511",
"source-filtering": "Disabled",
"speed": "1000mbps",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"active-alarms": {"interface-alarms": {"alarm-not-present": True}},
"active-defects": {"interface-alarms": {"alarm-not-present": True}},
"admin-status": "Enabled",
"bpdu-error": "None",
"current-physical-address": "fa:16:3e:ff:38:28",
"hardware-physical-address": "fa:16:3e:ff:38:28",
"if-auto-negotiation": "Enabled",
"if-config-flags": {"iff-snmp-traps": True, "internal-flags": "0x4000"},
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-flow-control": "Enabled",
"if-media-flags": {"ifmf-none": True},
"if-remote-fault": "Online",
"input-error-count": "0",
"interface-flapped": "2020-06-22 22:34:02 EST (1w1d 00:22 ago)",
"interface-transmit-statistics": "Disabled",
"l2pt-error": "None",
"link-level-type": "Ethernet",
"link-mode": "Full-duplex",
"local-index": "138",
"loopback": "Disabled",
"mtu": "1514",
"name": "ge-0/0/5",
"oper-status": "Up",
"output-error-count": "0",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8",
},
"snmp-index": "512",
"source-filtering": "Disabled",
"speed": "1000mbps",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"active-alarms": {"interface-alarms": {"alarm-not-present": True}},
"active-defects": {"interface-alarms": {"alarm-not-present": True}},
"admin-status": "Enabled",
"bpdu-error": "None",
"current-physical-address": "fa:16:3e:ff:54:ea",
"hardware-physical-address": "fa:16:3e:ff:54:ea",
"if-auto-negotiation": "Enabled",
"if-config-flags": {"iff-snmp-traps": True, "internal-flags": "0x4000"},
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-flow-control": "Enabled",
"if-media-flags": {"ifmf-none": True},
"if-remote-fault": "Online",
"input-error-count": "0",
"interface-flapped": "2020-06-22 22:34:02 EST (1w1d 00:22 ago)",
"interface-transmit-statistics": "Disabled",
"l2pt-error": "None",
"link-level-type": "Ethernet",
"link-mode": "Full-duplex",
"local-index": "139",
"loopback": "Disabled",
"mtu": "1514",
"name": "ge-0/0/6",
"oper-status": "Up",
"output-error-count": "0",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8",
},
"snmp-index": "513",
"source-filtering": "Disabled",
"speed": "1000mbps",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"active-alarms": {"interface-alarms": {"alarm-not-present": True}},
"active-defects": {"interface-alarms": {"alarm-not-present": True}},
"admin-status": "Enabled",
"bpdu-error": "None",
"current-physical-address": "fa:16:3e:ff:05:a1",
"hardware-physical-address": "fa:16:3e:ff:05:a1",
"if-auto-negotiation": "Enabled",
"if-config-flags": {"iff-snmp-traps": True, "internal-flags": "0x4000"},
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-flow-control": "Enabled",
"if-media-flags": {"ifmf-none": True},
"if-remote-fault": "Online",
"input-error-count": "0",
"interface-flapped": "2020-06-22 22:34:02 EST (1w1d 00:22 ago)",
"interface-transmit-statistics": "Disabled",
"l2pt-error": "None",
"link-level-type": "Ethernet",
"link-mode": "Full-duplex",
"local-index": "140",
"loopback": "Disabled",
"mtu": "1514",
"name": "ge-0/0/7",
"oper-status": "Up",
"output-error-count": "0",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8",
},
"snmp-index": "514",
"source-filtering": "Disabled",
"speed": "1000mbps",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"active-alarms": {"interface-alarms": {"alarm-not-present": True}},
"active-defects": {"interface-alarms": {"alarm-not-present": True}},
"admin-status": "Enabled",
"bpdu-error": "None",
"current-physical-address": "fa:16:3e:ff:a2:b7",
"hardware-physical-address": "fa:16:3e:ff:a2:b7",
"if-auto-negotiation": "Enabled",
"if-config-flags": {"iff-snmp-traps": True, "internal-flags": "0x4000"},
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-flow-control": "Enabled",
"if-media-flags": {"ifmf-none": True},
"if-remote-fault": "Online",
"input-error-count": "0",
"interface-flapped": "2020-06-22 22:34:02 EST (1w1d 00:22 ago)",
"interface-transmit-statistics": "Disabled",
"l2pt-error": "None",
"link-level-type": "Ethernet",
"link-mode": "Full-duplex",
"local-index": "141",
"loopback": "Disabled",
"mtu": "1514",
"name": "ge-0/0/8",
"oper-status": "Up",
"output-error-count": "0",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8",
},
"snmp-index": "515",
"source-filtering": "Disabled",
"speed": "1000mbps",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-media-flags": {"ifmf-none": True},
"input-error-count": "0",
"interface-flapped": "Never",
"local-index": "5",
"name": "dsc",
"oper-status": "Up",
"output-error-count": "0",
"snmp-index": "5",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "10",
"name": "gre",
"oper-status": "Up",
"snmp-index": "8",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "11",
"name": "ipip",
"oper-status": "Up",
"snmp-index": "9",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
},
{
"admin-status": "Enabled",
"if-media-flags": {"ifmf-none": True},
"input-error-count": "0",
"interface-flapped": "Never",
"local-index": "6",
"logical-interface": [
{
"address-family": [
{
"address-family-flags": {
"ifff-sendbcast-pkt-to-re": True
},
"address-family-name": "inet",
"interface-address": [
{
"ifa-flags": {
"ifaf-current-default": True,
"ifaf-current-primary": True,
},
"ifa-local": "10.64.4.4",
}
],
"mtu": "Unlimited",
},
{
"address-family-name": "inet6",
"interface-address": [
{
"ifa-flags": {
"ifaf-current-default": True,
"ifaf-current-primary": True,
},
"ifa-local": "fe80::5e00:400f:fc00:0",
}
],
"mtu": "Unlimited",
},
],
"local-index": "66",
"logical-interface-zone-name": "trust",
"name": "lo0.0",
"snmp-index": "16",
"traffic-statistics": {
"input-packets": "7",
"output-packets": "7",
},
},
{
"address-family": [
{
"address-family-name": "inet",
"interface-address": [
{"ifa-flags": {}, "ifa-local": "127.0.0.1"}
],
"mtu": "Unlimited",
}
],
"local-index": "65",
"logical-interface-zone-name": "Null",
"name": "lo0.16384",
"snmp-index": "21",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0",
},
},
{
"address-family": [
{
"address-family-name": "inet",
"interface-address": [
{
"ifa-flags": {
"ifaf-current-default": True,
"ifaf-current-primary": True,
},
"ifa-local": "10.0.0.1",
},
{"ifa-flags": {}, "ifa-local": "10.0.0.16"},
{"ifa-flags": {}, "ifa-local": "172.16.64.1"},
{"ifa-flags": {}, "ifa-local": "172.16.64.4"},
{"ifa-flags": {}, "ifa-local": "172.16.66.16"},
],
"mtu": "Unlimited",
}
],
"local-index": "67",
"logical-interface-zone-name": "Null",
"name": "lo0.16385",
"snmp-index": "22",
"traffic-statistics": {
"input-packets": "780",
"output-packets": "780",
},
},
{
"local-index": "64",
"logical-interface-zone-name": "Null",
"name": "lo0.32768",
"snmp-index": "248",
"traffic-statistics": {
"input-packets": "0",
"output-packets": "0",
},
},
],
"name": "lo0",
"oper-status": "Up",
"output-error-count": "0",
"snmp-index": "6",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-media-flags": {"ifmf-none": True},
"input-error-count": "0",
"interface-flapped": "Never",
"local-index": "4",
"name": "lsi",
"oper-status": "Up",
"output-error-count": "0",
"snmp-index": "4",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "64",
"name": "mtun",
"oper-status": "Up",
"snmp-index": "12",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "26",
"name": "pimd",
"oper-status": "Up",
"snmp-index": "11",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "25",
"name": "pime",
"oper-status": "Up",
"snmp-index": "10",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-media-flags": {"ifmf-none": True},
"input-error-count": "0",
"local-index": "128",
"name": "pp0",
"oper-status": "Up",
"output-error-count": "0",
"snmp-index": "501",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "130",
"name": "ppd0",
"oper-status": "Up",
"snmp-index": "503",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "131",
"name": "ppe0",
"oper-status": "Up",
"snmp-index": "504",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"local-index": "129",
"name": "st0",
"oper-status": "Up",
"snmp-index": "502",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
{
"admin-status": "Enabled",
"if-device-flags": {"ifdf-present": True, "ifdf-running": True},
"if-media-flags": {"ifmf-none": True},
"input-error-count": "0",
"interface-flapped": "Never",
"local-index": "12",
"name": "tap",
"oper-status": "Up",
"output-error-count": "0",
"snmp-index": "7",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
},
{
"admin-status": "Enabled",
"current-physical-address": "fa:16:3e:ff:a2:b7",
"hardware-physical-address": "fa:16:3e:ff:a2:b7",
"input-error-count": "0",
"interface-flapped": "2020-06-22 22:29:08 EST (1w1d 00:26 ago)",
"local-index": "132",
"name": "vlan",
"oper-status": "Down",
"output-error-count": "0",
"physical-interface-cos-information": {
"physical-interface-cos-hw-max-queues": "8",
"physical-interface-cos-use-max-queues": "8",
},
"snmp-index": "505",
"statistics-cleared": "2020-06-30 22:23:44 EST (00:32:21 ago)",
"traffic-statistics": {
"input-bps": "0",
"input-pps": "0",
"output-bps": "0",
"output-pps": "0",
},
},
]
}
}
| 46.068865 | 88 | 0.3392 | 3,614 | 47,497 | 4.457665 | 0.078583 | 0.030726 | 0.042334 | 0.041403 | 0.917877 | 0.906331 | 0.897765 | 0.883923 | 0.833644 | 0.804407 | 0 | 0.080861 | 0.506853 | 47,497 | 1,030 | 89 | 46.113592 | 0.606925 | 0 | 0 | 0.609709 | 0 | 0 | 0.370129 | 0.05733 | 0 | 0 | 0.001769 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7c111701775f7a470d031b373cda59d34c43aaaa | 11,197 | py | Python | tests/EVM/test_EVMBLOCKHASH.py | mroll/manticore | d731562f7761ed9437cce406b24c815303de370c | [
"Apache-2.0"
] | null | null | null | tests/EVM/test_EVMBLOCKHASH.py | mroll/manticore | d731562f7761ed9437cce406b24c815303de370c | [
"Apache-2.0"
] | null | null | null | tests/EVM/test_EVMBLOCKHASH.py | mroll/manticore | d731562f7761ed9437cce406b24c815303de370c | [
"Apache-2.0"
] | null | null | null |
import struct
import unittest
import json
from manticore.platforms import evm
from manticore.core import state
from manticore.core.smtlib import Operators, ConstraintSet
import os
class EVMTest_BLOCKHASH(unittest.TestCase):
    """Unit tests for the EVM BLOCKHASH (0x40) instruction.

    Each test pushes a candidate block number onto a fresh VM whose bytecode
    is the single BLOCKHASH opcode ('@' == 0x40), executes one step, and
    checks that the VM halts cleanly with the expected hash on the stack.
    """

    _multiprocess_can_split_ = True
    maxDiff = None

    # The hash BLOCKHASH currently returns for block number 0.
    # NOTE(review): the original comment says this is hash("1NONCE") — confirm
    # against the manticore EVM implementation.
    GENESIS_BLOCKHASH = 4191156306509761637738076877631970127621839175651556722833009931314104461609

    def _execute(self, new_vm):
        """Single-step *new_vm* and translate a terminating exception.

        Returns a ``(last_exception, last_returned)`` pair where
        ``last_exception`` is a short string tag (or ``None`` when execution
        raised nothing) and ``last_returned`` is the RETURN data, if any.
        """
        last_returned = None
        last_exception = None
        try:
            new_vm.execute()
        except evm.Stop:
            last_exception = "STOP"
        except evm.NotEnoughGas:
            last_exception = "OOG"
        except evm.StackUnderflow:
            last_exception = "INSUFICIENT STACK"
        except evm.InvalidOpcode:
            last_exception = "INVALID"
        except evm.SelfDestruct:
            last_exception = "SUICIDED"
        except evm.Return as e:
            last_exception = "RETURN"
            last_returned = e.data
        except evm.Revert:
            last_exception = "REVERT"
        return last_exception, last_returned

    def _make_vm(self):
        """Build a minimal EVM whose bytecode is a single BLOCKHASH opcode.

        The constraint store, world state, call parameters, and block header
        are the same fixture the original copy-pasted into every test.
        """
        constraints = ConstraintSet()
        world = evm.EVMWorld(constraints)
        address = 0x222222222222222222222222222222222222200
        caller = origin = 0x111111111111111111111111111111111111100
        price = 0
        value = 10000
        bytecode = '@'  # 0x40 == BLOCKHASH
        data = 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
        header = {'coinbase': 0,
                  'timestamp': 0,
                  'number': 0,
                  'difficulty': 0,
                  'gaslimit': 0,
                  }
        gas = 1000000
        return evm.EVM(constraints, address, origin, price, data, caller,
                       value, bytecode, header, gas=gas,
                       global_storage=world.storage)

    def _check_blockhash(self, block_number, expected_stack):
        """Push *block_number*, run BLOCKHASH, and assert the final state.

        Execution must raise nothing, advance pc to 1, and leave exactly
        *expected_stack* on the stack.
        """
        new_vm = self._make_vm()
        new_vm._push(block_number)
        last_exception, last_returned = self._execute(new_vm)
        self.assertEqual(last_exception, None)
        self.assertEqual(new_vm.pc, 1)
        self.assertEqual(new_vm.stack, expected_stack)

    def test_BLOCKHASH_1(self):
        # Maximum 256-bit value: far outside the 256-block window -> 0.
        self._check_blockhash(2 ** 256 - 1, [0])

    def test_BLOCKHASH_2(self):
        # Block 0 resolves to the known genesis hash.
        self._check_blockhash(0, [self.GENESIS_BLOCKHASH])

    def test_BLOCKHASH_3(self):
        self._check_blockhash(1, [0])

    def test_BLOCKHASH_4(self):
        self._check_blockhash(57896044618658097711785492504343953926634992332820282019728792003956564819952, [0])

    def test_BLOCKHASH_5(self):
        self._check_blockhash(3618502788666131106986593281521497120414687020801267626233049500247285301263, [0])

    def test_BLOCKHASH_6(self):
        self._check_blockhash(16, [0])

    def test_BLOCKHASH_7(self):
        self._check_blockhash(32, [0])

    def test_BLOCKHASH_8(self):
        self._check_blockhash(48, [0])

    def test_BLOCKHASH_9(self):
        self._check_blockhash(6089590155545428825848686802984512581899718912, [0])
# Allow running this test module directly (e.g. ``python test_EVMBLOCKHASH.py``)
# in addition to discovery via a test runner.
if __name__ == '__main__':
    unittest.main()
| 39.565371 | 143 | 0.563454 | 952 | 11,197 | 6.47479 | 0.110294 | 0.038125 | 0.052563 | 0.058404 | 0.826898 | 0.822842 | 0.822842 | 0.822842 | 0.822842 | 0.822842 | 0 | 0.176806 | 0.358489 | 11,197 | 282 | 144 | 39.705674 | 0.681331 | 0.045905 | 0 | 0.735931 | 0 | 0 | 0.066304 | 0.025321 | 0 | 0 | 0.069211 | 0 | 0.116883 | 0 | null | null | 0 | 0.030303 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
7c27ac7bbc9611fcceb6b45e3804ca8cfee4e1ea | 5,351 | py | Python | tests/test_provider_iwarapter_pingfederate.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 507 | 2017-07-26T02:58:38.000Z | 2022-01-21T12:35:13.000Z | tests/test_provider_iwarapter_pingfederate.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 135 | 2017-07-20T12:01:59.000Z | 2021-10-04T22:25:40.000Z | tests/test_provider_iwarapter_pingfederate.py | mjuenema/python-terrascript | 6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d | [
"BSD-2-Clause"
] | 81 | 2018-02-20T17:55:28.000Z | 2022-01-31T07:08:40.000Z | # tests/test_provider_iwarapter_pingfederate.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:24:44 UTC)
def test_provider_import():
    """Smoke-test that the pingfederate provider module is importable."""
    import terrascript.provider.iwarapter.pingfederate as provider_module  # noqa: F401
def test_resource_import():
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_authentication_api_application,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_authentication_api_settings,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_authentication_policies,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_authentication_policies_settings,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_authentication_policy_contract,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_authentication_policy_fragment,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_authentication_selector,
)
from terrascript.resource.iwarapter.pingfederate import pingfederate_certificates_ca
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_certificates_revocation_settings,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_custom_data_store,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_extended_properties,
)
from terrascript.resource.iwarapter.pingfederate import pingfederate_idp_adapter
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_idp_sp_connection,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_incoming_proxy_settings,
)
from terrascript.resource.iwarapter.pingfederate import pingfederate_jdbc_data_store
from terrascript.resource.iwarapter.pingfederate import pingfederate_kerberos_realm
from terrascript.resource.iwarapter.pingfederate import pingfederate_keypair_signing
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_keypair_signing_csr,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_keypair_ssl_server,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_keypair_ssl_server_csr,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_keypair_ssl_server_settings,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_keypairs_oauth_openid_connect,
)
from terrascript.resource.iwarapter.pingfederate import pingfederate_ldap_data_store
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_notification_publisher,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_oauth_access_token_manager,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_oauth_access_token_mappings,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_oauth_auth_server_settings,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_oauth_authentication_policy_contract_mapping,
)
from terrascript.resource.iwarapter.pingfederate import pingfederate_oauth_client
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_oauth_openid_connect_policy,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_oauth_resource_owner_credentials_mappings,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_password_credential_validator,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_pingone_connection,
)
from terrascript.resource.iwarapter.pingfederate import pingfederate_server_settings
from terrascript.resource.iwarapter.pingfederate import pingfederate_sp_adapter
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_sp_authentication_policy_contract_mapping,
)
from terrascript.resource.iwarapter.pingfederate import (
pingfederate_sp_idp_connection,
)
def test_datasource_import():
    """Smoke-test that every pingfederate data-source class is importable."""
    from terrascript.data.iwarapter.pingfederate import (  # noqa: F401
        pingfederate_custom_data_store,
        pingfederate_jdbc_data_store,
        pingfederate_keypair_signing_csr,
        pingfederate_keypair_ssl_server_csr,
        pingfederate_ldap_data_store,
        pingfederate_version,
    )
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.iwarapter.pingfederate
#
# t = terrascript.provider.iwarapter.pingfederate.pingfederate()
# s = str(t)
#
# assert 'https://github.com/iwarapter/terraform-provider-pingfederate' in s
# assert '0.0.21' in s
| 31.662722 | 88 | 0.779294 | 521 | 5,351 | 7.733205 | 0.203455 | 0.244974 | 0.288161 | 0.416232 | 0.830727 | 0.807893 | 0.807893 | 0.78208 | 0.54902 | 0.164557 | 0 | 0.003591 | 0.167445 | 5,351 | 168 | 89 | 31.85119 | 0.900786 | 0.09886 | 0 | 0.285714 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005952 | 0 | 1 | 0.028571 | true | 0.009524 | 0.447619 | 0 | 0.47619 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 10 |
7c47903543311803a016254a8643239bc0f1c5ab | 93 | py | Python | gym_ball/envs/__init__.py | o0olele/gym-ball | 4fae171745d8b7f29ca44b1750e71c2243924706 | [
"Apache-2.0"
] | 4 | 2021-09-28T11:48:32.000Z | 2021-12-22T07:41:59.000Z | gym_ball/envs/__init__.py | o0olele/gym-ball | 4fae171745d8b7f29ca44b1750e71c2243924706 | [
"Apache-2.0"
] | null | null | null | gym_ball/envs/__init__.py | o0olele/gym-ball | 4fae171745d8b7f29ca44b1750e71c2243924706 | [
"Apache-2.0"
] | 1 | 2021-12-22T07:42:01.000Z | 2021-12-22T07:42:01.000Z | from gym_ball.envs.ball_env import BallEnv
from gym_ball.envs.ball_eat_env import BallEatEnv
| 31 | 49 | 0.870968 | 17 | 93 | 4.470588 | 0.529412 | 0.184211 | 0.289474 | 0.394737 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.086022 | 93 | 2 | 50 | 46.5 | 0.894118 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
7c5a64cee7d7b8c04a08e374859ec0462cb11f7d | 29,173 | py | Python | openregistry/lots/loki/tests/blanks/auction_blanks.py | oleksiyVeretiuk/openregistry.lots.loki | 0039c101fca4732e96904b93ac2efcbdd338900f | [
"Apache-2.0"
] | null | null | null | openregistry/lots/loki/tests/blanks/auction_blanks.py | oleksiyVeretiuk/openregistry.lots.loki | 0039c101fca4732e96904b93ac2efcbdd338900f | [
"Apache-2.0"
] | null | null | null | openregistry/lots/loki/tests/blanks/auction_blanks.py | oleksiyVeretiuk/openregistry.lots.loki | 0039c101fca4732e96904b93ac2efcbdd338900f | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import unittest
from copy import deepcopy
from datetime import timedelta
from openregistry.lots.core.utils import (
get_now,
calculate_business_date
)
from openregistry.lots.core.models import Period
from openregistry.lots.loki.models import Lot
from openregistry.lots.core.constants import SANDBOX_MODE
from openregistry.lots.loki.constants import DEFAULT_DUTCH_STEPS, DEFAULT_REGISTRATION_FEE
from openregistry.lots.loki.tests.base import (
create_single_lot,
check_patch_status_200,
add_decisions,
add_auctions
)
def patch_auctions_with_lot(self):
    """Auction fields cannot be modified via a PATCH on the lot itself.

    Walks a fresh lot through draft -> composing -> verification -> pending,
    then (as broker) tries to change the first auction's ``tenderAttempts``
    and ``procurementMethodType`` through the lot endpoint and checks that
    the server keeps the original values.
    """
    self.app.authorization = ('Basic', ('broker', ''))
    response = create_single_lot(self, self.initial_data)
    lot = response.json['data']
    token = response.json['access']['token']
    access_header = {'X-Access-Token': str(token)}

    # Move from 'draft' to 'pending' status
    check_patch_status_200(self, '/{}'.format(lot['id']), 'composing', access_header)

    add_auctions(self, lot, access_header)
    check_patch_status_200(self, '/{}'.format(lot['id']), 'verification', access_header)

    # Concierge confirms verification, attaches decisions, and moves the lot
    # to 'pending'.
    self.app.authorization = ('Basic', ('concierge', ''))
    check_patch_status_200(self, '/{}'.format(lot['id']), 'verification')
    add_decisions(self, lot)
    check_patch_status_200(self, '/{}'.format(lot['id']), 'pending')

    # Broker attempts to overwrite auction attributes through the lot PATCH.
    # 'decisions' and 'status' are dropped because they are not broker-editable
    # in this state.
    self.app.authorization = ('Basic', ('broker', ''))
    data = deepcopy(lot)
    del data['decisions']
    del data['status']
    data['auctions'][0]['tenderAttempts'] = 3
    data['auctions'][0]['procurementMethodType'] = 'sellout.insider'
    response = self.app.patch_json(
        '/{}'.format(lot['id']),
        headers=access_header,
        params={'data': data},
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # The PATCH succeeds, but the auction changes are ignored: the first
    # auction keeps tenderAttempts == 1 and type 'sellout.english'.
    self.assertEqual(response.json['data']['auctions'][0]['tenderAttempts'], 1)
    self.assertEqual(response.json['data']['auctions'][0]['procurementMethodType'], 'sellout.english')

    # Double-check via the auctions sub-collection endpoint as well.
    response = self.app.get('/{}/auctions'.format(lot['id']))
    auctions = sorted(response.json['data'], key=lambda a: a['tenderAttempts'])
    english = auctions[0]
    self.assertEqual(english['tenderAttempts'], 1)
    self.assertEqual(english['procurementMethodType'], 'sellout.english')
def patch_auction_by_concierge(self):
    """Concierge can set auction result fields that the broker cannot.

    First the broker patches the first (English) auction's commercial fields
    and the server echoes them back; then, under concierge authorization, the
    same auction accepts ``status``, ``auctionID`` and ``relatedProcessID``.
    """
    data = deepcopy(self.initial_auctions_data)

    response = self.app.get('/{}/auctions'.format(self.resource_id))
    auctions = sorted(response.json['data'], key=lambda a: a['tenderAttempts'])
    english = auctions[0]

    # Broker updates the first auction's minimal step; the response must
    # reflect all submitted commercial fields.
    data['english']['minimalStep']['amount'] = 99
    response = self.app.patch_json('/{}/auctions/{}'.format(self.resource_id, english['id']),
        headers=self.access_header, params={
            'data': data['english']
        })
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['id'], english['id'])
    self.assertEqual(response.json['data']['tenderAttempts'], 1)
    self.assertEqual(response.json['data']['value'], data['english']['value'])
    self.assertEqual(response.json['data']['minimalStep'], data['english']['minimalStep'])
    self.assertEqual(response.json['data']['auctionPeriod'], data['english']['auctionPeriod'])
    self.assertEqual(response.json['data']['guarantee'], data['english']['guarantee'])
    self.assertEqual(response.json['data']['registrationFee'], data['english']['registrationFee'])
    # An English auction carries no dutchSteps parameter.
    self.assertNotIn('dutchSteps', response.json['data']['auctionParameters'])

    # Concierge sets the auction outcome fields; all three are persisted.
    self.app.authorization = ('Basic', ('concierge', ''))
    response = self.app.patch_json('/{}/auctions/{}'.format(self.resource_id, english['id']),
        headers=self.access_header, params={
            'data': {
                'status': 'unsuccessful',
                'auctionID': 'someAuctionID',
                'relatedProcessID': '1' * 32
            }
        })
    self.assertEqual(response.json['data']['status'], 'unsuccessful')
    self.assertEqual(response.json['data']['auctionID'], 'someAuctionID')
    self.assertEqual(response.json['data']['relatedProcessID'], '1' * 32)
def patch_english_auction(self):
    """Patching the first English auction cascades to the follow-up auctions.

    After the broker patches the first 'sellout.english' auction, the second
    English and the insider auctions are re-derived from it (half value,
    half guarantee, half/zero minimal step).  Also checks that
    ``auctionParameters`` (dutchSteps, type) submitted for the first auction
    are silently ignored.
    """
    data = deepcopy(self.initial_auctions_data)

    response = self.app.get('/{}/auctions'.format(self.resource_id))
    auctions = sorted(response.json['data'], key=lambda a: a['tenderAttempts'])
    english = auctions[0]

    # Patch the first auction and verify the submitted fields are echoed back.
    data['english']['minimalStep']['amount'] = 99
    response = self.app.patch_json('/{}/auctions/{}'.format(self.resource_id, english['id']),
        headers=self.access_header, params={
            'data': data['english']
        })
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['id'], english['id'])
    self.assertEqual(response.json['data']['tenderAttempts'], 1)
    self.assertEqual(response.json['data']['value'], data['english']['value'])
    self.assertEqual(response.json['data']['minimalStep'], data['english']['minimalStep'])
    self.assertEqual(response.json['data']['auctionPeriod'], data['english']['auctionPeriod'])
    self.assertEqual(response.json['data']['guarantee'], data['english']['guarantee'])
    self.assertEqual(response.json['data']['registrationFee'], data['english']['registrationFee'])
    self.assertNotIn('dutchSteps', response.json['data']['auctionParameters'])
    # Remember the server-assigned parameters type for the type-validation
    # check at the end of this test.
    default_type = response.json['data']['auctionParameters']['type']

    response = self.app.get('/{}/auctions'.format(self.resource_id))
    auctions = sorted(response.json['data'], key=lambda a: a['tenderAttempts'])
    english = auctions[0]
    second_english = auctions[1]
    insider = auctions[2]

    # Test first sellout.english
    self.assertEqual(english['procurementMethodType'], 'sellout.english')
    self.assertEqual(english['value']['amount'], data['english']['value']['amount'])
    self.assertEqual(english['registrationFee']['amount'], data['english']['registrationFee']['amount'])
    self.assertEqual(english['minimalStep']['amount'], data['english']['minimalStep']['amount'])
    self.assertEqual(english['guarantee']['amount'], data['english']['guarantee']['amount'])
    self.assertEqual(english['auctionParameters']['type'], 'english')
    self.assertNotIn('dutchSteps', english['auctionParameters'])
    self.assertNotIn('tenderingDuration', english)

    # Test second sellout.english(half values)
    self.assertEqual(second_english['procurementMethodType'], 'sellout.english')
    self.assertEqual(second_english['value']['amount'], round(english['value']['amount'] / 2, 2))
    self.assertEqual(second_english['registrationFee']['amount'], english['registrationFee']['amount'])
    self.assertEqual(second_english['minimalStep']['amount'], english['minimalStep']['amount'] / 2)
    self.assertEqual(second_english['guarantee']['amount'], round(english['guarantee']['amount'] / 2, 2))
    self.assertEqual(second_english['auctionParameters']['type'], 'english')
    self.assertNotIn('dutchSteps', second_english['auctionParameters'])

    # Test second sellout.insider(half values; minimal step fixed at 0)
    self.assertEqual(insider['procurementMethodType'], 'sellout.insider')
    self.assertEqual(insider['value']['amount'], round(english['value']['amount'] / 2, 2))
    self.assertEqual(insider['registrationFee']['amount'], english['registrationFee']['amount'])
    self.assertEqual(insider['minimalStep']['amount'], 0)
    self.assertEqual(insider['guarantee']['amount'], round(english['guarantee']['amount'] / 2, 2))
    self.assertEqual(insider['auctionParameters']['type'], 'insider')
    self.assertEqual(insider['auctionParameters']['dutchSteps'], DEFAULT_DUTCH_STEPS)

    # Test change tenderingDuration: submitted for the first auction it is
    # dropped from it, and the derived auctions keep their invariants.
    data['english']['minimalStep']['amount'] = 100
    data['english']['tenderingDuration'] = 'P2YT3H'
    response = self.app.patch_json('/{}/auctions/{}'.format(self.resource_id, english['id']),
        headers=self.access_header, params={
            'data': data['english']
        })
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['tenderAttempts'], 1)
    self.assertNotIn('tenderingDuration', response.json['data'])

    response = self.app.get('/{}/auctions'.format(self.resource_id))
    auctions = sorted(response.json['data'], key=lambda a: a['tenderAttempts'])
    english = auctions[0]
    second_english = auctions[1]
    insider = auctions[2]

    # Test first sellout.english
    self.assertEqual(english['procurementMethodType'], 'sellout.english')
    self.assertEqual(english['value']['amount'], data['english']['value']['amount'])
    self.assertEqual(english['registrationFee']['amount'], data['english']['registrationFee']['amount'])
    self.assertEqual(english['minimalStep']['amount'], data['english']['minimalStep']['amount'])
    self.assertEqual(english['guarantee']['amount'], data['english']['guarantee']['amount'])
    self.assertEqual(english['auctionParameters']['type'], 'english')
    self.assertNotIn('dutchSteps', english['auctionParameters'])
    self.assertNotIn('tenderingDuration', english)

    # Test second sellout.english(half values)
    self.assertEqual(second_english['procurementMethodType'], 'sellout.english')
    self.assertEqual(second_english['value']['amount'], round(english['value']['amount'] / 2, 2))
    self.assertEqual(second_english['registrationFee']['amount'], english['registrationFee']['amount'])
    self.assertEqual(second_english['minimalStep']['amount'], english['minimalStep']['amount'] / 2)
    self.assertEqual(second_english['guarantee']['amount'], round(english['guarantee']['amount'] / 2, 2))
    self.assertEqual(second_english['auctionParameters']['type'], 'english')
    self.assertNotIn('dutchSteps', second_english['auctionParameters'])

    # Test second sellout.insider(half values)
    self.assertEqual(insider['procurementMethodType'], 'sellout.insider')
    self.assertEqual(insider['value']['amount'], round(english['value']['amount'] / 2, 2))
    self.assertEqual(insider['registrationFee']['amount'], english['registrationFee']['amount'])
    self.assertEqual(insider['minimalStep']['amount'], 0)
    self.assertEqual(insider['guarantee']['amount'], round(english['guarantee']['amount'] / 2, 2))
    self.assertEqual(insider['auctionParameters']['type'], 'insider')
    self.assertEqual(insider['auctionParameters']['dutchSteps'], DEFAULT_DUTCH_STEPS)

    # Test change steps validation: dutchSteps on the English auction is
    # ignored (the PATCH returns 200 with an empty JSON body).
    data['english']['auctionParameters'] = {'dutchSteps': 66}
    response = self.app.patch_json(
        '/{}/auctions/{}'.format(self.resource_id, english['id']),
        headers=self.access_header, params={
            'data': data['english']
        },
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json, None)

    response = self.app.get(
        '/{}/auctions/{}'.format(self.resource_id, english['id']),
        headers=self.access_header,
    )
    self.assertNotIn('dutchSteps', response.json['data']['auctionParameters'])

    # Test type validation: a submitted parameters type is likewise ignored
    # and the server keeps the original ('english') type.
    data['english']['auctionParameters'] = {'type': 'insider'}
    response = self.app.patch_json(
        '/{}/auctions/{}'.format(self.resource_id, english['id']),
        headers=self.access_header, params={
            'data': data['english']
        },
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json, None)

    response = self.app.get(
        '/{}/auctions/{}'.format(self.resource_id, english['id']),
        headers=self.access_header
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertNotEqual(response.json['data']['auctionParameters']['type'], data['english']['auctionParameters']['type'])
    self.assertEqual(response.json['data']['auctionParameters']['type'], default_type)
def patch_second_english_auction(self):
    """Patch the second ``sellout.english`` auction and verify the results.

    Checks that:
      * ``tenderingDuration`` can be changed and is mirrored onto the
        dependent ``sellout.insider`` auction;
      * a client-supplied ``dutchSteps`` is ignored for english auctions;
      * a client-supplied auctionParameters ``type`` is ignored and the
        server keeps the default type.
    """
    data = deepcopy(self.initial_auctions_data)
    response = self.app.get('/{}/auctions'.format(self.resource_id))
    auctions = sorted(response.json['data'], key=lambda a: a['tenderAttempts'])
    second_english = auctions[1]
    second_english['tenderingDuration'] = 'P2YT3H'
    default_type = second_english['auctionParameters']['type']
    response = self.app.patch_json('/{}/auctions/{}'.format(self.resource_id, second_english['id']),
                                   headers=self.access_header, params={
                                       'data': data['second.english']
                                   })
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['tenderingDuration'], data['second.english']['tenderingDuration'])
    self.assertEqual(response.json['data']['tenderAttempts'], 2)
    self.assertNotIn('dutchSteps', response.json['data']['auctionParameters'])
    response = self.app.get('/{}/auctions'.format(self.resource_id))
    auctions = sorted(response.json['data'], key=lambda a: a['tenderAttempts'])
    second_english = auctions[1]
    insider = auctions[2]
    # Test second sellout.english(half values)
    self.assertEqual(second_english['auctionParameters']['type'], 'english')
    self.assertEqual(second_english['tenderingDuration'], data['second.english']['tenderingDuration'])
    self.assertNotIn('dutchSteps', second_english['auctionParameters'])
    # Test second sellout.insider(half values)
    self.assertEqual(insider['procurementMethodType'], 'sellout.insider')
    self.assertEqual(insider['tenderingDuration'], second_english['tenderingDuration'])
    self.assertEqual(insider['auctionParameters']['dutchSteps'], DEFAULT_DUTCH_STEPS)
    # Test dutch steps validation
    data = deepcopy(self.initial_auctions_data)
    data['second.english']['auctionParameters'] = {'dutchSteps': 66}
    response = self.app.patch_json(
        '/{}/auctions/{}'.format(self.resource_id, second_english['id']),
        # BUG FIX: previously sent data['english'], so the dutchSteps
        # payload prepared above was never submitted and the validation
        # below was not actually exercised.
        headers=self.access_header, params={
            'data': data['second.english']
        },
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json, None)
    response = self.app.get(
        '/{}/auctions/{}'.format(self.resource_id, second_english['id']),
        headers=self.access_header,
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # dutchSteps must be dropped by the server for an english auction.
    self.assertNotIn('dutchSteps', response.json['data']['auctionParameters'])
    # Test type validation
    data = deepcopy(self.initial_auctions_data)
    data['second.english']['auctionParameters'] = {'type': 'insider'}
    response = self.app.patch_json(
        '/{}/auctions/{}'.format(self.resource_id, insider['id']),
        headers=self.access_header, params={
            'data': data['second.english']
        },
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json, None)
    response = self.app.get(
        '/{}/auctions/{}'.format(self.resource_id, second_english['id']),
        headers=self.access_header,
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # The client-sent type must have been ignored in favour of the default.
    self.assertNotEqual(response.json['data']['auctionParameters']['type'],
                        data['second.english']['auctionParameters']['type'])
    self.assertEqual(response.json['data']['auctionParameters']['type'], default_type)
def patch_insider_auction(self):
    """Patch the ``sellout.insider`` auction and verify the results.

    Checks that:
      * ``dutchSteps`` is patchable on an insider auction;
      * ``tenderingDuration`` cannot be set on the insider auction;
      * a client-supplied auctionParameters ``type`` is ignored and the
        server keeps the default type.
    """
    data = deepcopy(self.initial_auctions_data)
    response = self.app.get('/{}/auctions'.format(self.resource_id))
    auctions = sorted(response.json['data'], key=lambda a: a['tenderAttempts'])
    insider = auctions[2]
    data_dutch_steps = {'auctionParameters': {'dutchSteps': 77}}
    response = self.app.patch_json('/{}/auctions/{}'.format(self.resource_id, insider['id']),
                                   headers=self.access_header, params={
                                       'data': data_dutch_steps
                                   })
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['auctionParameters']['dutchSteps'], data_dutch_steps['auctionParameters']['dutchSteps'])
    self.assertNotIn('tenderingDuration', response.json['data'])
    self.assertEqual(response.json['data']['tenderAttempts'], 3)
    default_type = response.json['data']['auctionParameters']['type']
    data_with_tenderingDuration = {
        'tenderingDuration': 'P2YT3H',
        'auctionParameters': {'dutchSteps': 88}
    }
    response = self.app.patch_json('/{}/auctions/{}'.format(self.resource_id, insider['id']),
                                   headers=self.access_header, params={
                                       'data': data_with_tenderingDuration
                                   })
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # tenderingDuration must not be settable on the insider auction.
    self.assertNotIn('tenderingDuration', response.json['data'])
    self.assertEqual(response.json['data']['tenderAttempts'], 3)
    # Test type validation
    data = deepcopy(self.initial_auctions_data)
    data['insider'] = {}
    data['insider']['auctionParameters'] = {'type': 'english'}
    response = self.app.patch_json(
        '/{}/auctions/{}'.format(self.resource_id, insider['id']),
        # BUG FIX: previously sent data['english'] although the payload
        # with the disallowed type was built in data['insider'] above
        # (and the assertion below compares against data['insider']).
        headers=self.access_header, params={
            'data': data['insider']
        },
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json, None)
    response = self.app.get(
        '/{}/auctions/{}'.format(self.resource_id, insider['id']),
        headers=self.access_header
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    # The client-sent type must have been ignored in favour of the default.
    self.assertNotEqual(response.json['data']['auctionParameters']['type'],
                        data['insider']['auctionParameters']['type'])
    self.assertEqual(response.json['data']['auctionParameters']['type'], default_type)
def rectificationPeriod_auction_workflow(self):
    """Auctions become read-only after the lot's rectification period ends."""
    # Build a rectification period that has already expired.
    period = Period()
    period.startDate = get_now() - timedelta(3)
    period.endDate = calculate_business_date(period.startDate,
                                             timedelta(1),
                                             None)
    auction_data = deepcopy(self.initial_auctions_data)
    lot = self.create_resource()
    # Rewrite the stored lot directly in the DB so its rectification
    # period is in the past.
    stored = Lot(self.db.get(lot['id']))
    stored.status = 'pending'
    stored.rectificationPeriod = period
    stored = stored.store(self.db)
    self.assertEqual(stored.id, lot['id'])
    response = self.app.get('/{}'.format(lot['id']))
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.json['data']['id'], lot['id'])
    response = self.app.get('/{}/auctions'.format(self.resource_id))
    english = sorted(response.json['data'], key=lambda a: a['tenderAttempts'])[0]
    # Any auction patch must now be rejected with 403.
    response = self.app.patch_json('/{}/auctions/{}'.format(lot['id'], english['id']),
                                   headers=self.access_header,
                                   params={'data': auction_data['english']},
                                   status=403)
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.json['errors'][0]['description'],
                     "You can't change auctions after rectification period")
@unittest.skipIf(not SANDBOX_MODE, 'If sandbox mode is enabled auctionParameters has additional field procurementMethodDetails')
def procurementMethodDetails_check_with_sandbox(self):
    """In sandbox mode 'procurementMethodDetails' is absent by default and patchable on every auction."""
    response = self.app.get('/{}'.format(self.resource_id))
    lot = response.json['data']
    auctions = response.json['data']['auctions']
    english, second_english, insider = auctions[0], auctions[1], auctions[2]
    # None of the freshly created auctions carries the field.
    for auction in (english, second_english, insider):
        self.assertNotIn('procurementMethodDetails', auction)
    patch_data = {'procurementMethodDetails': 'quick'}
    # Patch each auction in turn (second english, insider, english) and
    # verify the field round-trips.
    for auction in (second_english, insider, english):
        response = self.app.patch_json(
            '/{}/auctions/{}'.format(lot['id'], auction['id']),
            {'data': patch_data},
            headers=self.access_header
        )
        self.assertEqual(
            response.json['data']['procurementMethodDetails'],
            patch_data['procurementMethodDetails']
        )
@unittest.skipIf(SANDBOX_MODE, 'If sandbox mode is disabled auctionParameters has not procurementMethodDetails field')
def procurementMethodDetails_check_without_sandbox(self):
    """Outside sandbox mode 'procurementMethodDetails' is rejected as a rogue field."""
    response = self.app.get('/{}'.format(self.resource_id))
    lot = response.json['data']
    auctions = response.json['data']['auctions']
    english, second_english, insider = auctions[0], auctions[1], auctions[2]
    # None of the freshly created auctions carries the field.
    for auction in (english, second_english, insider):
        self.assertNotIn('procurementMethodDetails', auction)
    patch_data = {'procurementMethodDetails': 'quick'}
    # Patching the field onto any auction (english, second english,
    # insider) must fail with 422 / "Rogue field".
    for auction in (english, second_english, insider):
        response = self.app.patch_json(
            '/{}/auctions/{}'.format(lot['id'], auction['id']),
            {'data': patch_data},
            headers=self.access_header,
            status=422
        )
        self.assertEqual(response.status, '422 Unprocessable Entity')
        self.assertEqual(response.content_type, 'application/json')
        self.assertEqual(response.json['errors'][0]['description'], u'Rogue field')
        self.assertEqual(response.json['errors'][0]['name'], 'procurementMethodDetails')
# submissionMethodDetails test
def submissionMethodDetails_check(self):
    """'submissionMethodDetails' is absent by default and patchable on every auction."""
    response = self.app.get('/{}'.format(self.resource_id))
    lot = response.json['data']
    auctions = response.json['data']['auctions']
    english, second_english, insider = auctions[0], auctions[1], auctions[2]
    # None of the freshly created auctions carries the field.
    for auction in (english, second_english, insider):
        self.assertNotIn('submissionMethodDetails', auction)
    patch_data = {'submissionMethodDetails': 'quick(mode:fast-forward)'}
    # Patch each auction in turn (second english, insider, english) and
    # verify the field round-trips.
    for auction in (second_english, insider, english):
        response = self.app.patch_json(
            '/{}/auctions/{}'.format(lot['id'], auction['id']),
            {'data': patch_data},
            headers=self.access_header
        )
        self.assertEqual(
            response.json['data']['submissionMethodDetails'],
            patch_data['submissionMethodDetails']
        )
def registrationFee_default(self):
    """registrationFee defaults, propagates when patched, and resets on None."""

    def fetch_auctions():
        # Auctions sorted by tenderAttempts: english, second english, insider.
        resp = self.app.get('/{}/auctions'.format(self.resource_id))
        return sorted(resp.json['data'], key=lambda a: a['tenderAttempts'])

    auctions = fetch_auctions()
    english = auctions[0]
    # Every auction starts with the default registration fee.
    for auction in (auctions[0], auctions[1], auctions[2]):
        self.assertEqual(auction['registrationFee']['amount'], DEFAULT_REGISTRATION_FEE)
    # Patching the fee on the english auction propagates to all auctions.
    fee_patch = {
        'registrationFee': {'amount': 100}
    }
    response = self.app.patch_json('/{}/auctions/{}'.format(self.resource_id, english['id']),
                                   headers=self.access_header, params={
                                       'data': fee_patch
                                   })
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(
        response.json['data']['registrationFee']['amount'],
        fee_patch['registrationFee']['amount']
    )
    auctions = fetch_auctions()
    english = auctions[0]
    for auction in (auctions[0], auctions[1], auctions[2]):
        self.assertEqual(auction['registrationFee']['amount'], fee_patch['registrationFee']['amount'])
    # Patching registrationFee back to None restores the default everywhere.
    fee_patch = {
        'registrationFee': None
    }
    response = self.app.patch_json('/{}/auctions/{}'.format(self.resource_id, english['id']),
                                   headers=self.access_header, params={
                                       'data': fee_patch
                                   })
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(response.json['data']['registrationFee']['amount'], DEFAULT_REGISTRATION_FEE)
    auctions = fetch_auctions()
    for auction in (auctions[0], auctions[1], auctions[2]):
        self.assertEqual(auction['registrationFee']['amount'], DEFAULT_REGISTRATION_FEE)
| 44.201515 | 131 | 0.680012 | 2,959 | 29,173 | 6.595471 | 0.0561 | 0.113753 | 0.109602 | 0.067791 | 0.868057 | 0.835776 | 0.813794 | 0.790992 | 0.769676 | 0.758455 | 0 | 0.008293 | 0.156755 | 29,173 | 659 | 132 | 44.268589 | 0.785041 | 0.04463 | 0 | 0.701887 | 0 | 0 | 0.239897 | 0.03305 | 0 | 0 | 0 | 0 | 0.330189 | 1 | 0.018868 | false | 0 | 0.016981 | 0 | 0.035849 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7c662971cbd7ff1ba73a52e59fc57e5a5b098410 | 352,420 | py | Python | nbdev_django/_modidx.py | fastai/nbdev-stdlib | 8a956e40ee31c32170ab96f832fc8e0c9510c83e | [
"Apache-2.0"
] | 2 | 2020-10-15T14:59:56.000Z | 2020-10-15T17:29:18.000Z | nbdev_django/_modidx.py | fastai/nbdev-stdlib | 8a956e40ee31c32170ab96f832fc8e0c9510c83e | [
"Apache-2.0"
] | 3 | 2020-10-17T05:05:21.000Z | 2020-10-19T21:19:01.000Z | nbdev_django/_modidx.py | fastai/nbdev-stdlib | 8a956e40ee31c32170ab96f832fc8e0c9510c83e | [
"Apache-2.0"
] | null | null | null | # Autogenerated by get_module_idx.py
d = { 'syms': { 'django': { 'django.apps': 'http://django.readthedocs.org/en/latest/ref/applications.html#module-django.apps',
'django.db': 'http://django.readthedocs.org/en/latest/topics/db/index.html#module-django.db',
'django.dispatch': 'http://django.readthedocs.org/en/latest/topics/signals.html#module-django.dispatch',
'django.forms': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#module-django.forms',
'django.http': 'http://django.readthedocs.org/en/latest/ref/request-response.html#module-django.http',
'django.middleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.middleware',
'django.shortcuts': 'http://django.readthedocs.org/en/latest/topics/http/shortcuts.html#module-django.shortcuts',
'django.template': 'http://django.readthedocs.org/en/latest/topics/templates.html#module-django.template',
'django.test': 'http://django.readthedocs.org/en/latest/topics/testing/overview.html#module-django.test',
'django.urls': 'http://django.readthedocs.org/en/latest/ref/urlresolvers.html#module-django.urls',
'django.utils': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils',
'django.views': 'http://django.readthedocs.org/en/latest/ref/views.html#module-django.views',
'django.setup': 'http://django.readthedocs.org/en/latest/ref/applications.html#django.setup'},
'django.conf': {'django.conf.urls': 'http://django.readthedocs.org/en/latest/ref/urls.html#module-django.conf.urls'},
'django.conf.urls': {'django.conf.urls.i18n': 'http://django.readthedocs.org/en/latest/topics/i18n/translation.html#module-django.conf.urls.i18n'},
'django.contrib': { 'django.contrib.admin': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#module-django.contrib.admin',
'django.contrib.admindocs': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/admindocs.html#module-django.contrib.admindocs',
'django.contrib.auth': 'http://django.readthedocs.org/en/latest/topics/auth/index.html#module-django.contrib.auth',
'django.contrib.contenttypes': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#module-django.contrib.contenttypes',
'django.contrib.flatpages': 'http://django.readthedocs.org/en/latest/ref/contrib/flatpages.html#module-django.contrib.flatpages',
'django.contrib.gis': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/index.html#module-django.contrib.gis',
'django.contrib.humanize': 'http://django.readthedocs.org/en/latest/ref/contrib/humanize.html#module-django.contrib.humanize',
'django.contrib.messages': 'http://django.readthedocs.org/en/latest/ref/contrib/messages.html#module-django.contrib.messages',
'django.contrib.postgres': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/index.html#module-django.contrib.postgres',
'django.contrib.redirects': 'http://django.readthedocs.org/en/latest/ref/contrib/redirects.html#module-django.contrib.redirects',
'django.contrib.sessions': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#module-django.contrib.sessions',
'django.contrib.sitemaps': 'http://django.readthedocs.org/en/latest/ref/contrib/sitemaps.html#module-django.contrib.sitemaps',
'django.contrib.sites': 'http://django.readthedocs.org/en/latest/ref/contrib/sites.html#module-django.contrib.sites',
'django.contrib.staticfiles': 'http://django.readthedocs.org/en/latest/ref/contrib/staticfiles.html#module-django.contrib.staticfiles',
'django.contrib.syndication': 'http://django.readthedocs.org/en/latest/ref/contrib/syndication.html#module-django.contrib.syndication'},
'django.contrib.auth': { 'django.contrib.auth.backends': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#module-django.contrib.auth.backends',
'django.contrib.auth.forms': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#module-django.contrib.auth.forms',
'django.contrib.auth.hashers': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#module-django.contrib.auth.hashers',
'django.contrib.auth.middleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.contrib.auth.middleware',
'django.contrib.auth.password_validation': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#module-django.contrib.auth.password_validation',
'django.contrib.auth.signals': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#module-django.contrib.auth.signals',
'django.contrib.auth.views': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#module-django.contrib.auth.views',
'django.contrib.auth.authenticate': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.authenticate',
'django.contrib.auth.get_user': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.get_user',
'django.contrib.auth.get_user_model': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.get_user_model',
'django.contrib.auth.login': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.login',
'django.contrib.auth.logout': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.logout',
'django.contrib.auth.update_session_auth_hash': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.update_session_auth_hash'},
'django.contrib.contenttypes': { 'django.contrib.contenttypes.admin': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#module-django.contrib.contenttypes.admin',
'django.contrib.contenttypes.fields': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#module-django.contrib.contenttypes.fields',
'django.contrib.contenttypes.forms': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#module-django.contrib.contenttypes.forms'},
'django.contrib.gis': { 'django.contrib.gis.admin': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/admin.html#module-django.contrib.gis.admin',
'django.contrib.gis.feeds': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/feeds.html#module-django.contrib.gis.feeds',
'django.contrib.gis.forms': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#module-django.contrib.gis.forms',
'django.contrib.gis.gdal': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#module-django.contrib.gis.gdal',
'django.contrib.gis.geoip2': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#module-django.contrib.gis.geoip2',
'django.contrib.gis.geos': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#module-django.contrib.gis.geos',
'django.contrib.gis.measure': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/measure.html#module-django.contrib.gis.measure',
'django.contrib.gis.utils': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/utils.html#module-django.contrib.gis.utils'},
'django.contrib.gis.db': { 'django.contrib.gis.db.backends': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/db-api.html#module-django.contrib.gis.db.backends',
'django.contrib.gis.db.models': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/model-api.html#module-django.contrib.gis.db.models'},
'django.contrib.gis.db.models': { 'django.contrib.gis.db.models.functions': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#module-django.contrib.gis.db.models.functions',
'django.contrib.gis.db.models.Collect': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoquerysets.html#django.contrib.gis.db.models.Collect',
'django.contrib.gis.db.models.Extent': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoquerysets.html#django.contrib.gis.db.models.Extent',
'django.contrib.gis.db.models.Extent3D': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoquerysets.html#django.contrib.gis.db.models.Extent3D',
'django.contrib.gis.db.models.GeometryCollectionField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/model-api.html#django.contrib.gis.db.models.GeometryCollectionField',
'django.contrib.gis.db.models.GeometryField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/model-api.html#django.contrib.gis.db.models.GeometryField',
'django.contrib.gis.db.models.LineStringField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/model-api.html#django.contrib.gis.db.models.LineStringField',
'django.contrib.gis.db.models.MakeLine': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoquerysets.html#django.contrib.gis.db.models.MakeLine',
'django.contrib.gis.db.models.MultiLineStringField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/model-api.html#django.contrib.gis.db.models.MultiLineStringField',
'django.contrib.gis.db.models.MultiPointField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/model-api.html#django.contrib.gis.db.models.MultiPointField',
'django.contrib.gis.db.models.MultiPolygonField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/model-api.html#django.contrib.gis.db.models.MultiPolygonField',
'django.contrib.gis.db.models.PointField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/model-api.html#django.contrib.gis.db.models.PointField',
'django.contrib.gis.db.models.PolygonField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/model-api.html#django.contrib.gis.db.models.PolygonField',
'django.contrib.gis.db.models.RasterField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/model-api.html#django.contrib.gis.db.models.RasterField',
'django.contrib.gis.db.models.Union': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoquerysets.html#django.contrib.gis.db.models.Union'},
'django.contrib.gis.forms': { 'django.contrib.gis.forms.widgets': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#module-django.contrib.gis.forms.widgets',
'django.contrib.gis.forms.GeometryCollectionField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.GeometryCollectionField',
'django.contrib.gis.forms.GeometryField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.GeometryField',
'django.contrib.gis.forms.LineStringField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.LineStringField',
'django.contrib.gis.forms.MultiLineStringField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.MultiLineStringField',
'django.contrib.gis.forms.MultiPointField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.MultiPointField',
'django.contrib.gis.forms.MultiPolygonField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.MultiPolygonField',
'django.contrib.gis.forms.PointField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.PointField',
'django.contrib.gis.forms.PolygonField': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.PolygonField'},
'django.contrib.gis.serializers': { 'django.contrib.gis.serializers.geojson': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/serializers.html#module-django.contrib.gis.serializers.geojson'},
'django.contrib.gis.utils': { 'django.contrib.gis.utils.layermapping': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/layermapping.html#module-django.contrib.gis.utils.layermapping',
'django.contrib.gis.utils.ogrinspect': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/ogrinspect.html#module-django.contrib.gis.utils.ogrinspect',
'django.contrib.gis.utils.LayerMapping': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/layermapping.html#django.contrib.gis.utils.LayerMapping',
'django.contrib.gis.utils.LayerMapping.save': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/layermapping.html#django.contrib.gis.utils.LayerMapping.save',
'django.contrib.gis.utils.mapping': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/ogrinspect.html#django.contrib.gis.utils.mapping'},
'django.contrib.messages': { 'django.contrib.messages.middleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.contrib.messages.middleware',
'django.contrib.messages.add_message': 'http://django.readthedocs.org/en/latest/ref/contrib/messages.html#django.contrib.messages.add_message',
'django.contrib.messages.get_messages': 'http://django.readthedocs.org/en/latest/ref/contrib/messages.html#django.contrib.messages.get_messages'},
'django.contrib.postgres': { 'django.contrib.postgres.aggregates': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#module-django.contrib.postgres.aggregates',
'django.contrib.postgres.constraints': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/constraints.html#module-django.contrib.postgres.constraints',
'django.contrib.postgres.expressions': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/expressions.html#module-django.contrib.postgres.expressions',
'django.contrib.postgres.indexes': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/indexes.html#module-django.contrib.postgres.indexes',
'django.contrib.postgres.validators': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/validators.html#module-django.contrib.postgres.validators'},
'django.contrib.sessions': { 'django.contrib.sessions.middleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.contrib.sessions.middleware'},
'django.contrib.sites': { 'django.contrib.sites.middleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.contrib.sites.middleware'},
'django.core': { 'django.core.checks': 'http://django.readthedocs.org/en/latest/topics/checks.html#module-django.core.checks',
'django.core.exceptions': 'http://django.readthedocs.org/en/latest/ref/exceptions.html#module-django.core.exceptions',
'django.core.files': 'http://django.readthedocs.org/en/latest/ref/files/index.html#module-django.core.files',
'django.core.mail': 'http://django.readthedocs.org/en/latest/topics/email.html#module-django.core.mail',
'django.core.management': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#module-django.core.management',
'django.core.paginator': 'http://django.readthedocs.org/en/latest/ref/paginator.html#module-django.core.paginator',
'django.core.signals': 'http://django.readthedocs.org/en/latest/ref/signals.html#module-django.core.signals',
'django.core.signing': 'http://django.readthedocs.org/en/latest/topics/signing.html#module-django.core.signing',
'django.core.validators': 'http://django.readthedocs.org/en/latest/ref/validators.html#module-django.core.validators'},
'django.core.files': { 'django.core.files.storage': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#module-django.core.files.storage',
'django.core.files.uploadedfile': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#module-django.core.files.uploadedfile',
'django.core.files.uploadhandler': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#module-django.core.files.uploadhandler',
'django.core.files.File': 'http://django.readthedocs.org/en/latest/ref/files/file.html#django.core.files.File',
'django.core.files.File.__iter__': 'http://django.readthedocs.org/en/latest/ref/files/file.html#django.core.files.File.__iter__',
'django.core.files.File.chunks': 'http://django.readthedocs.org/en/latest/ref/files/file.html#django.core.files.File.chunks',
'django.core.files.File.close': 'http://django.readthedocs.org/en/latest/ref/files/file.html#django.core.files.File.close',
'django.core.files.File.delete': 'http://django.readthedocs.org/en/latest/ref/files/file.html#django.core.files.File.delete',
'django.core.files.File.multiple_chunks': 'http://django.readthedocs.org/en/latest/ref/files/file.html#django.core.files.File.multiple_chunks',
'django.core.files.File.open': 'http://django.readthedocs.org/en/latest/ref/files/file.html#django.core.files.File.open',
'django.core.files.File.save': 'http://django.readthedocs.org/en/latest/ref/files/file.html#django.core.files.File.save',
'django.core.files.storage._open': 'http://django.readthedocs.org/en/latest/howto/custom-file-storage.html#django.core.files.storage._open',
'django.core.files.storage._save': 'http://django.readthedocs.org/en/latest/howto/custom-file-storage.html#django.core.files.storage._save',
'django.core.files.storage.get_alternative_name': 'http://django.readthedocs.org/en/latest/howto/custom-file-storage.html#django.core.files.storage.get_alternative_name',
'django.core.files.storage.get_available_name': 'http://django.readthedocs.org/en/latest/howto/custom-file-storage.html#django.core.files.storage.get_available_name',
'django.core.files.storage.get_valid_name': 'http://django.readthedocs.org/en/latest/howto/custom-file-storage.html#django.core.files.storage.get_valid_name'},
'django.db': { 'django.db.backends': 'http://django.readthedocs.org/en/latest/ref/signals.html#module-django.db.backends',
'django.db.migrations': 'http://django.readthedocs.org/en/latest/topics/migrations.html#module-django.db.migrations',
'django.db.models': 'http://django.readthedocs.org/en/latest/topics/db/models.html#module-django.db.models',
'django.db.transaction': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#module-django.db.transaction',
'django.db.models.as_sql': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.as_sql',
'django.db.models.as_vendorname': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.as_vendorname',
'django.db.models.from_queryset': 'http://django.readthedocs.org/en/latest/topics/db/managers.html#django.db.models.from_queryset',
'django.db.models.get_lookup': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.get_lookup',
'django.db.models.get_transform': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.get_transform'},
'django.db.backends.base': { 'django.db.backends.base.schema': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#module-django.db.backends.base.schema',
'django.db.backends.base.DatabaseWrapper.execute_wrapper': 'http://django.readthedocs.org/en/latest/topics/db/instrumentation.html#django.db.backends.base.DatabaseWrapper.execute_wrapper'},
'django.db.migrations': { 'django.db.migrations.operations': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#module-django.db.migrations.operations'},
'django.db.models': { 'django.db.models.constraints': 'http://django.readthedocs.org/en/latest/ref/models/constraints.html#module-django.db.models.constraints',
'django.db.models.fields': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#module-django.db.models.fields',
'django.db.models.functions': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#module-django.db.models.functions',
'django.db.models.indexes': 'http://django.readthedocs.org/en/latest/ref/models/indexes.html#module-django.db.models.indexes',
'django.db.models.lookups': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#module-django.db.models.lookups',
'django.db.models.options': 'http://django.readthedocs.org/en/latest/ref/models/meta.html#module-django.db.models.options',
'django.db.models.signals': 'http://django.readthedocs.org/en/latest/ref/signals.html#module-django.db.models.signals',
'django.db.models.Aggregate': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Aggregate',
'django.db.models.AutoField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.AutoField',
'django.db.models.Avg': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.Avg',
'django.db.models.BaseConstraint': 'http://django.readthedocs.org/en/latest/ref/models/constraints.html#django.db.models.BaseConstraint',
'django.db.models.BigAutoField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.BigAutoField',
'django.db.models.BigIntegerField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.BigIntegerField',
'django.db.models.BinaryField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.BinaryField',
'django.db.models.BooleanField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.BooleanField',
'django.db.models.CharField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.CharField',
'django.db.models.CheckConstraint': 'http://django.readthedocs.org/en/latest/ref/models/constraints.html#django.db.models.CheckConstraint',
'django.db.models.Count': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.Count',
'django.db.models.DateField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.DateField',
'django.db.models.DateTimeField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.DateTimeField',
'django.db.models.DecimalField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.DecimalField',
'django.db.models.DurationField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.DurationField',
'django.db.models.EmailField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.EmailField',
'django.db.models.Exists': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Exists',
'django.db.models.Expression': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Expression',
'django.db.models.ExpressionWrapper': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.ExpressionWrapper',
'django.db.models.F': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.F',
'django.db.models.Field': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field',
'django.db.models.FileField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.FileField',
'django.db.models.FilePathField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.FilePathField',
'django.db.models.FilteredRelation': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.FilteredRelation',
'django.db.models.FloatField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.FloatField',
'django.db.models.ForeignKey': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.ForeignKey',
'django.db.models.Func': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Func',
'django.db.models.GenericIPAddressField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.GenericIPAddressField',
'django.db.models.ImageField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.ImageField',
'django.db.models.Index': 'http://django.readthedocs.org/en/latest/ref/models/indexes.html#django.db.models.Index',
'django.db.models.IntegerField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.IntegerField',
'django.db.models.JSONField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.JSONField',
'django.db.models.Lookup': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.Lookup',
'django.db.models.Manager': 'http://django.readthedocs.org/en/latest/topics/db/managers.html#django.db.models.Manager',
'django.db.models.ManyToManyField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.ManyToManyField',
'django.db.models.Max': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.Max',
'django.db.models.Min': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.Min',
'django.db.models.Model': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model',
'django.db.models.OneToOneField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.OneToOneField',
'django.db.models.OuterRef': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.OuterRef',
'django.db.models.PositiveBigIntegerField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.PositiveBigIntegerField',
'django.db.models.PositiveIntegerField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.PositiveIntegerField',
'django.db.models.PositiveSmallIntegerField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.PositiveSmallIntegerField',
'django.db.models.Prefetch': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.Prefetch',
'django.db.models.Q': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.Q',
'django.db.models.SlugField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.SlugField',
'django.db.models.SmallAutoField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.SmallAutoField',
'django.db.models.SmallIntegerField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.SmallIntegerField',
'django.db.models.StdDev': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.StdDev',
'django.db.models.Subquery': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Subquery',
'django.db.models.Sum': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.Sum',
'django.db.models.TextField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.TextField',
'django.db.models.TimeField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.TimeField',
'django.db.models.Transform': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.Transform',
'django.db.models.URLField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.URLField',
'django.db.models.UUIDField': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.UUIDField',
'django.db.models.UniqueConstraint': 'http://django.readthedocs.org/en/latest/ref/models/constraints.html#django.db.models.UniqueConstraint',
'django.db.models.Value': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Value',
'django.db.models.Variance': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.Variance',
'django.db.models.BaseConstraint.validate': 'http://django.readthedocs.org/en/latest/ref/models/constraints.html#django.db.models.BaseConstraint.validate',
'django.db.models.CursorWrapper.callproc': 'http://django.readthedocs.org/en/latest/topics/db/sql.html#django.db.models.CursorWrapper.callproc',
'django.db.models.Expression.asc': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Expression.asc',
'django.db.models.Expression.convert_value': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Expression.convert_value',
'django.db.models.Expression.desc': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Expression.desc',
'django.db.models.Expression.get_group_by_cols': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Expression.get_group_by_cols',
'django.db.models.Expression.get_source_expressions': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Expression.get_source_expressions',
'django.db.models.Expression.relabeled_clone': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Expression.relabeled_clone',
'django.db.models.Expression.resolve_expression': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Expression.resolve_expression',
'django.db.models.Expression.reverse_ordering': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Expression.reverse_ordering',
'django.db.models.Expression.set_source_expressions': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Expression.set_source_expressions',
'django.db.models.Field.db_type': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.db_type',
'django.db.models.Field.deconstruct': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.deconstruct',
'django.db.models.Field.formfield': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.formfield',
'django.db.models.Field.from_db_value': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.from_db_value',
'django.db.models.Field.get_db_prep_save': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.get_db_prep_save',
'django.db.models.Field.get_db_prep_value': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.get_db_prep_value',
'django.db.models.Field.get_internal_type': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.get_internal_type',
'django.db.models.Field.get_prep_value': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.get_prep_value',
'django.db.models.Field.pre_save': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.pre_save',
'django.db.models.Field.rel_db_type': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.rel_db_type',
'django.db.models.Field.to_python': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.to_python',
'django.db.models.Field.value_from_object': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.value_from_object',
'django.db.models.Field.value_to_string': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.Field.value_to_string',
'django.db.models.Func.as_sql': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.Func.as_sql',
'django.db.models.Lookup.process_lhs': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.Lookup.process_lhs',
'django.db.models.Lookup.process_rhs': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.Lookup.process_rhs',
'django.db.models.Manager.raw': 'http://django.readthedocs.org/en/latest/topics/db/sql.html#django.db.models.Manager.raw',
'django.db.models.Model.__eq__': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.__eq__',
'django.db.models.Model.__hash__': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.__hash__',
'django.db.models.Model.__str__': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.__str__',
'django.db.models.Model.clean': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.clean',
'django.db.models.Model.clean_fields': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.clean_fields',
'django.db.models.Model.delete': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.delete',
'django.db.models.Model.from_db': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.from_db',
'django.db.models.Model.full_clean': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.full_clean',
'django.db.models.Model.get_FOO_display': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.get_FOO_display',
'django.db.models.Model.get_absolute_url': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.get_absolute_url',
'django.db.models.Model.get_deferred_fields': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.get_deferred_fields',
'django.db.models.Model.get_next_by_FOO': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.get_next_by_FOO',
'django.db.models.Model.get_previous_by_FOO': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.get_previous_by_FOO',
'django.db.models.Model.refresh_from_db': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.refresh_from_db',
'django.db.models.Model.save': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.save',
'django.db.models.Model.validate_constraints': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.validate_constraints',
'django.db.models.Model.validate_unique': 'http://django.readthedocs.org/en/latest/ref/models/instances.html#django.db.models.Model.validate_unique',
'django.db.models.SET': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.SET',
'django.db.models.prefetch_related_objects': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.prefetch_related_objects'},
'django.db.models.fields': { 'django.db.models.fields.related': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#module-django.db.models.fields.related'},
'django.forms': { 'django.forms.fields': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#module-django.forms.fields',
'django.forms.formsets': 'http://django.readthedocs.org/en/latest/ref/forms/formsets.html#module-django.forms.formsets',
'django.forms.models': 'http://django.readthedocs.org/en/latest/ref/forms/models.html#module-django.forms.models',
'django.forms.renderers': 'http://django.readthedocs.org/en/latest/ref/forms/renderers.html#module-django.forms.renderers',
'django.forms.widgets': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#module-django.forms.widgets',
'django.forms.BooleanField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.BooleanField',
'django.forms.BoundField': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.BoundField',
'django.forms.CharField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.CharField',
'django.forms.CheckboxInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.CheckboxInput',
'django.forms.CheckboxSelectMultiple': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.CheckboxSelectMultiple',
'django.forms.ChoiceField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.ChoiceField',
'django.forms.ClearableFileInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.ClearableFileInput',
'django.forms.ComboField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.ComboField',
'django.forms.DateField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.DateField',
'django.forms.DateInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.DateInput',
'django.forms.DateTimeField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.DateTimeField',
'django.forms.DateTimeInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.DateTimeInput',
'django.forms.DecimalField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.DecimalField',
'django.forms.DurationField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.DurationField',
'django.forms.EmailField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.EmailField',
'django.forms.EmailInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.EmailInput',
'django.forms.ErrorList': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.ErrorList',
'django.forms.Field': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.Field',
'django.forms.FileField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.FileField',
'django.forms.FileInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.FileInput',
'django.forms.FilePathField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.FilePathField',
'django.forms.FloatField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.FloatField',
'django.forms.Form': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form',
'django.forms.GenericIPAddressField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.GenericIPAddressField',
'django.forms.HiddenInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.HiddenInput',
'django.forms.ImageField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.ImageField',
'django.forms.IntegerField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.IntegerField',
'django.forms.JSONField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.JSONField',
'django.forms.ModelChoiceField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.ModelChoiceField',
'django.forms.ModelChoiceIterator': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.ModelChoiceIterator',
'django.forms.ModelChoiceIteratorValue': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.ModelChoiceIteratorValue',
'django.forms.ModelForm': 'http://django.readthedocs.org/en/latest/topics/forms/modelforms.html#django.forms.ModelForm',
'django.forms.ModelMultipleChoiceField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.ModelMultipleChoiceField',
'django.forms.MultiValueField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.MultiValueField',
'django.forms.MultiWidget': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.MultiWidget',
'django.forms.MultipleChoiceField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.MultipleChoiceField',
'django.forms.MultipleHiddenInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.MultipleHiddenInput',
'django.forms.NullBooleanField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.NullBooleanField',
'django.forms.NullBooleanSelect': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.NullBooleanSelect',
'django.forms.NumberInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.NumberInput',
'django.forms.PasswordInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.PasswordInput',
'django.forms.RadioSelect': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.RadioSelect',
'django.forms.RegexField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.RegexField',
'django.forms.Select': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.Select',
'django.forms.SelectDateWidget': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.SelectDateWidget',
'django.forms.SelectMultiple': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.SelectMultiple',
'django.forms.SlugField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.SlugField',
'django.forms.SplitDateTimeField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.SplitDateTimeField',
'django.forms.SplitDateTimeWidget': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.SplitDateTimeWidget',
'django.forms.SplitHiddenDateTimeWidget': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.SplitHiddenDateTimeWidget',
'django.forms.TextInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.TextInput',
'django.forms.Textarea': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.Textarea',
'django.forms.TimeField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.TimeField',
'django.forms.TimeInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.TimeInput',
'django.forms.TypedChoiceField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.TypedChoiceField',
'django.forms.TypedMultipleChoiceField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.TypedMultipleChoiceField',
'django.forms.URLField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.URLField',
'django.forms.URLInput': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.URLInput',
'django.forms.UUIDField': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.UUIDField',
'django.forms.Widget': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.Widget',
'django.forms.BoundField.as_hidden': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.BoundField.as_hidden',
'django.forms.BoundField.as_widget': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.BoundField.as_widget',
'django.forms.BoundField.css_classes': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.BoundField.css_classes',
'django.forms.BoundField.label_tag': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.BoundField.label_tag',
'django.forms.BoundField.legend_tag': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.BoundField.legend_tag',
'django.forms.BoundField.value': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.BoundField.value',
'django.forms.ErrorList.as_text': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.ErrorList.as_text',
'django.forms.ErrorList.as_ul': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.ErrorList.as_ul',
'django.forms.ErrorList.get_context': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.ErrorList.get_context',
'django.forms.ErrorList.render': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.ErrorList.render',
'django.forms.Field.clean': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.Field.clean',
'django.forms.Field.get_bound_field': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Field.get_bound_field',
'django.forms.Field.has_changed': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.Field.has_changed',
'django.forms.Form.add_error': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.add_error',
'django.forms.Form.as_div': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.as_div',
'django.forms.Form.as_p': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.as_p',
'django.forms.Form.as_table': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.as_table',
'django.forms.Form.as_ul': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.as_ul',
'django.forms.Form.clean': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.clean',
'django.forms.Form.get_context': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.get_context',
'django.forms.Form.get_initial_for_field': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.get_initial_for_field',
'django.forms.Form.has_changed': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.has_changed',
'django.forms.Form.has_error': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.has_error',
'django.forms.Form.is_multipart': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.is_multipart',
'django.forms.Form.is_valid': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.is_valid',
'django.forms.Form.non_field_errors': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.non_field_errors',
'django.forms.Form.order_fields': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.order_fields',
'django.forms.Form.render': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.render',
'django.forms.ModelChoiceIterator.__iter__': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.ModelChoiceIterator.__iter__',
'django.forms.ModelChoiceIteratorValue.__str__': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.ModelChoiceIteratorValue.__str__',
'django.forms.MultiValueField.compress': 'http://django.readthedocs.org/en/latest/ref/forms/fields.html#django.forms.MultiValueField.compress',
'django.forms.MultiWidget.decompress': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.MultiWidget.decompress',
'django.forms.MultiWidget.get_context': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.MultiWidget.get_context',
'django.forms.Widget.format_value': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.Widget.format_value',
'django.forms.Widget.get_context': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.Widget.get_context',
'django.forms.Widget.id_for_label': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.Widget.id_for_label',
'django.forms.Widget.render': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.Widget.render',
'django.forms.Widget.use_required_attribute': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.Widget.use_required_attribute',
'django.forms.Widget.value_from_datadict': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.Widget.value_from_datadict',
'django.forms.Widget.value_omitted_from_data': 'http://django.readthedocs.org/en/latest/ref/forms/widgets.html#django.forms.Widget.value_omitted_from_data'},
'django.middleware': { 'django.middleware.cache': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.middleware.cache',
'django.middleware.clickjacking': 'http://django.readthedocs.org/en/latest/ref/clickjacking.html#module-django.middleware.clickjacking',
'django.middleware.common': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.middleware.common',
'django.middleware.csrf': 'http://django.readthedocs.org/en/latest/ref/csrf.html#module-django.middleware.csrf',
'django.middleware.gzip': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.middleware.gzip',
'django.middleware.http': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.middleware.http',
'django.middleware.locale': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.middleware.locale',
'django.middleware.security': 'http://django.readthedocs.org/en/latest/ref/middleware.html#module-django.middleware.security'},
'django.template': { 'django.template.backends': 'http://django.readthedocs.org/en/latest/topics/templates.html#module-django.template.backends',
'django.template.loader': 'http://django.readthedocs.org/en/latest/topics/templates.html#module-django.template.loader',
'django.template.response': 'http://django.readthedocs.org/en/latest/ref/template-response.html#module-django.template.response',
'django.template.Context': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Context',
'django.template.Engine': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Engine',
'django.template.RequestContext': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.RequestContext',
'django.template.Template': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Template',
'django.template.Context.flatten': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Context.flatten',
'django.template.Context.get': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Context.get',
'django.template.Context.pop': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Context.pop',
'django.template.Context.push': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Context.push',
'django.template.Context.setdefault': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Context.setdefault',
'django.template.Context.update': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Context.update',
'django.template.Engine.from_string': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Engine.from_string',
'django.template.Engine.get_default': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Engine.get_default',
'django.template.Engine.get_template': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Engine.get_template',
'django.template.Engine.select_template': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Engine.select_template',
'django.template.Library.filter': 'http://django.readthedocs.org/en/latest/howto/custom-template-tags.html#django.template.Library.filter',
'django.template.Library.inclusion_tag': 'http://django.readthedocs.org/en/latest/howto/custom-template-tags.html#django.template.Library.inclusion_tag',
'django.template.Library.simple_tag': 'http://django.readthedocs.org/en/latest/howto/custom-template-tags.html#django.template.Library.simple_tag',
'django.template.Template.render': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.Template.render',
'django.template.defaultfilters.stringfilter': 'http://django.readthedocs.org/en/latest/howto/custom-template-tags.html#django.template.defaultfilters.stringfilter'},
'django.template.backends': { 'django.template.backends.django': 'http://django.readthedocs.org/en/latest/topics/templates.html#module-django.template.backends.django',
'django.template.backends.jinja2': 'http://django.readthedocs.org/en/latest/topics/templates.html#module-django.template.backends.jinja2'},
'django.test': { 'django.test.signals': 'http://django.readthedocs.org/en/latest/ref/signals.html#module-django.test.signals',
'django.test.utils': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#module-django.test.utils',
'django.test.Client': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client',
'django.test.LiveServerTestCase': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.LiveServerTestCase',
'django.test.RequestFactory': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.RequestFactory',
'django.test.Response': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Response',
'django.test.SimpleTestCase': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase',
'django.test.TestCase': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.TestCase',
'django.test.TransactionTestCase': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.TransactionTestCase',
'django.test.Client.delete': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.delete',
'django.test.Client.force_login': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.force_login',
'django.test.Client.get': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.get',
'django.test.Client.head': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.head',
'django.test.Client.login': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.login',
'django.test.Client.logout': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.logout',
'django.test.Client.options': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.options',
'django.test.Client.patch': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.patch',
'django.test.Client.post': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.post',
'django.test.Client.put': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.put',
'django.test.Client.trace': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Client.trace',
'django.test.Response.json': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.Response.json',
'django.test.SimpleTestCase.assertContains': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertContains',
'django.test.SimpleTestCase.assertFieldOutput': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertFieldOutput',
'django.test.SimpleTestCase.assertFormError': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertFormError',
'django.test.SimpleTestCase.assertFormsetError': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertFormsetError',
'django.test.SimpleTestCase.assertHTMLEqual': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertHTMLEqual',
'django.test.SimpleTestCase.assertHTMLNotEqual': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertHTMLNotEqual',
'django.test.SimpleTestCase.assertInHTML': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertInHTML',
'django.test.SimpleTestCase.assertJSONEqual': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertJSONEqual',
'django.test.SimpleTestCase.assertJSONNotEqual': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertJSONNotEqual',
'django.test.SimpleTestCase.assertNotContains': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertNotContains',
'django.test.SimpleTestCase.assertRaisesMessage': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertRaisesMessage',
'django.test.SimpleTestCase.assertRedirects': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertRedirects',
'django.test.SimpleTestCase.assertTemplateNotUsed': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertTemplateNotUsed',
'django.test.SimpleTestCase.assertTemplateUsed': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertTemplateUsed',
'django.test.SimpleTestCase.assertURLEqual': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertURLEqual',
'django.test.SimpleTestCase.assertWarnsMessage': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertWarnsMessage',
'django.test.SimpleTestCase.assertXMLEqual': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertXMLEqual',
'django.test.SimpleTestCase.assertXMLNotEqual': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.assertXMLNotEqual',
'django.test.SimpleTestCase.modify_settings': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.modify_settings',
'django.test.SimpleTestCase.settings': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.SimpleTestCase.settings',
'django.test.TestCase.captureOnCommitCallbacks': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.TestCase.captureOnCommitCallbacks',
'django.test.TestCase.setUpTestData': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.TestCase.setUpTestData',
'django.test.TransactionTestCase.assertNumQueries': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.TransactionTestCase.assertNumQueries',
'django.test.TransactionTestCase.assertQuerysetEqual': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.TransactionTestCase.assertQuerysetEqual',
'django.test.modify_settings': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.modify_settings',
'django.test.override_settings': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.override_settings',
'django.test.skipIfDBFeature': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.skipIfDBFeature',
'django.test.skipUnlessDBFeature': 'http://django.readthedocs.org/en/latest/topics/testing/tools.html#django.test.skipUnlessDBFeature'},
'django.urls': { 'django.urls.conf': 'http://django.readthedocs.org/en/latest/ref/urls.html#module-django.urls.conf',
'django.urls.ResolverMatch': 'http://django.readthedocs.org/en/latest/ref/urlresolvers.html#django.urls.ResolverMatch',
'django.urls.get_script_prefix': 'http://django.readthedocs.org/en/latest/ref/urlresolvers.html#django.urls.get_script_prefix',
'django.urls.include': 'http://django.readthedocs.org/en/latest/ref/urls.html#django.urls.include',
'django.urls.path': 'http://django.readthedocs.org/en/latest/ref/urls.html#django.urls.path',
'django.urls.re_path': 'http://django.readthedocs.org/en/latest/ref/urls.html#django.urls.re_path',
'django.urls.register_converter': 'http://django.readthedocs.org/en/latest/ref/urls.html#django.urls.register_converter',
'django.urls.resolve': 'http://django.readthedocs.org/en/latest/ref/urlresolvers.html#django.urls.resolve',
'django.urls.reverse': 'http://django.readthedocs.org/en/latest/ref/urlresolvers.html#django.urls.reverse',
'django.urls.reverse_lazy': 'http://django.readthedocs.org/en/latest/ref/urlresolvers.html#django.urls.reverse_lazy'},
'django.utils': { 'django.utils.cache': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.cache',
'django.utils.dateparse': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.dateparse',
'django.utils.decorators': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.decorators',
'django.utils.encoding': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.encoding',
'django.utils.feedgenerator': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.feedgenerator',
'django.utils.functional': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.functional',
'django.utils.html': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.html',
'django.utils.http': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.http',
'django.utils.log': 'http://django.readthedocs.org/en/latest/ref/logging.html#module-django.utils.log',
'django.utils.module_loading': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.module_loading',
'django.utils.safestring': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.safestring',
'django.utils.text': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.text',
'django.utils.timezone': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.timezone',
'django.utils.translation': 'http://django.readthedocs.org/en/latest/ref/utils.html#module-django.utils.translation'},
'django.views.decorators': { 'django.views.decorators.cache': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#module-django.views.decorators.cache',
'django.views.decorators.common': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#module-django.views.decorators.common',
'django.views.decorators.csrf': 'http://django.readthedocs.org/en/latest/ref/csrf.html#module-django.views.decorators.csrf',
'django.views.decorators.gzip': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#module-django.views.decorators.gzip',
'django.views.decorators.http': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#module-django.views.decorators.http',
'django.views.decorators.vary': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#module-django.views.decorators.vary'},
'django.views.generic': { 'django.views.generic.dates': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#module-django.views.generic.dates'},
'django.views': {'django.views.i18n': 'http://django.readthedocs.org/en/latest/topics/i18n/translation.html#module-django.views.i18n'},
'django.apps': { 'django.apps.AppConfig': 'http://django.readthedocs.org/en/latest/ref/applications.html#django.apps.AppConfig',
'django.apps.AppConfig.get_model': 'http://django.readthedocs.org/en/latest/ref/applications.html#django.apps.AppConfig.get_model',
'django.apps.AppConfig.get_models': 'http://django.readthedocs.org/en/latest/ref/applications.html#django.apps.AppConfig.get_models',
'django.apps.AppConfig.ready': 'http://django.readthedocs.org/en/latest/ref/applications.html#django.apps.AppConfig.ready',
'django.apps.apps.get_app_config': 'http://django.readthedocs.org/en/latest/ref/applications.html#django.apps.apps.get_app_config',
'django.apps.apps.get_app_configs': 'http://django.readthedocs.org/en/latest/ref/applications.html#django.apps.apps.get_app_configs',
'django.apps.apps.get_model': 'http://django.readthedocs.org/en/latest/ref/applications.html#django.apps.apps.get_model',
'django.apps.apps.is_installed': 'http://django.readthedocs.org/en/latest/ref/applications.html#django.apps.apps.is_installed'},
'django.contrib.admin': { 'django.contrib.admin.AdminSite': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.AdminSite',
'django.contrib.admin.InlineModelAdmin': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.InlineModelAdmin',
'django.contrib.admin.ModelAdmin': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin',
'django.contrib.admin.StackedInline': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.StackedInline',
'django.contrib.admin.TabularInline': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.TabularInline',
'django.contrib.admin.AdminSite.add_action': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/actions.html#django.contrib.admin.AdminSite.add_action',
'django.contrib.admin.AdminSite.disable_action': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/actions.html#django.contrib.admin.AdminSite.disable_action',
'django.contrib.admin.AdminSite.each_context': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.AdminSite.each_context',
'django.contrib.admin.AdminSite.get_app_list': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.AdminSite.get_app_list',
'django.contrib.admin.AdminSite.has_permission': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.AdminSite.has_permission',
'django.contrib.admin.AdminSite.register': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.AdminSite.register',
'django.contrib.admin.AdminSite.unregister': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.AdminSite.unregister',
'django.contrib.admin.InlineModelAdmin.get_extra': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.InlineModelAdmin.get_extra',
'django.contrib.admin.InlineModelAdmin.get_formset': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.InlineModelAdmin.get_formset',
'django.contrib.admin.InlineModelAdmin.get_max_num': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.InlineModelAdmin.get_max_num',
'django.contrib.admin.InlineModelAdmin.get_min_num': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.InlineModelAdmin.get_min_num',
'django.contrib.admin.InlineModelAdmin.has_add_permission': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.InlineModelAdmin.has_add_permission',
'django.contrib.admin.InlineModelAdmin.has_change_permission': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.InlineModelAdmin.has_change_permission',
'django.contrib.admin.InlineModelAdmin.has_delete_permission': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.InlineModelAdmin.has_delete_permission',
'django.contrib.admin.ModelAdmin.add_view': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.add_view',
'django.contrib.admin.ModelAdmin.change_view': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.change_view',
'django.contrib.admin.ModelAdmin.changelist_view': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.changelist_view',
'django.contrib.admin.ModelAdmin.delete_model': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.delete_model',
'django.contrib.admin.ModelAdmin.delete_queryset': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.delete_queryset',
'django.contrib.admin.ModelAdmin.delete_view': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.delete_view',
'django.contrib.admin.ModelAdmin.formfield_for_choice_field': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.formfield_for_choice_field',
'django.contrib.admin.ModelAdmin.formfield_for_foreignkey': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.formfield_for_foreignkey',
'django.contrib.admin.ModelAdmin.formfield_for_manytomany': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.formfield_for_manytomany',
'django.contrib.admin.ModelAdmin.get_actions': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/actions.html#django.contrib.admin.ModelAdmin.get_actions',
'django.contrib.admin.ModelAdmin.get_autocomplete_fields': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_autocomplete_fields',
'django.contrib.admin.ModelAdmin.get_changeform_initial_data': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_changeform_initial_data',
'django.contrib.admin.ModelAdmin.get_changelist': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_changelist',
'django.contrib.admin.ModelAdmin.get_changelist_form': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_changelist_form',
'django.contrib.admin.ModelAdmin.get_changelist_formset': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_changelist_formset',
'django.contrib.admin.ModelAdmin.get_deleted_objects': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_deleted_objects',
'django.contrib.admin.ModelAdmin.get_exclude': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_exclude',
'django.contrib.admin.ModelAdmin.get_fields': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_fields',
'django.contrib.admin.ModelAdmin.get_fieldsets': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_fieldsets',
'django.contrib.admin.ModelAdmin.get_form': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_form',
'django.contrib.admin.ModelAdmin.get_formset_kwargs': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_formset_kwargs',
'django.contrib.admin.ModelAdmin.get_formsets_with_inlines': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_formsets_with_inlines',
'django.contrib.admin.ModelAdmin.get_inline_instances': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_inline_instances',
'django.contrib.admin.ModelAdmin.get_inlines': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_inlines',
'django.contrib.admin.ModelAdmin.get_list_display': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_list_display',
'django.contrib.admin.ModelAdmin.get_list_display_links': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_list_display_links',
'django.contrib.admin.ModelAdmin.get_list_filter': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_list_filter',
'django.contrib.admin.ModelAdmin.get_list_select_related': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_list_select_related',
'django.contrib.admin.ModelAdmin.get_ordering': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_ordering',
'django.contrib.admin.ModelAdmin.get_paginator': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_paginator',
'django.contrib.admin.ModelAdmin.get_prepopulated_fields': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_prepopulated_fields',
'django.contrib.admin.ModelAdmin.get_queryset': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_queryset',
'django.contrib.admin.ModelAdmin.get_readonly_fields': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_readonly_fields',
'django.contrib.admin.ModelAdmin.get_search_fields': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_search_fields',
'django.contrib.admin.ModelAdmin.get_search_results': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_search_results',
'django.contrib.admin.ModelAdmin.get_sortable_by': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_sortable_by',
'django.contrib.admin.ModelAdmin.get_urls': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.get_urls',
'django.contrib.admin.ModelAdmin.has_add_permission': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.has_add_permission',
'django.contrib.admin.ModelAdmin.has_change_permission': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.has_change_permission',
'django.contrib.admin.ModelAdmin.has_delete_permission': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.has_delete_permission',
'django.contrib.admin.ModelAdmin.has_module_permission': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.has_module_permission',
'django.contrib.admin.ModelAdmin.has_view_permission': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.has_view_permission',
'django.contrib.admin.ModelAdmin.history_view': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.history_view',
'django.contrib.admin.ModelAdmin.lookup_allowed': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.lookup_allowed',
'django.contrib.admin.ModelAdmin.message_user': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.message_user',
'django.contrib.admin.ModelAdmin.response_add': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.response_add',
'django.contrib.admin.ModelAdmin.response_change': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.response_change',
'django.contrib.admin.ModelAdmin.response_delete': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.response_delete',
'django.contrib.admin.ModelAdmin.save_formset': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.save_formset',
'django.contrib.admin.ModelAdmin.save_model': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.save_model',
'django.contrib.admin.ModelAdmin.save_related': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.ModelAdmin.save_related',
'django.contrib.admin.action': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/actions.html#django.contrib.admin.action',
'django.contrib.admin.autodiscover': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.autodiscover',
'django.contrib.admin.display': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.display',
'django.contrib.admin.register': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.register'},
'django.contrib.admin.apps': { 'django.contrib.admin.apps.AdminConfig': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.apps.AdminConfig',
'django.contrib.admin.apps.SimpleAdminConfig': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.apps.SimpleAdminConfig'},
'django.contrib.admin.models': { 'django.contrib.admin.models.LogEntry': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.models.LogEntry',
'django.contrib.admin.models.LogEntry.get_change_message': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.models.LogEntry.get_change_message',
'django.contrib.admin.models.LogEntry.get_edited_object': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.models.LogEntry.get_edited_object'},
'django.contrib.auth.backends': { 'django.contrib.auth.backends.AllowAllUsersModelBackend': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.AllowAllUsersModelBackend',
'django.contrib.auth.backends.AllowAllUsersRemoteUserBackend': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.AllowAllUsersRemoteUserBackend',
'django.contrib.auth.backends.BaseBackend': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.BaseBackend',
'django.contrib.auth.backends.ModelBackend': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.ModelBackend',
'django.contrib.auth.backends.RemoteUserBackend': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.RemoteUserBackend',
'django.contrib.auth.backends.BaseBackend.get_all_permissions': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.BaseBackend.get_all_permissions',
'django.contrib.auth.backends.BaseBackend.get_group_permissions': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.BaseBackend.get_group_permissions',
'django.contrib.auth.backends.BaseBackend.get_user_permissions': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.BaseBackend.get_user_permissions',
'django.contrib.auth.backends.BaseBackend.has_perm': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.BaseBackend.has_perm',
'django.contrib.auth.backends.ModelBackend.authenticate': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.ModelBackend.authenticate',
'django.contrib.auth.backends.ModelBackend.get_all_permissions': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.ModelBackend.get_all_permissions',
'django.contrib.auth.backends.ModelBackend.get_group_permissions': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.ModelBackend.get_group_permissions',
'django.contrib.auth.backends.ModelBackend.get_user_permissions': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.ModelBackend.get_user_permissions',
'django.contrib.auth.backends.ModelBackend.has_module_perms': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.ModelBackend.has_module_perms',
'django.contrib.auth.backends.ModelBackend.has_perm': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.ModelBackend.has_perm',
'django.contrib.auth.backends.ModelBackend.user_can_authenticate': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.ModelBackend.user_can_authenticate',
'django.contrib.auth.backends.ModelBackend.with_perm': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.ModelBackend.with_perm',
'django.contrib.auth.backends.RemoteUserBackend.authenticate': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.RemoteUserBackend.authenticate',
'django.contrib.auth.backends.RemoteUserBackend.clean_username': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.RemoteUserBackend.clean_username',
'django.contrib.auth.backends.RemoteUserBackend.configure_user': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.RemoteUserBackend.configure_user',
'django.contrib.auth.backends.RemoteUserBackend.user_can_authenticate': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.backends.RemoteUserBackend.user_can_authenticate'},
'django.contrib.auth.forms': { 'django.contrib.auth.forms.AdminPasswordChangeForm': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.forms.AdminPasswordChangeForm',
'django.contrib.auth.forms.AuthenticationForm': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.forms.AuthenticationForm',
'django.contrib.auth.forms.PasswordChangeForm': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.forms.PasswordChangeForm',
'django.contrib.auth.forms.PasswordResetForm': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.forms.PasswordResetForm',
'django.contrib.auth.forms.SetPasswordForm': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.forms.SetPasswordForm',
'django.contrib.auth.forms.UserChangeForm': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.forms.UserChangeForm',
'django.contrib.auth.forms.UserCreationForm': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.forms.UserCreationForm',
'django.contrib.auth.forms.AuthenticationForm.confirm_login_allowed': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.forms.AuthenticationForm.confirm_login_allowed',
'django.contrib.auth.forms.PasswordResetForm.send_mail': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.forms.PasswordResetForm.send_mail'},
'django.contrib.auth.middleware': { 'django.contrib.auth.middleware.AuthenticationMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.PersistentRemoteUserMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.contrib.auth.middleware.PersistentRemoteUserMiddleware',
'django.contrib.auth.middleware.RemoteUserMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.contrib.auth.middleware.RemoteUserMiddleware'},
'django.contrib.auth.mixins': { 'django.contrib.auth.mixins.AccessMixin': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.AccessMixin',
'django.contrib.auth.mixins.LoginRequiredMixin': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.LoginRequiredMixin',
'django.contrib.auth.mixins.PermissionRequiredMixin': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.PermissionRequiredMixin',
'django.contrib.auth.mixins.UserPassesTestMixin': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.UserPassesTestMixin',
'django.contrib.auth.mixins.AccessMixin.get_login_url': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.AccessMixin.get_login_url',
'django.contrib.auth.mixins.AccessMixin.get_permission_denied_message': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.AccessMixin.get_permission_denied_message',
'django.contrib.auth.mixins.AccessMixin.get_redirect_field_name': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.AccessMixin.get_redirect_field_name',
'django.contrib.auth.mixins.AccessMixin.handle_no_permission': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.AccessMixin.handle_no_permission',
'django.contrib.auth.mixins.PermissionRequiredMixin.get_permission_required': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.PermissionRequiredMixin.get_permission_required',
'django.contrib.auth.mixins.PermissionRequiredMixin.has_permission': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.PermissionRequiredMixin.has_permission',
'django.contrib.auth.mixins.UserPassesTestMixin.get_test_func': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.UserPassesTestMixin.get_test_func',
'django.contrib.auth.mixins.UserPassesTestMixin.test_func': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.mixins.UserPassesTestMixin.test_func'},
'django.contrib.auth.models': { 'django.contrib.auth.models.AbstractBaseUser': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractBaseUser',
'django.contrib.auth.models.AbstractUser': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractUser',
'django.contrib.auth.models.AnonymousUser': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.AnonymousUser',
'django.contrib.auth.models.BaseUserManager': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.BaseUserManager',
'django.contrib.auth.models.CustomUser': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.CustomUser',
'django.contrib.auth.models.CustomUserManager': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.CustomUserManager',
'django.contrib.auth.models.Group': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.Group',
'django.contrib.auth.models.Permission': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.Permission',
'django.contrib.auth.models.PermissionsMixin': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.PermissionsMixin',
'django.contrib.auth.models.User': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User',
'django.contrib.auth.models.UserManager': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.UserManager',
'django.contrib.auth.models.AbstractBaseUser.check_password': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractBaseUser.check_password',
'django.contrib.auth.models.AbstractBaseUser.clean': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractBaseUser.clean',
'django.contrib.auth.models.AbstractBaseUser.get_email_field_name': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractBaseUser.get_email_field_name',
'django.contrib.auth.models.AbstractBaseUser.get_session_auth_hash': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractBaseUser.get_session_auth_hash',
'django.contrib.auth.models.AbstractBaseUser.get_username': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractBaseUser.get_username',
'django.contrib.auth.models.AbstractBaseUser.has_usable_password': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractBaseUser.has_usable_password',
'django.contrib.auth.models.AbstractBaseUser.normalize_username': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractBaseUser.normalize_username',
'django.contrib.auth.models.AbstractBaseUser.set_password': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractBaseUser.set_password',
'django.contrib.auth.models.AbstractBaseUser.set_unusable_password': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractBaseUser.set_unusable_password',
'django.contrib.auth.models.AbstractUser.clean': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.AbstractUser.clean',
'django.contrib.auth.models.BaseUserManager.get_by_natural_key': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.BaseUserManager.get_by_natural_key',
'django.contrib.auth.models.BaseUserManager.make_random_password': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.BaseUserManager.make_random_password',
'django.contrib.auth.models.BaseUserManager.normalize_email': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.BaseUserManager.normalize_email',
'django.contrib.auth.models.CustomUser.get_full_name': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.CustomUser.get_full_name',
'django.contrib.auth.models.CustomUser.get_short_name': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.CustomUser.get_short_name',
'django.contrib.auth.models.CustomUserManager.create_superuser': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.CustomUserManager.create_superuser',
'django.contrib.auth.models.CustomUserManager.create_user': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.CustomUserManager.create_user',
'django.contrib.auth.models.PermissionsMixin.get_all_permissions': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.PermissionsMixin.get_all_permissions',
'django.contrib.auth.models.PermissionsMixin.get_group_permissions': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.PermissionsMixin.get_group_permissions',
'django.contrib.auth.models.PermissionsMixin.get_user_permissions': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.PermissionsMixin.get_user_permissions',
'django.contrib.auth.models.PermissionsMixin.has_module_perms': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.PermissionsMixin.has_module_perms',
'django.contrib.auth.models.PermissionsMixin.has_perm': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.PermissionsMixin.has_perm',
'django.contrib.auth.models.PermissionsMixin.has_perms': 'http://django.readthedocs.org/en/latest/topics/auth/customizing.html#django.contrib.auth.models.PermissionsMixin.has_perms',
'django.contrib.auth.models.User.check_password': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.check_password',
'django.contrib.auth.models.User.email_user': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.email_user',
'django.contrib.auth.models.User.get_all_permissions': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.get_all_permissions',
'django.contrib.auth.models.User.get_full_name': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.get_full_name',
'django.contrib.auth.models.User.get_group_permissions': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.get_group_permissions',
'django.contrib.auth.models.User.get_short_name': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.get_short_name',
'django.contrib.auth.models.User.get_user_permissions': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.get_user_permissions',
'django.contrib.auth.models.User.get_username': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.get_username',
'django.contrib.auth.models.User.has_module_perms': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.has_module_perms',
'django.contrib.auth.models.User.has_perm': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.has_perm',
'django.contrib.auth.models.User.has_perms': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.has_perms',
'django.contrib.auth.models.User.has_usable_password': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.has_usable_password',
'django.contrib.auth.models.User.set_password': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.set_password',
'django.contrib.auth.models.User.set_unusable_password': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.User.set_unusable_password',
'django.contrib.auth.models.UserManager.create_superuser': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.UserManager.create_superuser',
'django.contrib.auth.models.UserManager.create_user': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.UserManager.create_user',
'django.contrib.auth.models.UserManager.with_perm': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.models.UserManager.with_perm'},
'django.contrib.auth.password_validation': { 'django.contrib.auth.password_validation.CommonPasswordValidator': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.password_validation.CommonPasswordValidator',
'django.contrib.auth.password_validation.MinimumLengthValidator': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.password_validation.MinimumLengthValidator',
'django.contrib.auth.password_validation.NumericPasswordValidator': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.password_validation.NumericPasswordValidator',
'django.contrib.auth.password_validation.UserAttributeSimilarityValidator': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
'django.contrib.auth.password_validation.get_password_validators': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.password_validation.get_password_validators',
'django.contrib.auth.password_validation.password_changed': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.password_validation.password_changed',
'django.contrib.auth.password_validation.password_validators_help_text_html': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.password_validation.password_validators_help_text_html',
'django.contrib.auth.password_validation.password_validators_help_texts': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.password_validation.password_validators_help_texts',
'django.contrib.auth.password_validation.validate_password': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.password_validation.validate_password'},
'django.contrib.auth.validators': { 'django.contrib.auth.validators.ASCIIUsernameValidator': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.validators.ASCIIUsernameValidator',
'django.contrib.auth.validators.UnicodeUsernameValidator': 'http://django.readthedocs.org/en/latest/ref/contrib/auth.html#django.contrib.auth.validators.UnicodeUsernameValidator'},
'django.contrib.auth.views': { 'django.contrib.auth.views.LoginView': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.LoginView',
'django.contrib.auth.views.LogoutView': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.LogoutView',
'django.contrib.auth.views.PasswordChangeDoneView': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.PasswordChangeDoneView',
'django.contrib.auth.views.PasswordChangeView': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.PasswordChangeView',
'django.contrib.auth.views.PasswordResetCompleteView': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.PasswordResetCompleteView',
'django.contrib.auth.views.PasswordResetConfirmView': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.PasswordResetConfirmView',
'django.contrib.auth.views.PasswordResetDoneView': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.PasswordResetDoneView',
'django.contrib.auth.views.PasswordResetView': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.PasswordResetView',
'django.contrib.auth.views.LoginView.get_default_redirect_url': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.LoginView.get_default_redirect_url',
'django.contrib.auth.views.logout_then_login': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.logout_then_login',
'django.contrib.auth.views.redirect_to_login': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.views.redirect_to_login'},
'django.contrib.contenttypes.admin': { 'django.contrib.contenttypes.admin.GenericInlineModelAdmin': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.admin.GenericInlineModelAdmin',
'django.contrib.contenttypes.admin.GenericStackedInline': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.admin.GenericStackedInline',
'django.contrib.contenttypes.admin.GenericTabularInline': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.admin.GenericTabularInline'},
'django.contrib.contenttypes.fields': { 'django.contrib.contenttypes.fields.GenericForeignKey': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.fields.GenericForeignKey',
'django.contrib.contenttypes.fields.GenericRelation': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.fields.GenericRelation'},
'django.contrib.contenttypes.forms': { 'django.contrib.contenttypes.forms.BaseGenericInlineFormSet': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.forms.BaseGenericInlineFormSet',
'django.contrib.contenttypes.forms.generic_inlineformset_factory': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.forms.generic_inlineformset_factory'},
'django.contrib.contenttypes.models': { 'django.contrib.contenttypes.models.ContentType': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.models.ContentType',
'django.contrib.contenttypes.models.ContentTypeManager': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.models.ContentTypeManager',
'django.contrib.contenttypes.models.ContentType.get_object_for_this_type': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.models.ContentType.get_object_for_this_type',
'django.contrib.contenttypes.models.ContentType.model_class': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.models.ContentType.model_class',
'django.contrib.contenttypes.models.ContentTypeManager.clear_cache': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.models.ContentTypeManager.clear_cache',
'django.contrib.contenttypes.models.ContentTypeManager.get_by_natural_key': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.models.ContentTypeManager.get_by_natural_key',
'django.contrib.contenttypes.models.ContentTypeManager.get_for_id': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.models.ContentTypeManager.get_for_id',
'django.contrib.contenttypes.models.ContentTypeManager.get_for_model': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.models.ContentTypeManager.get_for_model',
'django.contrib.contenttypes.models.ContentTypeManager.get_for_models': 'http://django.readthedocs.org/en/latest/ref/contrib/contenttypes.html#django.contrib.contenttypes.models.ContentTypeManager.get_for_models'},
'django.contrib.flatpages.middleware': { 'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware': 'http://django.readthedocs.org/en/latest/ref/contrib/flatpages.html#django.contrib.flatpages.middleware.FlatpageFallbackMiddleware'},
'django.contrib.flatpages.models': { 'django.contrib.flatpages.models.FlatPage': 'http://django.readthedocs.org/en/latest/ref/contrib/flatpages.html#django.contrib.flatpages.models.FlatPage'},
'django.contrib.flatpages.sitemaps': { 'django.contrib.flatpages.sitemaps.FlatPageSitemap': 'http://django.readthedocs.org/en/latest/ref/contrib/flatpages.html#django.contrib.flatpages.sitemaps.FlatPageSitemap'},
'django.contrib.gis.admin': { 'django.contrib.gis.admin.GISModelAdmin': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/admin.html#django.contrib.gis.admin.GISModelAdmin',
'django.contrib.gis.admin.GeoModelAdmin': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/admin.html#django.contrib.gis.admin.GeoModelAdmin',
'django.contrib.gis.admin.OSMGeoAdmin': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/admin.html#django.contrib.gis.admin.OSMGeoAdmin'},
'django.contrib.gis.db.models.functions': { 'django.contrib.gis.db.models.functions.Area': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Area',
'django.contrib.gis.db.models.functions.AsGML': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.AsGML',
'django.contrib.gis.db.models.functions.AsGeoJSON': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.AsGeoJSON',
'django.contrib.gis.db.models.functions.AsKML': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.AsKML',
'django.contrib.gis.db.models.functions.AsSVG': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.AsSVG',
'django.contrib.gis.db.models.functions.AsWKB': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.AsWKB',
'django.contrib.gis.db.models.functions.AsWKT': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.AsWKT',
'django.contrib.gis.db.models.functions.Azimuth': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Azimuth',
'django.contrib.gis.db.models.functions.BoundingCircle': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.BoundingCircle',
'django.contrib.gis.db.models.functions.Centroid': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Centroid',
'django.contrib.gis.db.models.functions.Difference': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Difference',
'django.contrib.gis.db.models.functions.Distance': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Distance',
'django.contrib.gis.db.models.functions.Envelope': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Envelope',
'django.contrib.gis.db.models.functions.ForcePolygonCW': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.ForcePolygonCW',
'django.contrib.gis.db.models.functions.GeoHash': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.GeoHash',
'django.contrib.gis.db.models.functions.GeometryDistance': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.GeometryDistance',
'django.contrib.gis.db.models.functions.Intersection': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Intersection',
'django.contrib.gis.db.models.functions.IsValid': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.IsValid',
'django.contrib.gis.db.models.functions.Length': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Length',
'django.contrib.gis.db.models.functions.LineLocatePoint': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.LineLocatePoint',
'django.contrib.gis.db.models.functions.MakeValid': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.MakeValid',
'django.contrib.gis.db.models.functions.MemSize': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.MemSize',
'django.contrib.gis.db.models.functions.NumGeometries': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.NumGeometries',
'django.contrib.gis.db.models.functions.NumPoints': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.NumPoints',
'django.contrib.gis.db.models.functions.Perimeter': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Perimeter',
'django.contrib.gis.db.models.functions.PointOnSurface': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.PointOnSurface',
'django.contrib.gis.db.models.functions.Reverse': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Reverse',
'django.contrib.gis.db.models.functions.Scale': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Scale',
'django.contrib.gis.db.models.functions.SnapToGrid': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.SnapToGrid',
'django.contrib.gis.db.models.functions.SymDifference': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.SymDifference',
'django.contrib.gis.db.models.functions.Transform': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Transform',
'django.contrib.gis.db.models.functions.Translate': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Translate',
'django.contrib.gis.db.models.functions.Union': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/functions.html#django.contrib.gis.db.models.functions.Union'},
'django.contrib.gis.feeds': { 'django.contrib.gis.feeds.Feed': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/feeds.html#django.contrib.gis.feeds.Feed',
'django.contrib.gis.feeds.GeoAtom1Feed': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/feeds.html#django.contrib.gis.feeds.GeoAtom1Feed',
'django.contrib.gis.feeds.GeoRSSFeed': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/feeds.html#django.contrib.gis.feeds.GeoRSSFeed',
'django.contrib.gis.feeds.W3CGeoFeed': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/feeds.html#django.contrib.gis.feeds.W3CGeoFeed',
'django.contrib.gis.feeds.Feed.geometry': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/feeds.html#django.contrib.gis.feeds.Feed.geometry',
'django.contrib.gis.feeds.Feed.item_geometry': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/feeds.html#django.contrib.gis.feeds.Feed.item_geometry'},
'django.contrib.gis.forms.widgets': { 'django.contrib.gis.forms.widgets.BaseGeometryWidget': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.widgets.BaseGeometryWidget',
'django.contrib.gis.forms.widgets.OSMWidget': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.widgets.OSMWidget',
'django.contrib.gis.forms.widgets.OpenLayersWidget': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/forms-api.html#django.contrib.gis.forms.widgets.OpenLayersWidget'},
'django.contrib.gis.gdal': { 'django.contrib.gis.gdal.CoordTransform': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.CoordTransform',
'django.contrib.gis.gdal.DataSource': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.DataSource',
'django.contrib.gis.gdal.Driver': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Driver',
'django.contrib.gis.gdal.Envelope': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Envelope',
'django.contrib.gis.gdal.Feature': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Feature',
'django.contrib.gis.gdal.Field': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Field',
'django.contrib.gis.gdal.GDALBand': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.GDALBand',
'django.contrib.gis.gdal.GDALRaster': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.GDALRaster',
'django.contrib.gis.gdal.GeometryCollection': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.GeometryCollection',
'django.contrib.gis.gdal.Layer': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Layer',
'django.contrib.gis.gdal.LineString': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.LineString',
'django.contrib.gis.gdal.OGRGeomType': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeomType',
'django.contrib.gis.gdal.OGRGeometry': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry',
'django.contrib.gis.gdal.Point': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Point',
'django.contrib.gis.gdal.Polygon': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Polygon',
'django.contrib.gis.gdal.SpatialReference': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference',
'django.contrib.gis.gdal.Envelope.expand_to_include': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Envelope.expand_to_include',
'django.contrib.gis.gdal.Field.as_datetime': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Field.as_datetime',
'django.contrib.gis.gdal.Field.as_double': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Field.as_double',
'django.contrib.gis.gdal.Field.as_int': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Field.as_int',
'django.contrib.gis.gdal.Field.as_string': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Field.as_string',
'django.contrib.gis.gdal.GDALBand.color_interp': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.GDALBand.color_interp',
'django.contrib.gis.gdal.GDALBand.data': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.GDALBand.data',
'django.contrib.gis.gdal.GDALBand.datatype': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.GDALBand.datatype',
'django.contrib.gis.gdal.GDALBand.statistics': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.GDALBand.statistics',
'django.contrib.gis.gdal.GDALRaster.transform': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.GDALRaster.transform',
'django.contrib.gis.gdal.GDALRaster.warp': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.GDALRaster.warp',
'django.contrib.gis.gdal.GeometryCollection.add': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.GeometryCollection.add',
'django.contrib.gis.gdal.Layer.get_fields': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Layer.get_fields',
'django.contrib.gis.gdal.Layer.get_geoms': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Layer.get_geoms',
'django.contrib.gis.gdal.Layer.test_capability': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.Layer.test_capability',
'django.contrib.gis.gdal.OGRGeometry.__getitem__': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.__getitem__',
'django.contrib.gis.gdal.OGRGeometry.__iter__': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.__iter__',
'django.contrib.gis.gdal.OGRGeometry.__len__': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.__len__',
'django.contrib.gis.gdal.OGRGeometry.boundary': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.boundary',
'django.contrib.gis.gdal.OGRGeometry.clone': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.clone',
'django.contrib.gis.gdal.OGRGeometry.close_rings': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.close_rings',
'django.contrib.gis.gdal.OGRGeometry.contains': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.contains',
'django.contrib.gis.gdal.OGRGeometry.crosses': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.crosses',
'django.contrib.gis.gdal.OGRGeometry.difference': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.difference',
'django.contrib.gis.gdal.OGRGeometry.disjoint': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.disjoint',
'django.contrib.gis.gdal.OGRGeometry.equals': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.equals',
'django.contrib.gis.gdal.OGRGeometry.from_bbox': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.from_bbox',
'django.contrib.gis.gdal.OGRGeometry.from_gml': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.from_gml',
'django.contrib.gis.gdal.OGRGeometry.intersection': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.intersection',
'django.contrib.gis.gdal.OGRGeometry.intersects': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.intersects',
'django.contrib.gis.gdal.OGRGeometry.overlaps': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.overlaps',
'django.contrib.gis.gdal.OGRGeometry.sym_difference': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.sym_difference',
'django.contrib.gis.gdal.OGRGeometry.touches': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.touches',
'django.contrib.gis.gdal.OGRGeometry.transform': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.transform',
'django.contrib.gis.gdal.OGRGeometry.union': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.union',
'django.contrib.gis.gdal.OGRGeometry.within': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.OGRGeometry.within',
'django.contrib.gis.gdal.SpatialReference.__getitem__': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.__getitem__',
'django.contrib.gis.gdal.SpatialReference.attr_value': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.attr_value',
'django.contrib.gis.gdal.SpatialReference.auth_code': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.auth_code',
'django.contrib.gis.gdal.SpatialReference.auth_name': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.auth_name',
'django.contrib.gis.gdal.SpatialReference.clone': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.clone',
'django.contrib.gis.gdal.SpatialReference.from_esri': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.from_esri',
'django.contrib.gis.gdal.SpatialReference.identify_epsg': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.identify_epsg',
'django.contrib.gis.gdal.SpatialReference.import_epsg': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.import_epsg',
'django.contrib.gis.gdal.SpatialReference.import_proj': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.import_proj',
'django.contrib.gis.gdal.SpatialReference.import_user_input': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.import_user_input',
'django.contrib.gis.gdal.SpatialReference.import_wkt': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.import_wkt',
'django.contrib.gis.gdal.SpatialReference.import_xml': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.import_xml',
'django.contrib.gis.gdal.SpatialReference.to_esri': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.to_esri',
'django.contrib.gis.gdal.SpatialReference.validate': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/gdal.html#django.contrib.gis.gdal.SpatialReference.validate'},
'django.contrib.gis.geoip2': { 'django.contrib.gis.geoip2.GeoIP2': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#django.contrib.gis.geoip2.GeoIP2',
'django.contrib.gis.geoip2.GeoIP2.city': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#django.contrib.gis.geoip2.GeoIP2.city',
'django.contrib.gis.geoip2.GeoIP2.coords': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#django.contrib.gis.geoip2.GeoIP2.coords',
'django.contrib.gis.geoip2.GeoIP2.country': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#django.contrib.gis.geoip2.GeoIP2.country',
'django.contrib.gis.geoip2.GeoIP2.country_code': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#django.contrib.gis.geoip2.GeoIP2.country_code',
'django.contrib.gis.geoip2.GeoIP2.country_name': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#django.contrib.gis.geoip2.GeoIP2.country_name',
'django.contrib.gis.geoip2.GeoIP2.geos': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#django.contrib.gis.geoip2.GeoIP2.geos',
'django.contrib.gis.geoip2.GeoIP2.lat_lon': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#django.contrib.gis.geoip2.GeoIP2.lat_lon',
'django.contrib.gis.geoip2.GeoIP2.lon_lat': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#django.contrib.gis.geoip2.GeoIP2.lon_lat',
'django.contrib.gis.geoip2.GeoIP2.open': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geoip2.html#django.contrib.gis.geoip2.GeoIP2.open'},
'django.contrib.gis.geos': { 'django.contrib.gis.geos.GEOSGeometry': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry',
'django.contrib.gis.geos.GeometryCollection': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GeometryCollection',
'django.contrib.gis.geos.LineString': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.LineString',
'django.contrib.gis.geos.LinearRing': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.LinearRing',
'django.contrib.gis.geos.MultiLineString': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.MultiLineString',
'django.contrib.gis.geos.MultiPoint': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.MultiPoint',
'django.contrib.gis.geos.MultiPolygon': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.MultiPolygon',
'django.contrib.gis.geos.Point': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.Point',
'django.contrib.gis.geos.Polygon': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.Polygon',
'django.contrib.gis.geos.PreparedGeometry': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.PreparedGeometry',
'django.contrib.gis.geos.WKBReader': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.WKBReader',
'django.contrib.gis.geos.WKBWriter': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.WKBWriter',
'django.contrib.gis.geos.WKTReader': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.WKTReader',
'django.contrib.gis.geos.WKTWriter': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.WKTWriter',
'django.contrib.gis.geos.GEOSGeometry.buffer': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.buffer',
'django.contrib.gis.geos.GEOSGeometry.buffer_with_style': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.buffer_with_style',
'django.contrib.gis.geos.GEOSGeometry.clone': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.clone',
'django.contrib.gis.geos.GEOSGeometry.contains': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.contains',
'django.contrib.gis.geos.GEOSGeometry.covers': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.covers',
'django.contrib.gis.geos.GEOSGeometry.crosses': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.crosses',
'django.contrib.gis.geos.GEOSGeometry.difference': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.difference',
'django.contrib.gis.geos.GEOSGeometry.disjoint': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.disjoint',
'django.contrib.gis.geos.GEOSGeometry.distance': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.distance',
'django.contrib.gis.geos.GEOSGeometry.equals': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.equals',
'django.contrib.gis.geos.GEOSGeometry.equals_exact': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.equals_exact',
'django.contrib.gis.geos.GEOSGeometry.from_gml': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.from_gml',
'django.contrib.gis.geos.GEOSGeometry.interpolate': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.interpolate',
'django.contrib.gis.geos.GEOSGeometry.interpolate_normalized': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.interpolate_normalized',
'django.contrib.gis.geos.GEOSGeometry.intersection': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.intersection',
'django.contrib.gis.geos.GEOSGeometry.intersects': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.intersects',
'django.contrib.gis.geos.GEOSGeometry.make_valid': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.make_valid',
'django.contrib.gis.geos.GEOSGeometry.normalize': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.normalize',
'django.contrib.gis.geos.GEOSGeometry.overlaps': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.overlaps',
'django.contrib.gis.geos.GEOSGeometry.project': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.project',
'django.contrib.gis.geos.GEOSGeometry.project_normalized': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.project_normalized',
'django.contrib.gis.geos.GEOSGeometry.relate': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.relate',
'django.contrib.gis.geos.GEOSGeometry.relate_pattern': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.relate_pattern',
'django.contrib.gis.geos.GEOSGeometry.simplify': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.simplify',
'django.contrib.gis.geos.GEOSGeometry.sym_difference': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.sym_difference',
'django.contrib.gis.geos.GEOSGeometry.touches': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.touches',
'django.contrib.gis.geos.GEOSGeometry.transform': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.transform',
'django.contrib.gis.geos.GEOSGeometry.union': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.union',
'django.contrib.gis.geos.GEOSGeometry.within': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.GEOSGeometry.within',
'django.contrib.gis.geos.Polygon.from_bbox': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.Polygon.from_bbox',
'django.contrib.gis.geos.PreparedGeometry.contains': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.PreparedGeometry.contains',
'django.contrib.gis.geos.PreparedGeometry.contains_properly': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.PreparedGeometry.contains_properly',
'django.contrib.gis.geos.PreparedGeometry.covers': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.PreparedGeometry.covers',
'django.contrib.gis.geos.PreparedGeometry.crosses': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.PreparedGeometry.crosses',
'django.contrib.gis.geos.PreparedGeometry.disjoint': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.PreparedGeometry.disjoint',
'django.contrib.gis.geos.PreparedGeometry.intersects': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.PreparedGeometry.intersects',
'django.contrib.gis.geos.PreparedGeometry.overlaps': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.PreparedGeometry.overlaps',
'django.contrib.gis.geos.PreparedGeometry.touches': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.PreparedGeometry.touches',
'django.contrib.gis.geos.PreparedGeometry.within': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.PreparedGeometry.within',
'django.contrib.gis.geos.WKBWriter.write': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.WKBWriter.write',
'django.contrib.gis.geos.WKBWriter.write_hex': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.WKBWriter.write_hex',
'django.contrib.gis.geos.WKTWriter.write': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.WKTWriter.write',
'django.contrib.gis.geos.fromfile': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.fromfile',
'django.contrib.gis.geos.fromstr': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/geos.html#django.contrib.gis.geos.fromstr'},
'django.contrib.gis.measure': { 'django.contrib.gis.measure.A': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/measure.html#django.contrib.gis.measure.A',
'django.contrib.gis.measure.Area': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/measure.html#django.contrib.gis.measure.Area',
'django.contrib.gis.measure.D': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/measure.html#django.contrib.gis.measure.D',
'django.contrib.gis.measure.Distance': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/measure.html#django.contrib.gis.measure.Distance',
'django.contrib.gis.measure.Area.__getattr__': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/measure.html#django.contrib.gis.measure.Area.__getattr__',
'django.contrib.gis.measure.Area.unit_attname': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/measure.html#django.contrib.gis.measure.Area.unit_attname',
'django.contrib.gis.measure.Distance.__getattr__': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/measure.html#django.contrib.gis.measure.Distance.__getattr__',
'django.contrib.gis.measure.Distance.unit_attname': 'http://django.readthedocs.org/en/latest/ref/contrib/gis/measure.html#django.contrib.gis.measure.Distance.unit_attname'},
'django.contrib.messages.middleware': { 'django.contrib.messages.middleware.MessageMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.contrib.messages.middleware.MessageMiddleware'},
'django.contrib.messages.storage.base': { 'django.contrib.messages.storage.base.BaseStorage': 'http://django.readthedocs.org/en/latest/ref/contrib/messages.html#django.contrib.messages.storage.base.BaseStorage',
'django.contrib.messages.storage.base.Message': 'http://django.readthedocs.org/en/latest/ref/contrib/messages.html#django.contrib.messages.storage.base.Message'},
'django.contrib.messages.storage.cookie': { 'django.contrib.messages.storage.cookie.CookieStorage': 'http://django.readthedocs.org/en/latest/ref/contrib/messages.html#django.contrib.messages.storage.cookie.CookieStorage'},
'django.contrib.messages.storage.fallback': { 'django.contrib.messages.storage.fallback.FallbackStorage': 'http://django.readthedocs.org/en/latest/ref/contrib/messages.html#django.contrib.messages.storage.fallback.FallbackStorage'},
'django.contrib.messages.storage.session': { 'django.contrib.messages.storage.session.SessionStorage': 'http://django.readthedocs.org/en/latest/ref/contrib/messages.html#django.contrib.messages.storage.session.SessionStorage'},
'django.contrib.messages.views': { 'django.contrib.messages.views.SuccessMessageMixin': 'http://django.readthedocs.org/en/latest/ref/contrib/messages.html#django.contrib.messages.views.SuccessMessageMixin',
'django.contrib.messages.views.SuccessMessageMixin.get_success_message': 'http://django.readthedocs.org/en/latest/ref/contrib/messages.html#django.contrib.messages.views.SuccessMessageMixin.get_success_message'},
'django.contrib.postgres.aggregates': { 'django.contrib.postgres.aggregates.ArrayAgg': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.ArrayAgg',
'django.contrib.postgres.aggregates.BitAnd': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.BitAnd',
'django.contrib.postgres.aggregates.BitOr': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.BitOr',
'django.contrib.postgres.aggregates.BitXor': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.BitXor',
'django.contrib.postgres.aggregates.BoolAnd': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.BoolAnd',
'django.contrib.postgres.aggregates.BoolOr': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.BoolOr',
'django.contrib.postgres.aggregates.Corr': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.Corr',
'django.contrib.postgres.aggregates.CovarPop': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.CovarPop',
'django.contrib.postgres.aggregates.JSONBAgg': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.JSONBAgg',
'django.contrib.postgres.aggregates.RegrAvgX': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.RegrAvgX',
'django.contrib.postgres.aggregates.RegrAvgY': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.RegrAvgY',
'django.contrib.postgres.aggregates.RegrCount': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.RegrCount',
'django.contrib.postgres.aggregates.RegrIntercept': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.RegrIntercept',
'django.contrib.postgres.aggregates.RegrR2': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.RegrR2',
'django.contrib.postgres.aggregates.RegrSXX': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.RegrSXX',
'django.contrib.postgres.aggregates.RegrSXY': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.RegrSXY',
'django.contrib.postgres.aggregates.RegrSYY': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.RegrSYY',
'django.contrib.postgres.aggregates.RegrSlope': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.RegrSlope',
'django.contrib.postgres.aggregates.StringAgg': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/aggregates.html#django.contrib.postgres.aggregates.StringAgg'},
'django.contrib.postgres.constraints': { 'django.contrib.postgres.constraints.ExclusionConstraint': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/constraints.html#django.contrib.postgres.constraints.ExclusionConstraint'},
'django.contrib.postgres.expressions': { 'django.contrib.postgres.expressions.ArraySubquery': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/expressions.html#django.contrib.postgres.expressions.ArraySubquery'},
'django.contrib.postgres.fields': { 'django.contrib.postgres.fields.ArrayField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.ArrayField',
'django.contrib.postgres.fields.BigIntegerRangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.BigIntegerRangeField',
'django.contrib.postgres.fields.CICharField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.CICharField',
'django.contrib.postgres.fields.CIEmailField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.CIEmailField',
'django.contrib.postgres.fields.CIText': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.CIText',
'django.contrib.postgres.fields.CITextField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.CITextField',
'django.contrib.postgres.fields.DateRangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.DateRangeField',
'django.contrib.postgres.fields.DateTimeRangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.DateTimeRangeField',
'django.contrib.postgres.fields.DecimalRangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.DecimalRangeField',
'django.contrib.postgres.fields.HStoreField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.HStoreField',
'django.contrib.postgres.fields.IntegerRangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.IntegerRangeField',
'django.contrib.postgres.fields.RangeBoundary': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.RangeBoundary',
'django.contrib.postgres.fields.RangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.RangeField',
'django.contrib.postgres.fields.RangeOperators': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.RangeOperators'},
'django.contrib.postgres.fields.django.contrib.postgres.forms': { 'django.contrib.postgres.fields.django.contrib.postgres.forms.BaseRangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/fields.html#django.contrib.postgres.fields.django.contrib.postgres.forms.BaseRangeField'},
'django.contrib.postgres.forms': { 'django.contrib.postgres.forms.DateRangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/forms.html#django.contrib.postgres.forms.DateRangeField',
'django.contrib.postgres.forms.DateTimeRangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/forms.html#django.contrib.postgres.forms.DateTimeRangeField',
'django.contrib.postgres.forms.DecimalRangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/forms.html#django.contrib.postgres.forms.DecimalRangeField',
'django.contrib.postgres.forms.HStoreField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/forms.html#django.contrib.postgres.forms.HStoreField',
'django.contrib.postgres.forms.IntegerRangeField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/forms.html#django.contrib.postgres.forms.IntegerRangeField',
'django.contrib.postgres.forms.RangeWidget': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/forms.html#django.contrib.postgres.forms.RangeWidget',
'django.contrib.postgres.forms.SimpleArrayField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/forms.html#django.contrib.postgres.forms.SimpleArrayField',
'django.contrib.postgres.forms.SplitArrayField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/forms.html#django.contrib.postgres.forms.SplitArrayField',
'django.contrib.postgres.forms.RangeWidget.decompress': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/forms.html#django.contrib.postgres.forms.RangeWidget.decompress'},
'django.contrib.postgres.functions': { 'django.contrib.postgres.functions.RandomUUID': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/functions.html#django.contrib.postgres.functions.RandomUUID',
'django.contrib.postgres.functions.TransactionNow': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/functions.html#django.contrib.postgres.functions.TransactionNow'},
'django.contrib.postgres.indexes': { 'django.contrib.postgres.indexes.BTreeIndex': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/indexes.html#django.contrib.postgres.indexes.BTreeIndex',
'django.contrib.postgres.indexes.BloomIndex': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/indexes.html#django.contrib.postgres.indexes.BloomIndex',
'django.contrib.postgres.indexes.BrinIndex': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/indexes.html#django.contrib.postgres.indexes.BrinIndex',
'django.contrib.postgres.indexes.GinIndex': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/indexes.html#django.contrib.postgres.indexes.GinIndex',
'django.contrib.postgres.indexes.GistIndex': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/indexes.html#django.contrib.postgres.indexes.GistIndex',
'django.contrib.postgres.indexes.HashIndex': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/indexes.html#django.contrib.postgres.indexes.HashIndex',
'django.contrib.postgres.indexes.OpClass': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/indexes.html#django.contrib.postgres.indexes.OpClass',
'django.contrib.postgres.indexes.SpGistIndex': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/indexes.html#django.contrib.postgres.indexes.SpGistIndex'},
'django.contrib.postgres.operations': { 'django.contrib.postgres.operations.AddConstraintNotValid': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.AddConstraintNotValid',
'django.contrib.postgres.operations.AddIndexConcurrently': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.AddIndexConcurrently',
'django.contrib.postgres.operations.BloomExtension': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.BloomExtension',
'django.contrib.postgres.operations.BtreeGinExtension': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.BtreeGinExtension',
'django.contrib.postgres.operations.BtreeGistExtension': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.BtreeGistExtension',
'django.contrib.postgres.operations.CITextExtension': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.CITextExtension',
'django.contrib.postgres.operations.CreateCollation': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.CreateCollation',
'django.contrib.postgres.operations.CreateExtension': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.CreateExtension',
'django.contrib.postgres.operations.CryptoExtension': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.CryptoExtension',
'django.contrib.postgres.operations.HStoreExtension': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.HStoreExtension',
'django.contrib.postgres.operations.RemoveCollation': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.RemoveCollation',
'django.contrib.postgres.operations.RemoveIndexConcurrently': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.RemoveIndexConcurrently',
'django.contrib.postgres.operations.TrigramExtension': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.TrigramExtension',
'django.contrib.postgres.operations.UnaccentExtension': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.UnaccentExtension',
'django.contrib.postgres.operations.ValidateConstraint': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/operations.html#django.contrib.postgres.operations.ValidateConstraint'},
'django.contrib.postgres.search': { 'django.contrib.postgres.search.SearchHeadline': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/search.html#django.contrib.postgres.search.SearchHeadline',
'django.contrib.postgres.search.SearchQuery': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/search.html#django.contrib.postgres.search.SearchQuery',
'django.contrib.postgres.search.SearchRank': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/search.html#django.contrib.postgres.search.SearchRank',
'django.contrib.postgres.search.SearchVector': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/search.html#django.contrib.postgres.search.SearchVector',
'django.contrib.postgres.search.SearchVectorField': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/search.html#django.contrib.postgres.search.SearchVectorField',
'django.contrib.postgres.search.TrigramDistance': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/search.html#django.contrib.postgres.search.TrigramDistance',
'django.contrib.postgres.search.TrigramSimilarity': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/search.html#django.contrib.postgres.search.TrigramSimilarity',
'django.contrib.postgres.search.TrigramWordDistance': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/search.html#django.contrib.postgres.search.TrigramWordDistance',
'django.contrib.postgres.search.TrigramWordSimilarity': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/search.html#django.contrib.postgres.search.TrigramWordSimilarity'},
'django.contrib.postgres.validators': { 'django.contrib.postgres.validators.KeysValidator': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/validators.html#django.contrib.postgres.validators.KeysValidator',
'django.contrib.postgres.validators.RangeMaxValueValidator': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/validators.html#django.contrib.postgres.validators.RangeMaxValueValidator',
'django.contrib.postgres.validators.RangeMinValueValidator': 'http://django.readthedocs.org/en/latest/ref/contrib/postgres/validators.html#django.contrib.postgres.validators.RangeMinValueValidator'},
'django.contrib.redirects.middleware': { 'django.contrib.redirects.middleware.RedirectFallbackMiddleware': 'http://django.readthedocs.org/en/latest/ref/contrib/redirects.html#django.contrib.redirects.middleware.RedirectFallbackMiddleware'},
'django.contrib.redirects.models': { 'django.contrib.redirects.models.Redirect': 'http://django.readthedocs.org/en/latest/ref/contrib/redirects.html#django.contrib.redirects.models.Redirect'},
'django.contrib.sessions.backends.base': { 'django.contrib.sessions.backends.base.SessionBase': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase',
'django.contrib.sessions.backends.base.SessionBase.__contains__': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.__contains__',
'django.contrib.sessions.backends.base.SessionBase.__delitem__': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.__delitem__',
'django.contrib.sessions.backends.base.SessionBase.__getitem__': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.__getitem__',
'django.contrib.sessions.backends.base.SessionBase.__setitem__': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.__setitem__',
'django.contrib.sessions.backends.base.SessionBase.clear': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.clear',
'django.contrib.sessions.backends.base.SessionBase.clear_expired': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.clear_expired',
'django.contrib.sessions.backends.base.SessionBase.cycle_key': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.cycle_key',
'django.contrib.sessions.backends.base.SessionBase.delete_test_cookie': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.delete_test_cookie',
'django.contrib.sessions.backends.base.SessionBase.flush': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.flush',
'django.contrib.sessions.backends.base.SessionBase.get': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.get',
'django.contrib.sessions.backends.base.SessionBase.get_expire_at_browser_close': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.get_expire_at_browser_close',
'django.contrib.sessions.backends.base.SessionBase.get_expiry_age': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.get_expiry_age',
'django.contrib.sessions.backends.base.SessionBase.get_expiry_date': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.get_expiry_date',
'django.contrib.sessions.backends.base.SessionBase.get_session_cookie_age': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.get_session_cookie_age',
'django.contrib.sessions.backends.base.SessionBase.items': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.items',
'django.contrib.sessions.backends.base.SessionBase.keys': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.keys',
'django.contrib.sessions.backends.base.SessionBase.pop': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.pop',
'django.contrib.sessions.backends.base.SessionBase.set_expiry': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.set_expiry',
'django.contrib.sessions.backends.base.SessionBase.set_test_cookie': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.set_test_cookie',
'django.contrib.sessions.backends.base.SessionBase.setdefault': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.setdefault',
'django.contrib.sessions.backends.base.SessionBase.test_cookie_worked': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.base.SessionBase.test_cookie_worked'},
'django.contrib.sessions.backends.cached_db': { 'django.contrib.sessions.backends.cached_db.SessionStore': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.cached_db.SessionStore'},
'django.contrib.sessions.backends.db': { 'django.contrib.sessions.backends.db.SessionStore': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.db.SessionStore',
'django.contrib.sessions.backends.db.SessionStore.create_model_instance': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.db.SessionStore.create_model_instance',
'django.contrib.sessions.backends.db.SessionStore.get_model_class': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.backends.db.SessionStore.get_model_class'},
'django.contrib.sessions.base_session': { 'django.contrib.sessions.base_session.AbstractBaseSession': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.base_session.AbstractBaseSession',
'django.contrib.sessions.base_session.BaseSessionManager': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.base_session.BaseSessionManager',
'django.contrib.sessions.base_session.AbstractBaseSession.get_decoded': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.base_session.AbstractBaseSession.get_decoded',
'django.contrib.sessions.base_session.AbstractBaseSession.get_session_store_class': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.base_session.AbstractBaseSession.get_session_store_class',
'django.contrib.sessions.base_session.BaseSessionManager.encode': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.base_session.BaseSessionManager.encode',
'django.contrib.sessions.base_session.BaseSessionManager.save': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.base_session.BaseSessionManager.save'},
'django.contrib.sessions.middleware': { 'django.contrib.sessions.middleware.SessionMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.contrib.sessions.middleware.SessionMiddleware'},
'django.contrib.sessions.serializers': { 'django.contrib.sessions.serializers.JSONSerializer': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.serializers.JSONSerializer',
'django.contrib.sessions.serializers.PickleSerializer': 'http://django.readthedocs.org/en/latest/topics/http/sessions.html#django.contrib.sessions.serializers.PickleSerializer'},
'django.contrib.sitemaps': { 'django.contrib.sitemaps.GenericSitemap': 'http://django.readthedocs.org/en/latest/ref/contrib/sitemaps.html#django.contrib.sitemaps.GenericSitemap',
'django.contrib.sitemaps.Sitemap': 'http://django.readthedocs.org/en/latest/ref/contrib/sitemaps.html#django.contrib.sitemaps.Sitemap',
'django.contrib.sitemaps.Sitemap.get_latest_lastmod': 'http://django.readthedocs.org/en/latest/ref/contrib/sitemaps.html#django.contrib.sitemaps.Sitemap.get_latest_lastmod',
'django.contrib.sitemaps.ping_google': 'http://django.readthedocs.org/en/latest/ref/contrib/sitemaps.html#django.contrib.sitemaps.ping_google'},
'django.contrib.sites.managers': { 'django.contrib.sites.managers.CurrentSiteManager': 'http://django.readthedocs.org/en/latest/ref/contrib/sites.html#django.contrib.sites.managers.CurrentSiteManager'},
'django.contrib.sites.middleware': { 'django.contrib.sites.middleware.CurrentSiteMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.contrib.sites.middleware.CurrentSiteMiddleware'},
'django.contrib.sites.models': { 'django.contrib.sites.models.Site': 'http://django.readthedocs.org/en/latest/ref/contrib/sites.html#django.contrib.sites.models.Site'},
'django.contrib.sites.requests': { 'django.contrib.sites.requests.RequestSite': 'http://django.readthedocs.org/en/latest/ref/contrib/sites.html#django.contrib.sites.requests.RequestSite',
'django.contrib.sites.requests.RequestSite.__init__': 'http://django.readthedocs.org/en/latest/ref/contrib/sites.html#django.contrib.sites.requests.RequestSite.__init__'},
'django.contrib.staticfiles.storage': { 'django.contrib.staticfiles.storage.ManifestFilesMixin': 'http://django.readthedocs.org/en/latest/ref/contrib/staticfiles.html#django.contrib.staticfiles.storage.ManifestFilesMixin',
'django.contrib.staticfiles.storage.ManifestStaticFilesStorage': 'http://django.readthedocs.org/en/latest/ref/contrib/staticfiles.html#django.contrib.staticfiles.storage.ManifestStaticFilesStorage',
'django.contrib.staticfiles.storage.StaticFilesStorage': 'http://django.readthedocs.org/en/latest/ref/contrib/staticfiles.html#django.contrib.staticfiles.storage.StaticFilesStorage',
'django.contrib.staticfiles.storage.ManifestStaticFilesStorage.file_hash': 'http://django.readthedocs.org/en/latest/ref/contrib/staticfiles.html#django.contrib.staticfiles.storage.ManifestStaticFilesStorage.file_hash',
'django.contrib.staticfiles.storage.StaticFilesStorage.post_process': 'http://django.readthedocs.org/en/latest/ref/contrib/staticfiles.html#django.contrib.staticfiles.storage.StaticFilesStorage.post_process'},
'django.contrib.staticfiles.testing': { 'django.contrib.staticfiles.testing.StaticLiveServerTestCase': 'http://django.readthedocs.org/en/latest/ref/contrib/staticfiles.html#django.contrib.staticfiles.testing.StaticLiveServerTestCase'},
'django.contrib.syndication.views': { 'django.contrib.syndication.views.Feed': 'http://django.readthedocs.org/en/latest/ref/contrib/syndication.html#django.contrib.syndication.views.Feed'},
'django.core.checks': { 'django.core.checks.CheckMessage': 'http://django.readthedocs.org/en/latest/ref/checks.html#django.core.checks.CheckMessage',
'django.core.checks.Critical': 'http://django.readthedocs.org/en/latest/ref/checks.html#django.core.checks.Critical',
'django.core.checks.Debug': 'http://django.readthedocs.org/en/latest/ref/checks.html#django.core.checks.Debug',
'django.core.checks.Error': 'http://django.readthedocs.org/en/latest/ref/checks.html#django.core.checks.Error',
'django.core.checks.Info': 'http://django.readthedocs.org/en/latest/ref/checks.html#django.core.checks.Info',
'django.core.checks.Warning': 'http://django.readthedocs.org/en/latest/ref/checks.html#django.core.checks.Warning',
'django.core.checks.register': 'http://django.readthedocs.org/en/latest/topics/checks.html#django.core.checks.register'},
'django.core.files.base': { 'django.core.files.base.ContentFile': 'http://django.readthedocs.org/en/latest/ref/files/file.html#django.core.files.base.ContentFile'},
'django.core.files.images': { 'django.core.files.images.ImageFile': 'http://django.readthedocs.org/en/latest/ref/files/file.html#django.core.files.images.ImageFile'},
'django.core.files.storage': { 'django.core.files.storage.DefaultStorage': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.DefaultStorage',
'django.core.files.storage.FileSystemStorage': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.FileSystemStorage',
'django.core.files.storage.Storage': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage',
'django.core.files.storage.FileSystemStorage.get_created_time': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.FileSystemStorage.get_created_time',
'django.core.files.storage.Storage.delete': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.delete',
'django.core.files.storage.Storage.exists': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.exists',
'django.core.files.storage.Storage.generate_filename': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.generate_filename',
'django.core.files.storage.Storage.get_accessed_time': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.get_accessed_time',
'django.core.files.storage.Storage.get_alternative_name': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.get_alternative_name',
'django.core.files.storage.Storage.get_available_name': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.get_available_name',
'django.core.files.storage.Storage.get_created_time': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.get_created_time',
'django.core.files.storage.Storage.get_modified_time': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.get_modified_time',
'django.core.files.storage.Storage.get_valid_name': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.get_valid_name',
'django.core.files.storage.Storage.listdir': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.listdir',
'django.core.files.storage.Storage.open': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.open',
'django.core.files.storage.Storage.path': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.path',
'django.core.files.storage.Storage.save': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.save',
'django.core.files.storage.Storage.size': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.size',
'django.core.files.storage.Storage.url': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.Storage.url',
'django.core.files.storage.get_storage_class': 'http://django.readthedocs.org/en/latest/ref/files/storage.html#django.core.files.storage.get_storage_class'},
'django.core.files.uploadedfile': { 'django.core.files.uploadedfile.InMemoryUploadedFile': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadedfile.InMemoryUploadedFile',
'django.core.files.uploadedfile.TemporaryUploadedFile': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadedfile.TemporaryUploadedFile',
'django.core.files.uploadedfile.UploadedFile': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadedfile.UploadedFile',
'django.core.files.uploadedfile.TemporaryUploadedFile.temporary_file_path': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadedfile.TemporaryUploadedFile.temporary_file_path',
'django.core.files.uploadedfile.UploadedFile.chunks': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadedfile.UploadedFile.chunks',
'django.core.files.uploadedfile.UploadedFile.multiple_chunks': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadedfile.UploadedFile.multiple_chunks',
'django.core.files.uploadedfile.UploadedFile.read': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadedfile.UploadedFile.read'},
'django.core.files.uploadhandler': { 'django.core.files.uploadhandler.FileUploadHandler': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadhandler.FileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadhandler.TemporaryFileUploadHandler',
'django.core.files.uploadhandler.FileUploadHandler.file_complete': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadhandler.FileUploadHandler.file_complete',
'django.core.files.uploadhandler.FileUploadHandler.handle_raw_input': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadhandler.FileUploadHandler.handle_raw_input',
'django.core.files.uploadhandler.FileUploadHandler.new_file': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadhandler.FileUploadHandler.new_file',
'django.core.files.uploadhandler.FileUploadHandler.receive_data_chunk': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadhandler.FileUploadHandler.receive_data_chunk',
'django.core.files.uploadhandler.FileUploadHandler.upload_complete': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadhandler.FileUploadHandler.upload_complete',
'django.core.files.uploadhandler.FileUploadHandler.upload_interrupted': 'http://django.readthedocs.org/en/latest/ref/files/uploads.html#django.core.files.uploadhandler.FileUploadHandler.upload_interrupted'},
'django.core.mail': { 'django.core.mail.EmailMessage': 'http://django.readthedocs.org/en/latest/topics/email.html#django.core.mail.EmailMessage',
'django.core.mail.get_connection': 'http://django.readthedocs.org/en/latest/topics/email.html#django.core.mail.get_connection',
'django.core.mail.mail_admins': 'http://django.readthedocs.org/en/latest/topics/email.html#django.core.mail.mail_admins',
'django.core.mail.mail_managers': 'http://django.readthedocs.org/en/latest/topics/email.html#django.core.mail.mail_managers',
'django.core.mail.send_mail': 'http://django.readthedocs.org/en/latest/topics/email.html#django.core.mail.send_mail',
'django.core.mail.send_mass_mail': 'http://django.readthedocs.org/en/latest/topics/email.html#django.core.mail.send_mass_mail'},
'django.core.mail.backends.smtp': { 'django.core.mail.backends.smtp.EmailBackend': 'http://django.readthedocs.org/en/latest/topics/email.html#django.core.mail.backends.smtp.EmailBackend'},
'django.core.management': { 'django.core.management.AppCommand': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.AppCommand',
'django.core.management.BaseCommand': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.BaseCommand',
'django.core.management.LabelCommand': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.LabelCommand',
'django.core.management.AppCommand.handle_app_config': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.AppCommand.handle_app_config',
'django.core.management.BaseCommand.add_arguments': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.BaseCommand.add_arguments',
'django.core.management.BaseCommand.check': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.BaseCommand.check',
'django.core.management.BaseCommand.create_parser': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.BaseCommand.create_parser',
'django.core.management.BaseCommand.execute': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.BaseCommand.execute',
'django.core.management.BaseCommand.get_version': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.BaseCommand.get_version',
'django.core.management.BaseCommand.handle': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.BaseCommand.handle',
'django.core.management.LabelCommand.handle_label': 'http://django.readthedocs.org/en/latest/howto/custom-management-commands.html#django.core.management.LabelCommand.handle_label',
'django.core.management.call_command': 'http://django.readthedocs.org/en/latest/ref/django-admin.html#django.core.management.call_command'},
'django.core.paginator': { 'django.core.paginator.Page': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Page',
'django.core.paginator.Paginator': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Paginator',
'django.core.paginator.Page.end_index': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Page.end_index',
'django.core.paginator.Page.has_next': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Page.has_next',
'django.core.paginator.Page.has_other_pages': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Page.has_other_pages',
'django.core.paginator.Page.has_previous': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Page.has_previous',
'django.core.paginator.Page.next_page_number': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Page.next_page_number',
'django.core.paginator.Page.previous_page_number': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Page.previous_page_number',
'django.core.paginator.Page.start_index': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Page.start_index',
'django.core.paginator.Paginator.get_elided_page_range': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Paginator.get_elided_page_range',
'django.core.paginator.Paginator.get_page': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Paginator.get_page',
'django.core.paginator.Paginator.page': 'http://django.readthedocs.org/en/latest/ref/paginator.html#django.core.paginator.Paginator.page'},
'django.core.serializers.json': { 'django.core.serializers.json.DjangoJSONEncoder': 'http://django.readthedocs.org/en/latest/topics/serialization.html#django.core.serializers.json.DjangoJSONEncoder'},
'django.core.signing': { 'django.core.signing.Signer': 'http://django.readthedocs.org/en/latest/topics/signing.html#django.core.signing.Signer',
'django.core.signing.TimestampSigner': 'http://django.readthedocs.org/en/latest/topics/signing.html#django.core.signing.TimestampSigner',
'django.core.signing.TimestampSigner.sign': 'http://django.readthedocs.org/en/latest/topics/signing.html#django.core.signing.TimestampSigner.sign',
'django.core.signing.TimestampSigner.sign_object': 'http://django.readthedocs.org/en/latest/topics/signing.html#django.core.signing.TimestampSigner.sign_object',
'django.core.signing.TimestampSigner.unsign': 'http://django.readthedocs.org/en/latest/topics/signing.html#django.core.signing.TimestampSigner.unsign',
'django.core.signing.TimestampSigner.unsign_object': 'http://django.readthedocs.org/en/latest/topics/signing.html#django.core.signing.TimestampSigner.unsign_object',
'django.core.signing.dumps': 'http://django.readthedocs.org/en/latest/topics/signing.html#django.core.signing.dumps',
'django.core.signing.loads': 'http://django.readthedocs.org/en/latest/topics/signing.html#django.core.signing.loads'},
'django.core.validators': { 'django.core.validators.DecimalValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.DecimalValidator',
'django.core.validators.EmailValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.EmailValidator',
'django.core.validators.FileExtensionValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.FileExtensionValidator',
'django.core.validators.MaxLengthValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.MaxLengthValidator',
'django.core.validators.MaxValueValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.MaxValueValidator',
'django.core.validators.MinLengthValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.MinLengthValidator',
'django.core.validators.MinValueValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.MinValueValidator',
'django.core.validators.ProhibitNullCharactersValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.ProhibitNullCharactersValidator',
'django.core.validators.RegexValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.RegexValidator',
'django.core.validators.StepValueValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.StepValueValidator',
'django.core.validators.URLValidator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.URLValidator',
'django.core.validators.int_list_validator': 'http://django.readthedocs.org/en/latest/ref/validators.html#django.core.validators.int_list_validator'},
'django.db.backends.base.schema': { 'django.db.backends.base.schema.BaseDatabaseSchemaEditor': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.add_constraint': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.add_constraint',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.add_field': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.add_field',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.add_index': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.add_index',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.alter_db_table': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.alter_db_table',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.alter_db_tablespace': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.alter_db_tablespace',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.alter_field': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.alter_field',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.alter_index_together': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.alter_index_together',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.alter_unique_together': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.alter_unique_together',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.create_model': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.create_model',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.delete_model': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.delete_model',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.execute': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.execute',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.remove_constraint': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.remove_constraint',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.remove_field': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.remove_field',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.remove_index': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.remove_index',
'django.db.backends.base.schema.BaseDatabaseSchemaEditor.rename_index': 'http://django.readthedocs.org/en/latest/ref/schema-editor.html#django.db.backends.base.schema.BaseDatabaseSchemaEditor.rename_index'},
'django.db.migrations.operations': { 'django.db.migrations.operations.AddConstraint': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.AddConstraint',
'django.db.migrations.operations.AddField': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.AddField',
'django.db.migrations.operations.AddIndex': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.AddIndex',
'django.db.migrations.operations.AlterField': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.AlterField',
'django.db.migrations.operations.AlterIndexTogether': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.AlterIndexTogether',
'django.db.migrations.operations.AlterModelManagers': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.AlterModelManagers',
'django.db.migrations.operations.AlterModelOptions': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.AlterModelOptions',
'django.db.migrations.operations.AlterModelTable': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.AlterModelTable',
'django.db.migrations.operations.AlterOrderWithRespectTo': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.AlterOrderWithRespectTo',
'django.db.migrations.operations.AlterUniqueTogether': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.AlterUniqueTogether',
'django.db.migrations.operations.CreateModel': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.CreateModel',
'django.db.migrations.operations.DeleteModel': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.DeleteModel',
'django.db.migrations.operations.RemoveConstraint': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.RemoveConstraint',
'django.db.migrations.operations.RemoveField': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.RemoveField',
'django.db.migrations.operations.RemoveIndex': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.RemoveIndex',
'django.db.migrations.operations.RenameField': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.RenameField',
'django.db.migrations.operations.RenameIndex': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.RenameIndex',
'django.db.migrations.operations.RenameModel': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.RenameModel',
'django.db.migrations.operations.RunPython': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.RunPython',
'django.db.migrations.operations.RunSQL': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.RunSQL',
'django.db.migrations.operations.SeparateDatabaseAndState': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.SeparateDatabaseAndState',
'django.db.migrations.operations.RunPython.noop': 'http://django.readthedocs.org/en/latest/ref/migration-operations.html#django.db.migrations.operations.RunPython.noop'},
'django.db.models.expressions': { 'django.db.models.expressions.Case': 'http://django.readthedocs.org/en/latest/ref/models/conditional-expressions.html#django.db.models.expressions.Case',
'django.db.models.expressions.RawSQL': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.expressions.RawSQL',
'django.db.models.expressions.RowRange': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.expressions.RowRange',
'django.db.models.expressions.ValueRange': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.expressions.ValueRange',
'django.db.models.expressions.When': 'http://django.readthedocs.org/en/latest/ref/models/conditional-expressions.html#django.db.models.expressions.When',
'django.db.models.expressions.Window': 'http://django.readthedocs.org/en/latest/ref/models/expressions.html#django.db.models.expressions.Window'},
'django.db.models.fields.files': { 'django.db.models.fields.files.FieldFile': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.fields.files.FieldFile',
'django.db.models.fields.files.FieldFile.close': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.fields.files.FieldFile.close',
'django.db.models.fields.files.FieldFile.delete': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.fields.files.FieldFile.delete',
'django.db.models.fields.files.FieldFile.open': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.fields.files.FieldFile.open',
'django.db.models.fields.files.FieldFile.save': 'http://django.readthedocs.org/en/latest/ref/models/fields.html#django.db.models.fields.files.FieldFile.save'},
'django.db.models.fields.related': { 'django.db.models.fields.related.RelatedManager': 'http://django.readthedocs.org/en/latest/ref/models/relations.html#django.db.models.fields.related.RelatedManager',
'django.db.models.fields.related.RelatedManager.add': 'http://django.readthedocs.org/en/latest/ref/models/relations.html#django.db.models.fields.related.RelatedManager.add',
'django.db.models.fields.related.RelatedManager.clear': 'http://django.readthedocs.org/en/latest/ref/models/relations.html#django.db.models.fields.related.RelatedManager.clear',
'django.db.models.fields.related.RelatedManager.create': 'http://django.readthedocs.org/en/latest/ref/models/relations.html#django.db.models.fields.related.RelatedManager.create',
'django.db.models.fields.related.RelatedManager.remove': 'http://django.readthedocs.org/en/latest/ref/models/relations.html#django.db.models.fields.related.RelatedManager.remove',
'django.db.models.fields.related.RelatedManager.set': 'http://django.readthedocs.org/en/latest/ref/models/relations.html#django.db.models.fields.related.RelatedManager.set'},
'django.db.models.functions': { 'django.db.models.functions.ACos': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ACos',
'django.db.models.functions.ASin': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ASin',
'django.db.models.functions.ATan': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ATan',
'django.db.models.functions.ATan2': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ATan2',
'django.db.models.functions.Abs': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Abs',
'django.db.models.functions.Cast': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Cast',
'django.db.models.functions.Ceil': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Ceil',
'django.db.models.functions.Chr': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Chr',
'django.db.models.functions.Coalesce': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Coalesce',
'django.db.models.functions.Collate': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Collate',
'django.db.models.functions.Concat': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Concat',
'django.db.models.functions.Cos': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Cos',
'django.db.models.functions.Cot': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Cot',
'django.db.models.functions.CumeDist': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.CumeDist',
'django.db.models.functions.Degrees': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Degrees',
'django.db.models.functions.DenseRank': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.DenseRank',
'django.db.models.functions.Exp': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Exp',
'django.db.models.functions.Extract': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Extract',
'django.db.models.functions.ExtractDay': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractDay',
'django.db.models.functions.ExtractHour': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractHour',
'django.db.models.functions.ExtractIsoWeekDay': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractIsoWeekDay',
'django.db.models.functions.ExtractIsoYear': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractIsoYear',
'django.db.models.functions.ExtractMinute': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractMinute',
'django.db.models.functions.ExtractMonth': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractMonth',
'django.db.models.functions.ExtractQuarter': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractQuarter',
'django.db.models.functions.ExtractSecond': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractSecond',
'django.db.models.functions.ExtractWeek': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractWeek',
'django.db.models.functions.ExtractWeekDay': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractWeekDay',
'django.db.models.functions.ExtractYear': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.ExtractYear',
'django.db.models.functions.FirstValue': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.FirstValue',
'django.db.models.functions.Floor': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Floor',
'django.db.models.functions.Greatest': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Greatest',
'django.db.models.functions.JSONObject': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.JSONObject',
'django.db.models.functions.LPad': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.LPad',
'django.db.models.functions.LTrim': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.LTrim',
'django.db.models.functions.Lag': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Lag',
'django.db.models.functions.LastValue': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.LastValue',
'django.db.models.functions.Lead': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Lead',
'django.db.models.functions.Least': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Least',
'django.db.models.functions.Left': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Left',
'django.db.models.functions.Length': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Length',
'django.db.models.functions.Ln': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Ln',
'django.db.models.functions.Log': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Log',
'django.db.models.functions.Lower': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Lower',
'django.db.models.functions.MD5': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.MD5',
'django.db.models.functions.Mod': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Mod',
'django.db.models.functions.Now': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Now',
'django.db.models.functions.NthValue': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.NthValue',
'django.db.models.functions.Ntile': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Ntile',
'django.db.models.functions.NullIf': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.NullIf',
'django.db.models.functions.Ord': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Ord',
'django.db.models.functions.PercentRank': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.PercentRank',
'django.db.models.functions.Pi': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Pi',
'django.db.models.functions.Power': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Power',
'django.db.models.functions.RPad': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.RPad',
'django.db.models.functions.RTrim': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.RTrim',
'django.db.models.functions.Radians': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Radians',
'django.db.models.functions.Random': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Random',
'django.db.models.functions.Rank': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Rank',
'django.db.models.functions.Repeat': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Repeat',
'django.db.models.functions.Replace': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Replace',
'django.db.models.functions.Reverse': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Reverse',
'django.db.models.functions.Right': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Right',
'django.db.models.functions.Round': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Round',
'django.db.models.functions.RowNumber': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.RowNumber',
'django.db.models.functions.SHA1': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.SHA1',
'django.db.models.functions.SHA224': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.SHA224',
'django.db.models.functions.SHA256': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.SHA256',
'django.db.models.functions.SHA384': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.SHA384',
'django.db.models.functions.SHA512': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.SHA512',
'django.db.models.functions.Sign': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Sign',
'django.db.models.functions.Sin': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Sin',
'django.db.models.functions.Sqrt': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Sqrt',
'django.db.models.functions.StrIndex': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.StrIndex',
'django.db.models.functions.Substr': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Substr',
'django.db.models.functions.Tan': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Tan',
'django.db.models.functions.Trim': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Trim',
'django.db.models.functions.Trunc': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Trunc',
'django.db.models.functions.TruncDate': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.TruncDate',
'django.db.models.functions.TruncDay': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.TruncDay',
'django.db.models.functions.TruncHour': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.TruncHour',
'django.db.models.functions.TruncMinute': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.TruncMinute',
'django.db.models.functions.TruncMonth': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.TruncMonth',
'django.db.models.functions.TruncQuarter': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.TruncQuarter',
'django.db.models.functions.TruncSecond': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.TruncSecond',
'django.db.models.functions.TruncTime': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.TruncTime',
'django.db.models.functions.TruncWeek': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.TruncWeek',
'django.db.models.functions.TruncYear': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.TruncYear',
'django.db.models.functions.Upper': 'http://django.readthedocs.org/en/latest/ref/models/database-functions.html#django.db.models.functions.Upper'},
'django.db.models.lookups': { 'django.db.models.lookups.RegisterLookupMixin': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.lookups.RegisterLookupMixin',
'django.db.models.lookups.RegisterLookupMixin.get_lookup': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.lookups.RegisterLookupMixin.get_lookup',
'django.db.models.lookups.RegisterLookupMixin.get_lookups': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.lookups.RegisterLookupMixin.get_lookups',
'django.db.models.lookups.RegisterLookupMixin.get_transform': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.lookups.RegisterLookupMixin.get_transform',
'django.db.models.lookups.RegisterLookupMixin.register_lookup': 'http://django.readthedocs.org/en/latest/ref/models/lookups.html#django.db.models.lookups.RegisterLookupMixin.register_lookup'},
'django.db.models.options': { 'django.db.models.options.Options': 'http://django.readthedocs.org/en/latest/ref/models/meta.html#django.db.models.options.Options',
'django.db.models.options.Options.get_field': 'http://django.readthedocs.org/en/latest/ref/models/meta.html#django.db.models.options.Options.get_field',
'django.db.models.options.Options.get_fields': 'http://django.readthedocs.org/en/latest/ref/models/meta.html#django.db.models.options.Options.get_fields'},
'django.db.models.query': { 'django.db.models.query.QuerySet': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet',
'django.db.models.query.QuerySet.aaggregate': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.aaggregate',
'django.db.models.query.QuerySet.abulk_create': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.abulk_create',
'django.db.models.query.QuerySet.abulk_update': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.abulk_update',
'django.db.models.query.QuerySet.acontains': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.acontains',
'django.db.models.query.QuerySet.acount': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.acount',
'django.db.models.query.QuerySet.acreate': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.acreate',
'django.db.models.query.QuerySet.adelete': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.adelete',
'django.db.models.query.QuerySet.aearliest': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.aearliest',
'django.db.models.query.QuerySet.aexists': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.aexists',
'django.db.models.query.QuerySet.aexplain': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.aexplain',
'django.db.models.query.QuerySet.afirst': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.afirst',
'django.db.models.query.QuerySet.aget': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.aget',
'django.db.models.query.QuerySet.aget_or_create': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.aget_or_create',
'django.db.models.query.QuerySet.aggregate': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.aggregate',
'django.db.models.query.QuerySet.ain_bulk': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.ain_bulk',
'django.db.models.query.QuerySet.aiterator': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.aiterator',
'django.db.models.query.QuerySet.alast': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.alast',
'django.db.models.query.QuerySet.alatest': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.alatest',
'django.db.models.query.QuerySet.alias': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.alias',
'django.db.models.query.QuerySet.all': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.all',
'django.db.models.query.QuerySet.annotate': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.annotate',
'django.db.models.query.QuerySet.as_manager': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.as_manager',
'django.db.models.query.QuerySet.aupdate': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.aupdate',
'django.db.models.query.QuerySet.aupdate_or_create': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.aupdate_or_create',
'django.db.models.query.QuerySet.bulk_create': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.bulk_create',
'django.db.models.query.QuerySet.bulk_update': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.bulk_update',
'django.db.models.query.QuerySet.contains': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.contains',
'django.db.models.query.QuerySet.count': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.count',
'django.db.models.query.QuerySet.create': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.create',
'django.db.models.query.QuerySet.dates': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.dates',
'django.db.models.query.QuerySet.datetimes': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.datetimes',
'django.db.models.query.QuerySet.defer': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.defer',
'django.db.models.query.QuerySet.delete': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.delete',
'django.db.models.query.QuerySet.difference': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.difference',
'django.db.models.query.QuerySet.distinct': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.distinct',
'django.db.models.query.QuerySet.earliest': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.earliest',
'django.db.models.query.QuerySet.exclude': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.exclude',
'django.db.models.query.QuerySet.exists': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.exists',
'django.db.models.query.QuerySet.explain': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.explain',
'django.db.models.query.QuerySet.extra': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.extra',
'django.db.models.query.QuerySet.filter': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.filter',
'django.db.models.query.QuerySet.first': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.first',
'django.db.models.query.QuerySet.get': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.get',
'django.db.models.query.QuerySet.get_or_create': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.get_or_create',
'django.db.models.query.QuerySet.in_bulk': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.in_bulk',
'django.db.models.query.QuerySet.intersection': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.intersection',
'django.db.models.query.QuerySet.iterator': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.iterator',
'django.db.models.query.QuerySet.last': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.last',
'django.db.models.query.QuerySet.latest': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.latest',
'django.db.models.query.QuerySet.none': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.none',
'django.db.models.query.QuerySet.only': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.only',
'django.db.models.query.QuerySet.order_by': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.order_by',
'django.db.models.query.QuerySet.prefetch_related': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.prefetch_related',
'django.db.models.query.QuerySet.raw': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.raw',
'django.db.models.query.QuerySet.reverse': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.reverse',
'django.db.models.query.QuerySet.select_for_update': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.select_for_update',
'django.db.models.query.QuerySet.select_related': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.select_related',
'django.db.models.query.QuerySet.union': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.union',
'django.db.models.query.QuerySet.update': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.update',
'django.db.models.query.QuerySet.update_or_create': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.update_or_create',
'django.db.models.query.QuerySet.using': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.using',
'django.db.models.query.QuerySet.values': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.values',
'django.db.models.query.QuerySet.values_list': 'http://django.readthedocs.org/en/latest/ref/models/querysets.html#django.db.models.query.QuerySet.values_list'},
'django.dispatch': { 'django.dispatch.Signal': 'http://django.readthedocs.org/en/latest/topics/signals.html#django.dispatch.Signal',
'django.dispatch.Signal.connect': 'http://django.readthedocs.org/en/latest/topics/signals.html#django.dispatch.Signal.connect',
'django.dispatch.Signal.disconnect': 'http://django.readthedocs.org/en/latest/topics/signals.html#django.dispatch.Signal.disconnect',
'django.dispatch.Signal.send': 'http://django.readthedocs.org/en/latest/topics/signals.html#django.dispatch.Signal.send',
'django.dispatch.Signal.send_robust': 'http://django.readthedocs.org/en/latest/topics/signals.html#django.dispatch.Signal.send_robust',
'django.dispatch.receiver': 'http://django.readthedocs.org/en/latest/topics/signals.html#django.dispatch.receiver'},
'django.forms.formsets': { 'django.forms.formsets.BaseFormSet': 'http://django.readthedocs.org/en/latest/topics/forms/formsets.html#django.forms.formsets.BaseFormSet',
'django.forms.formsets.BaseFormSet.as_p': 'http://django.readthedocs.org/en/latest/topics/forms/formsets.html#django.forms.formsets.BaseFormSet.as_p',
'django.forms.formsets.BaseFormSet.as_table': 'http://django.readthedocs.org/en/latest/topics/forms/formsets.html#django.forms.formsets.BaseFormSet.as_table',
'django.forms.formsets.BaseFormSet.as_ul': 'http://django.readthedocs.org/en/latest/topics/forms/formsets.html#django.forms.formsets.BaseFormSet.as_ul',
'django.forms.formsets.BaseFormSet.get_context': 'http://django.readthedocs.org/en/latest/topics/forms/formsets.html#django.forms.formsets.BaseFormSet.get_context',
'django.forms.formsets.BaseFormSet.get_deletion_widget': 'http://django.readthedocs.org/en/latest/topics/forms/formsets.html#django.forms.formsets.BaseFormSet.get_deletion_widget',
'django.forms.formsets.BaseFormSet.get_ordering_widget': 'http://django.readthedocs.org/en/latest/topics/forms/formsets.html#django.forms.formsets.BaseFormSet.get_ordering_widget',
'django.forms.formsets.BaseFormSet.render': 'http://django.readthedocs.org/en/latest/topics/forms/formsets.html#django.forms.formsets.BaseFormSet.render',
'django.forms.formsets.BaseFormSet.total_error_count': 'http://django.readthedocs.org/en/latest/topics/forms/formsets.html#django.forms.formsets.BaseFormSet.total_error_count',
'django.forms.formsets.formset_factory': 'http://django.readthedocs.org/en/latest/ref/forms/formsets.html#django.forms.formsets.formset_factory'},
'django.forms.models': { 'django.forms.models.BaseInlineFormSet': 'http://django.readthedocs.org/en/latest/topics/forms/modelforms.html#django.forms.models.BaseInlineFormSet',
'django.forms.models.BaseModelFormSet': 'http://django.readthedocs.org/en/latest/topics/forms/modelforms.html#django.forms.models.BaseModelFormSet',
'django.forms.models.inlineformset_factory': 'http://django.readthedocs.org/en/latest/ref/forms/models.html#django.forms.models.inlineformset_factory',
'django.forms.models.modelform_factory': 'http://django.readthedocs.org/en/latest/ref/forms/models.html#django.forms.models.modelform_factory',
'django.forms.models.modelformset_factory': 'http://django.readthedocs.org/en/latest/ref/forms/models.html#django.forms.models.modelformset_factory'},
'django.forms.renderers': { 'django.forms.renderers.BaseRenderer': 'http://django.readthedocs.org/en/latest/ref/forms/renderers.html#django.forms.renderers.BaseRenderer',
'django.forms.renderers.DjangoDivFormRenderer': 'http://django.readthedocs.org/en/latest/ref/forms/renderers.html#django.forms.renderers.DjangoDivFormRenderer',
'django.forms.renderers.DjangoTemplates': 'http://django.readthedocs.org/en/latest/ref/forms/renderers.html#django.forms.renderers.DjangoTemplates',
'django.forms.renderers.Jinja2': 'http://django.readthedocs.org/en/latest/ref/forms/renderers.html#django.forms.renderers.Jinja2',
'django.forms.renderers.Jinja2DivFormRenderer': 'http://django.readthedocs.org/en/latest/ref/forms/renderers.html#django.forms.renderers.Jinja2DivFormRenderer',
'django.forms.renderers.TemplatesSetting': 'http://django.readthedocs.org/en/latest/ref/forms/renderers.html#django.forms.renderers.TemplatesSetting',
'django.forms.renderers.BaseRenderer.get_template': 'http://django.readthedocs.org/en/latest/ref/forms/renderers.html#django.forms.renderers.BaseRenderer.get_template',
'django.forms.renderers.BaseRenderer.render': 'http://django.readthedocs.org/en/latest/ref/forms/renderers.html#django.forms.renderers.BaseRenderer.render'},
'django.http': { 'django.http.FileResponse': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.FileResponse',
'django.http.Http404': 'http://django.readthedocs.org/en/latest/topics/http/views.html#django.http.Http404',
'django.http.HttpRequest': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest',
'django.http.HttpResponse': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse',
'django.http.HttpResponseBadRequest': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponseBadRequest',
'django.http.HttpResponseBase': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponseBase',
'django.http.HttpResponseForbidden': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponseForbidden',
'django.http.HttpResponseGone': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponseGone',
'django.http.HttpResponseNotAllowed': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponseNotAllowed',
'django.http.HttpResponseNotFound': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponseNotFound',
'django.http.HttpResponseNotModified': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponseNotModified',
'django.http.HttpResponsePermanentRedirect': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponsePermanentRedirect',
'django.http.HttpResponseRedirect': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponseRedirect',
'django.http.HttpResponseServerError': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponseServerError',
'django.http.JsonResponse': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.JsonResponse',
'django.http.QueryDict': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict',
'django.http.StreamingHttpResponse': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.StreamingHttpResponse',
'django.http.FileResponse.set_headers': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.FileResponse.set_headers',
'django.http.HttpRequest.__iter__': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.__iter__',
'django.http.HttpRequest.accepts': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.accepts',
'django.http.HttpRequest.build_absolute_uri': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.build_absolute_uri',
'django.http.HttpRequest.get_full_path': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.get_full_path',
'django.http.HttpRequest.get_full_path_info': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.get_full_path_info',
'django.http.HttpRequest.get_host': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.get_host',
'django.http.HttpRequest.get_port': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.get_port',
'django.http.HttpRequest.get_signed_cookie': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.get_signed_cookie',
'django.http.HttpRequest.is_secure': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.is_secure',
'django.http.HttpRequest.read': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.read',
'django.http.HttpRequest.readline': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.readline',
'django.http.HttpRequest.readlines': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpRequest.readlines',
'django.http.HttpResponse.__delitem__': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.__delitem__',
'django.http.HttpResponse.__getitem__': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.__getitem__',
'django.http.HttpResponse.__init__': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.__init__',
'django.http.HttpResponse.__setitem__': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.__setitem__',
'django.http.HttpResponse.close': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.close',
'django.http.HttpResponse.delete_cookie': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.delete_cookie',
'django.http.HttpResponse.flush': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.flush',
'django.http.HttpResponse.get': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.get',
'django.http.HttpResponse.getvalue': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.getvalue',
'django.http.HttpResponse.has_header': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.has_header',
'django.http.HttpResponse.items': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.items',
'django.http.HttpResponse.readable': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.readable',
'django.http.HttpResponse.seekable': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.seekable',
'django.http.HttpResponse.set_cookie': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.set_cookie',
'django.http.HttpResponse.set_signed_cookie': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.set_signed_cookie',
'django.http.HttpResponse.setdefault': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.setdefault',
'django.http.HttpResponse.tell': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.tell',
'django.http.HttpResponse.writable': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.writable',
'django.http.HttpResponse.write': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.write',
'django.http.HttpResponse.writelines': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.HttpResponse.writelines',
'django.http.QueryDict.__contains__': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.__contains__',
'django.http.QueryDict.__getitem__': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.__getitem__',
'django.http.QueryDict.__init__': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.__init__',
'django.http.QueryDict.__setitem__': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.__setitem__',
'django.http.QueryDict.appendlist': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.appendlist',
'django.http.QueryDict.copy': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.copy',
'django.http.QueryDict.dict': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.dict',
'django.http.QueryDict.fromkeys': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.fromkeys',
'django.http.QueryDict.get': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.get',
'django.http.QueryDict.getlist': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.getlist',
'django.http.QueryDict.items': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.items',
'django.http.QueryDict.lists': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.lists',
'django.http.QueryDict.pop': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.pop',
'django.http.QueryDict.popitem': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.popitem',
'django.http.QueryDict.setdefault': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.setdefault',
'django.http.QueryDict.setlist': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.setlist',
'django.http.QueryDict.setlistdefault': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.setlistdefault',
'django.http.QueryDict.update': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.update',
'django.http.QueryDict.urlencode': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.urlencode',
'django.http.QueryDict.values': 'http://django.readthedocs.org/en/latest/ref/request-response.html#django.http.QueryDict.values'},
'django.middleware.cache': { 'django.middleware.cache.FetchFromCacheMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.middleware.cache.FetchFromCacheMiddleware',
'django.middleware.cache.UpdateCacheMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.middleware.cache.UpdateCacheMiddleware'},
'django.middleware.clickjacking': { 'django.middleware.clickjacking.XFrameOptionsMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.middleware.clickjacking.XFrameOptionsMiddleware'},
'django.middleware.common': { 'django.middleware.common.BrokenLinkEmailsMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.middleware.common.BrokenLinkEmailsMiddleware',
'django.middleware.common.CommonMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.middleware.common.CommonMiddleware'},
'django.middleware.csrf': { 'django.middleware.csrf.CsrfViewMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.middleware.csrf.CsrfViewMiddleware'},
'django.middleware.gzip': { 'django.middleware.gzip.GZipMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.middleware.gzip.GZipMiddleware'},
'django.middleware.http': { 'django.middleware.http.ConditionalGetMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.middleware.http.ConditionalGetMiddleware'},
'django.middleware.locale': { 'django.middleware.locale.LocaleMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.middleware.locale.LocaleMiddleware'},
'django.middleware.security': { 'django.middleware.security.SecurityMiddleware': 'http://django.readthedocs.org/en/latest/ref/middleware.html#django.middleware.security.SecurityMiddleware'},
'django.template.backends.django': { 'django.template.backends.django.DjangoTemplates': 'http://django.readthedocs.org/en/latest/topics/templates.html#django.template.backends.django.DjangoTemplates'},
'django.template.backends.jinja2': { 'django.template.backends.jinja2.Jinja2': 'http://django.readthedocs.org/en/latest/topics/templates.html#django.template.backends.jinja2.Jinja2'},
'django.template.base': { 'django.template.base.Origin': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.base.Origin'},
'django.template.loaders.app_directories': { 'django.template.loaders.app_directories.Loader': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.loaders.app_directories.Loader'},
'django.template.loaders.base': { 'django.template.loaders.base.Loader': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.loaders.base.Loader',
'django.template.loaders.base.Loader.get_contents': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.loaders.base.Loader.get_contents',
'django.template.loaders.base.Loader.get_template': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.loaders.base.Loader.get_template',
'django.template.loaders.base.Loader.get_template_sources': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.loaders.base.Loader.get_template_sources'},
'django.template.loaders.cached': { 'django.template.loaders.cached.Loader': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.loaders.cached.Loader'},
'django.template.loaders.filesystem': { 'django.template.loaders.filesystem.Loader': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.loaders.filesystem.Loader'},
'django.template.loaders.locmem': { 'django.template.loaders.locmem.Loader': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.loaders.locmem.Loader'},
'django.template.response': { 'django.template.response.SimpleTemplateResponse': 'http://django.readthedocs.org/en/latest/ref/template-response.html#django.template.response.SimpleTemplateResponse',
'django.template.response.TemplateResponse': 'http://django.readthedocs.org/en/latest/ref/template-response.html#django.template.response.TemplateResponse',
'django.template.response.SimpleTemplateResponse.__init__': 'http://django.readthedocs.org/en/latest/ref/template-response.html#django.template.response.SimpleTemplateResponse.__init__',
'django.template.response.SimpleTemplateResponse.add_post_render_callback': 'http://django.readthedocs.org/en/latest/ref/template-response.html#django.template.response.SimpleTemplateResponse.add_post_render_callback',
'django.template.response.SimpleTemplateResponse.render': 'http://django.readthedocs.org/en/latest/ref/template-response.html#django.template.response.SimpleTemplateResponse.render',
'django.template.response.SimpleTemplateResponse.resolve_context': 'http://django.readthedocs.org/en/latest/ref/template-response.html#django.template.response.SimpleTemplateResponse.resolve_context',
'django.template.response.SimpleTemplateResponse.resolve_template': 'http://django.readthedocs.org/en/latest/ref/template-response.html#django.template.response.SimpleTemplateResponse.resolve_template',
'django.template.response.TemplateResponse.__init__': 'http://django.readthedocs.org/en/latest/ref/template-response.html#django.template.response.TemplateResponse.__init__'},
'django.test.runner': { 'django.test.runner.DiscoverRunner': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner',
'django.test.runner.DiscoverRunner.add_arguments': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.add_arguments',
'django.test.runner.DiscoverRunner.build_suite': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.build_suite',
'django.test.runner.DiscoverRunner.get_test_runner_kwargs': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.get_test_runner_kwargs',
'django.test.runner.DiscoverRunner.log': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.log',
'django.test.runner.DiscoverRunner.run_checks': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.run_checks',
'django.test.runner.DiscoverRunner.run_suite': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.run_suite',
'django.test.runner.DiscoverRunner.run_tests': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.run_tests',
'django.test.runner.DiscoverRunner.setup_databases': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.setup_databases',
'django.test.runner.DiscoverRunner.setup_test_environment': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.setup_test_environment',
'django.test.runner.DiscoverRunner.suite_result': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.suite_result',
'django.test.runner.DiscoverRunner.teardown_databases': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.teardown_databases',
'django.test.runner.DiscoverRunner.teardown_test_environment': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.runner.DiscoverRunner.teardown_test_environment'},
'django.utils.deprecation': { 'django.utils.deprecation.MiddlewareMixin': 'http://django.readthedocs.org/en/latest/topics/http/middleware.html#django.utils.deprecation.MiddlewareMixin'},
'django.utils.feedgenerator': { 'django.utils.feedgenerator.Atom1Feed': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.Atom1Feed',
'django.utils.feedgenerator.Enclosure': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.Enclosure',
'django.utils.feedgenerator.Rss201rev2Feed': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.Rss201rev2Feed',
'django.utils.feedgenerator.RssFeed': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.RssFeed',
'django.utils.feedgenerator.RssUserland091Feed': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.RssUserland091Feed',
'django.utils.feedgenerator.SyndicationFeed': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed',
'django.utils.feedgenerator.SyndicationFeed.__init__': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed.__init__',
'django.utils.feedgenerator.SyndicationFeed.add_item': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed.add_item',
'django.utils.feedgenerator.SyndicationFeed.add_item_elements': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed.add_item_elements',
'django.utils.feedgenerator.SyndicationFeed.add_root_elements': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed.add_root_elements',
'django.utils.feedgenerator.SyndicationFeed.item_attributes': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed.item_attributes',
'django.utils.feedgenerator.SyndicationFeed.latest_post_date': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed.latest_post_date',
'django.utils.feedgenerator.SyndicationFeed.num_items': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed.num_items',
'django.utils.feedgenerator.SyndicationFeed.root_attributes': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed.root_attributes',
'django.utils.feedgenerator.SyndicationFeed.write': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed.write',
'django.utils.feedgenerator.SyndicationFeed.writeString': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.SyndicationFeed.writeString',
'django.utils.feedgenerator.get_tag_uri': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.feedgenerator.get_tag_uri'},
'django.utils.functional': { 'django.utils.functional.cached_property': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.functional.cached_property',
'django.utils.functional.classproperty': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.functional.classproperty',
'django.utils.functional.keep_lazy': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.functional.keep_lazy',
'django.utils.functional.keep_lazy_text': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.functional.keep_lazy_text'},
'django.utils.log': { 'django.utils.log.AdminEmailHandler': 'http://django.readthedocs.org/en/latest/ref/logging.html#django.utils.log.AdminEmailHandler',
'django.utils.log.CallbackFilter': 'http://django.readthedocs.org/en/latest/ref/logging.html#django.utils.log.CallbackFilter',
'django.utils.log.RequireDebugFalse': 'http://django.readthedocs.org/en/latest/ref/logging.html#django.utils.log.RequireDebugFalse',
'django.utils.log.RequireDebugTrue': 'http://django.readthedocs.org/en/latest/ref/logging.html#django.utils.log.RequireDebugTrue',
'django.utils.log.AdminEmailHandler.send_mail': 'http://django.readthedocs.org/en/latest/ref/logging.html#django.utils.log.AdminEmailHandler.send_mail'},
'django.utils.safestring': { 'django.utils.safestring.SafeString': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.safestring.SafeString',
'django.utils.safestring.mark_safe': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.safestring.mark_safe'},
'django.views.debug': { 'django.views.debug.ExceptionReporter': 'http://django.readthedocs.org/en/latest/howto/error-reporting.html#django.views.debug.ExceptionReporter',
'django.views.debug.SafeExceptionReporterFilter': 'http://django.readthedocs.org/en/latest/howto/error-reporting.html#django.views.debug.SafeExceptionReporterFilter',
'django.views.debug.ExceptionReporter.get_traceback_data': 'http://django.readthedocs.org/en/latest/howto/error-reporting.html#django.views.debug.ExceptionReporter.get_traceback_data',
'django.views.debug.ExceptionReporter.get_traceback_html': 'http://django.readthedocs.org/en/latest/howto/error-reporting.html#django.views.debug.ExceptionReporter.get_traceback_html',
'django.views.debug.ExceptionReporter.get_traceback_text': 'http://django.readthedocs.org/en/latest/howto/error-reporting.html#django.views.debug.ExceptionReporter.get_traceback_text',
'django.views.debug.SafeExceptionReporterFilter.get_post_parameters': 'http://django.readthedocs.org/en/latest/howto/error-reporting.html#django.views.debug.SafeExceptionReporterFilter.get_post_parameters',
'django.views.debug.SafeExceptionReporterFilter.get_traceback_frame_variables': 'http://django.readthedocs.org/en/latest/howto/error-reporting.html#django.views.debug.SafeExceptionReporterFilter.get_traceback_frame_variables',
'django.views.debug.SafeExceptionReporterFilter.is_active': 'http://django.readthedocs.org/en/latest/howto/error-reporting.html#django.views.debug.SafeExceptionReporterFilter.is_active'},
'django.views.generic.base': { 'django.views.generic.base.ContextMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-simple.html#django.views.generic.base.ContextMixin',
'django.views.generic.base.RedirectView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/base.html#django.views.generic.base.RedirectView',
'django.views.generic.base.TemplateResponseMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-simple.html#django.views.generic.base.TemplateResponseMixin',
'django.views.generic.base.TemplateView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/base.html#django.views.generic.base.TemplateView',
'django.views.generic.base.View': 'http://django.readthedocs.org/en/latest/ref/class-based-views/base.html#django.views.generic.base.View',
'django.views.generic.base.ContextMixin.get_context_data': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-simple.html#django.views.generic.base.ContextMixin.get_context_data',
'django.views.generic.base.RedirectView.get_redirect_url': 'http://django.readthedocs.org/en/latest/ref/class-based-views/base.html#django.views.generic.base.RedirectView.get_redirect_url',
'django.views.generic.base.TemplateResponseMixin.get_template_names': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-simple.html#django.views.generic.base.TemplateResponseMixin.get_template_names',
'django.views.generic.base.TemplateResponseMixin.render_to_response': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-simple.html#django.views.generic.base.TemplateResponseMixin.render_to_response',
'django.views.generic.base.View.as_view': 'http://django.readthedocs.org/en/latest/ref/class-based-views/base.html#django.views.generic.base.View.as_view',
'django.views.generic.base.View.dispatch': 'http://django.readthedocs.org/en/latest/ref/class-based-views/base.html#django.views.generic.base.View.dispatch',
'django.views.generic.base.View.http_method_not_allowed': 'http://django.readthedocs.org/en/latest/ref/class-based-views/base.html#django.views.generic.base.View.http_method_not_allowed',
'django.views.generic.base.View.options': 'http://django.readthedocs.org/en/latest/ref/class-based-views/base.html#django.views.generic.base.View.options',
'django.views.generic.base.View.setup': 'http://django.readthedocs.org/en/latest/ref/class-based-views/base.html#django.views.generic.base.View.setup'},
'django.views.generic.dates': { 'django.views.generic.dates.ArchiveIndexView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.ArchiveIndexView',
'django.views.generic.dates.BaseArchiveIndexView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.BaseArchiveIndexView',
'django.views.generic.dates.BaseDateDetailView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.BaseDateDetailView',
'django.views.generic.dates.BaseDateListView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.BaseDateListView',
'django.views.generic.dates.BaseDayArchiveView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.BaseDayArchiveView',
'django.views.generic.dates.BaseMonthArchiveView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.BaseMonthArchiveView',
'django.views.generic.dates.BaseTodayArchiveView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.BaseTodayArchiveView',
'django.views.generic.dates.BaseWeekArchiveView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.BaseWeekArchiveView',
'django.views.generic.dates.BaseYearArchiveView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.BaseYearArchiveView',
'django.views.generic.dates.DateDetailView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.DateDetailView',
'django.views.generic.dates.DateMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.DateMixin',
'django.views.generic.dates.DayArchiveView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.DayArchiveView',
'django.views.generic.dates.DayMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.DayMixin',
'django.views.generic.dates.MonthArchiveView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.MonthArchiveView',
'django.views.generic.dates.MonthMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.MonthMixin',
'django.views.generic.dates.TodayArchiveView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.TodayArchiveView',
'django.views.generic.dates.WeekArchiveView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.WeekArchiveView',
'django.views.generic.dates.WeekMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.WeekMixin',
'django.views.generic.dates.YearArchiveView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.YearArchiveView',
'django.views.generic.dates.YearMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.YearMixin',
'django.views.generic.dates.BaseDateListView.get_date_list': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.BaseDateListView.get_date_list',
'django.views.generic.dates.BaseDateListView.get_date_list_period': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.BaseDateListView.get_date_list_period',
'django.views.generic.dates.BaseDateListView.get_dated_items': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.BaseDateListView.get_dated_items',
'django.views.generic.dates.BaseDateListView.get_dated_queryset': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.BaseDateListView.get_dated_queryset',
'django.views.generic.dates.DateMixin.get_allow_future': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.DateMixin.get_allow_future',
'django.views.generic.dates.DateMixin.get_date_field': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.DateMixin.get_date_field',
'django.views.generic.dates.DayMixin.get_day': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.DayMixin.get_day',
'django.views.generic.dates.DayMixin.get_day_format': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.DayMixin.get_day_format',
'django.views.generic.dates.DayMixin.get_next_day': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.DayMixin.get_next_day',
'django.views.generic.dates.DayMixin.get_previous_day': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.DayMixin.get_previous_day',
'django.views.generic.dates.MonthMixin.get_month': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.MonthMixin.get_month',
'django.views.generic.dates.MonthMixin.get_month_format': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.MonthMixin.get_month_format',
'django.views.generic.dates.MonthMixin.get_next_month': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.MonthMixin.get_next_month',
'django.views.generic.dates.MonthMixin.get_previous_month': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.MonthMixin.get_previous_month',
'django.views.generic.dates.WeekMixin.get_next_week': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.WeekMixin.get_next_week',
'django.views.generic.dates.WeekMixin.get_prev_week': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.WeekMixin.get_prev_week',
'django.views.generic.dates.WeekMixin.get_week': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.WeekMixin.get_week',
'django.views.generic.dates.WeekMixin.get_week_format': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.WeekMixin.get_week_format',
'django.views.generic.dates.YearArchiveView.get_make_object_list': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-date-based.html#django.views.generic.dates.YearArchiveView.get_make_object_list',
'django.views.generic.dates.YearMixin.get_next_year': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.YearMixin.get_next_year',
'django.views.generic.dates.YearMixin.get_previous_year': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.YearMixin.get_previous_year',
'django.views.generic.dates.YearMixin.get_year': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.YearMixin.get_year',
'django.views.generic.dates.YearMixin.get_year_format': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-date-based.html#django.views.generic.dates.YearMixin.get_year_format'},
'django.views.generic.detail': { 'django.views.generic.detail.BaseDetailView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-display.html#django.views.generic.detail.BaseDetailView',
'django.views.generic.detail.DetailView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-display.html#django.views.generic.detail.DetailView',
'django.views.generic.detail.SingleObjectMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-single-object.html#django.views.generic.detail.SingleObjectMixin',
'django.views.generic.detail.SingleObjectTemplateResponseMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-single-object.html#django.views.generic.detail.SingleObjectTemplateResponseMixin',
'django.views.generic.detail.BaseDetailView.get': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-display.html#django.views.generic.detail.BaseDetailView.get',
'django.views.generic.detail.SingleObjectMixin.get_context_data': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-single-object.html#django.views.generic.detail.SingleObjectMixin.get_context_data',
'django.views.generic.detail.SingleObjectMixin.get_context_object_name': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-single-object.html#django.views.generic.detail.SingleObjectMixin.get_context_object_name',
'django.views.generic.detail.SingleObjectMixin.get_object': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-single-object.html#django.views.generic.detail.SingleObjectMixin.get_object',
'django.views.generic.detail.SingleObjectMixin.get_queryset': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-single-object.html#django.views.generic.detail.SingleObjectMixin.get_queryset',
'django.views.generic.detail.SingleObjectMixin.get_slug_field': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-single-object.html#django.views.generic.detail.SingleObjectMixin.get_slug_field',
'django.views.generic.detail.SingleObjectTemplateResponseMixin.get_template_names': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-single-object.html#django.views.generic.detail.SingleObjectTemplateResponseMixin.get_template_names'},
'django.views.generic.edit': { 'django.views.generic.edit.BaseCreateView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.BaseCreateView',
'django.views.generic.edit.BaseDeleteView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.BaseDeleteView',
'django.views.generic.edit.BaseFormView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.BaseFormView',
'django.views.generic.edit.BaseUpdateView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.BaseUpdateView',
'django.views.generic.edit.CreateView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.CreateView',
'django.views.generic.edit.DeleteView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.DeleteView',
'django.views.generic.edit.DeletionMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.DeletionMixin',
'django.views.generic.edit.FormMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.FormMixin',
'django.views.generic.edit.FormView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.FormView',
'django.views.generic.edit.ModelFormMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.ModelFormMixin',
'django.views.generic.edit.ProcessFormView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.ProcessFormView',
'django.views.generic.edit.UpdateView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.UpdateView',
'django.views.generic.edit.BaseCreateView.get': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.BaseCreateView.get',
'django.views.generic.edit.BaseCreateView.post': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.BaseCreateView.post',
'django.views.generic.edit.BaseUpdateView.get': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.BaseUpdateView.get',
'django.views.generic.edit.BaseUpdateView.post': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-editing.html#django.views.generic.edit.BaseUpdateView.post',
'django.views.generic.edit.DeletionMixin.delete': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.DeletionMixin.delete',
'django.views.generic.edit.DeletionMixin.get_success_url': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.DeletionMixin.get_success_url',
'django.views.generic.edit.FormMixin.form_invalid': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.FormMixin.form_invalid',
'django.views.generic.edit.FormMixin.form_valid': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.FormMixin.form_valid',
'django.views.generic.edit.FormMixin.get_context_data': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.FormMixin.get_context_data',
'django.views.generic.edit.FormMixin.get_form': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.FormMixin.get_form',
'django.views.generic.edit.FormMixin.get_form_class': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.FormMixin.get_form_class',
'django.views.generic.edit.FormMixin.get_form_kwargs': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.FormMixin.get_form_kwargs',
'django.views.generic.edit.FormMixin.get_initial': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.FormMixin.get_initial',
'django.views.generic.edit.FormMixin.get_prefix': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.FormMixin.get_prefix',
'django.views.generic.edit.FormMixin.get_success_url': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.FormMixin.get_success_url',
'django.views.generic.edit.ModelFormMixin.form_invalid': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.ModelFormMixin.form_invalid',
'django.views.generic.edit.ModelFormMixin.form_valid': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.ModelFormMixin.form_valid',
'django.views.generic.edit.ModelFormMixin.get_form_class': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.ModelFormMixin.get_form_class',
'django.views.generic.edit.ModelFormMixin.get_form_kwargs': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.ModelFormMixin.get_form_kwargs',
'django.views.generic.edit.ModelFormMixin.get_success_url': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.ModelFormMixin.get_success_url',
'django.views.generic.edit.ProcessFormView.get': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.ProcessFormView.get',
'django.views.generic.edit.ProcessFormView.post': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.ProcessFormView.post',
'django.views.generic.edit.ProcessFormView.put': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-editing.html#django.views.generic.edit.ProcessFormView.put'},
'django.views.generic.list': { 'django.views.generic.list.BaseListView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-display.html#django.views.generic.list.BaseListView',
'django.views.generic.list.ListView': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-display.html#django.views.generic.list.ListView',
'django.views.generic.list.MultipleObjectMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectMixin',
'django.views.generic.list.MultipleObjectTemplateResponseMixin': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectTemplateResponseMixin',
'django.views.generic.list.BaseListView.get': 'http://django.readthedocs.org/en/latest/ref/class-based-views/generic-display.html#django.views.generic.list.BaseListView.get',
'django.views.generic.list.MultipleObjectMixin.get_allow_empty': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectMixin.get_allow_empty',
'django.views.generic.list.MultipleObjectMixin.get_context_data': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectMixin.get_context_data',
'django.views.generic.list.MultipleObjectMixin.get_context_object_name': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectMixin.get_context_object_name',
'django.views.generic.list.MultipleObjectMixin.get_ordering': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectMixin.get_ordering',
'django.views.generic.list.MultipleObjectMixin.get_paginate_by': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectMixin.get_paginate_by',
'django.views.generic.list.MultipleObjectMixin.get_paginate_orphans': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectMixin.get_paginate_orphans',
'django.views.generic.list.MultipleObjectMixin.get_paginator': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectMixin.get_paginator',
'django.views.generic.list.MultipleObjectMixin.get_queryset': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectMixin.get_queryset',
'django.views.generic.list.MultipleObjectMixin.paginate_queryset': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectMixin.paginate_queryset',
'django.views.generic.list.MultipleObjectTemplateResponseMixin.get_template_names': 'http://django.readthedocs.org/en/latest/ref/class-based-views/mixins-multiple-object.html#django.views.generic.list.MultipleObjectTemplateResponseMixin.get_template_names'},
'django.views.i18n': { 'django.views.i18n.JSONCatalog': 'http://django.readthedocs.org/en/latest/topics/i18n/translation.html#django.views.i18n.JSONCatalog',
'django.views.i18n.JavaScriptCatalog': 'http://django.readthedocs.org/en/latest/topics/i18n/translation.html#django.views.i18n.JavaScriptCatalog',
'django.views.i18n.set_language': 'http://django.readthedocs.org/en/latest/topics/i18n/translation.html#django.views.i18n.set_language'},
'django.contrib.syndication': { 'django.contrib.syndication.Feed.get_context_data': 'http://django.readthedocs.org/en/latest/ref/contrib/syndication.html#django.contrib.syndication.Feed.get_context_data'},
'django.core.caches': { 'django.core.caches.cache.add': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.add',
'django.core.caches.cache.clear': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.clear',
'django.core.caches.cache.close': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.close',
'django.core.caches.cache.decr': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.decr',
'django.core.caches.cache.delete': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.delete',
'django.core.caches.cache.delete_many': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.delete_many',
'django.core.caches.cache.get': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.get',
'django.core.caches.cache.get_many': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.get_many',
'django.core.caches.cache.get_or_set': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.get_or_set',
'django.core.caches.cache.incr': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.incr',
'django.core.caches.cache.set': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.set',
'django.core.caches.cache.set_many': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.set_many',
'django.core.caches.cache.touch': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.caches.cache.touch'},
'django.forms.Form': { 'django.forms.Form.errors.as_data': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.errors.as_data',
'django.forms.Form.errors.as_json': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.errors.as_json',
'django.forms.Form.errors.get_json_data': 'http://django.readthedocs.org/en/latest/ref/forms/api.html#django.forms.Form.errors.get_json_data'},
'django.template.backends.base': { 'django.template.backends.base.Template.render': 'http://django.readthedocs.org/en/latest/topics/templates.html#django.template.backends.base.Template.render'},
'asgiref.sync': { 'asgiref.sync.async_to_sync': 'http://django.readthedocs.org/en/latest/topics/async.html#asgiref.sync.async_to_sync',
'asgiref.sync.sync_to_async': 'http://django.readthedocs.org/en/latest/topics/async.html#asgiref.sync.sync_to_async'},
'django.conf.settings': { 'django.conf.settings.configure': 'http://django.readthedocs.org/en/latest/topics/settings.html#django.conf.settings.configure'},
'django.conf.urls.i18n': { 'django.conf.urls.i18n.i18n_patterns': 'http://django.readthedocs.org/en/latest/topics/i18n/translation.html#django.conf.urls.i18n.i18n_patterns'},
'django.conf.urls.static': { 'django.conf.urls.static.static': 'http://django.readthedocs.org/en/latest/ref/urls.html#django.conf.urls.static.static'},
'django.contrib.admin.views.decorators': { 'django.contrib.admin.views.decorators.staff_member_required': 'http://django.readthedocs.org/en/latest/ref/contrib/admin/index.html#django.contrib.admin.views.decorators.staff_member_required'},
'django.contrib.auth.context_processors': { 'django.contrib.auth.context_processors.auth': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.contrib.auth.context_processors.auth'},
'django.contrib.auth.decorators': { 'django.contrib.auth.decorators.login_required': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.decorators.login_required',
'django.contrib.auth.decorators.permission_required': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.decorators.permission_required',
'django.contrib.auth.decorators.user_passes_test': 'http://django.readthedocs.org/en/latest/topics/auth/default.html#django.contrib.auth.decorators.user_passes_test'},
'django.contrib.auth.hashers': { 'django.contrib.auth.hashers.check_password': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.hashers.check_password',
'django.contrib.auth.hashers.is_password_usable': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.hashers.is_password_usable',
'django.contrib.auth.hashers.make_password': 'http://django.readthedocs.org/en/latest/topics/auth/passwords.html#django.contrib.auth.hashers.make_password'},
'django.contrib.sitemaps.views': { 'django.contrib.sitemaps.views.index': 'http://django.readthedocs.org/en/latest/ref/contrib/sitemaps.html#django.contrib.sitemaps.views.index',
'django.contrib.sitemaps.views.sitemap': 'http://django.readthedocs.org/en/latest/ref/contrib/sitemaps.html#django.contrib.sitemaps.views.sitemap'},
'django.contrib.sites.shortcuts': { 'django.contrib.sites.shortcuts.get_current_site': 'http://django.readthedocs.org/en/latest/ref/contrib/sites.html#django.contrib.sites.shortcuts.get_current_site'},
'django.contrib.staticfiles.urls': { 'django.contrib.staticfiles.urls.staticfiles_urlpatterns': 'http://django.readthedocs.org/en/latest/ref/contrib/staticfiles.html#django.contrib.staticfiles.urls.staticfiles_urlpatterns'},
'django.contrib.staticfiles.views': { 'django.contrib.staticfiles.views.serve': 'http://django.readthedocs.org/en/latest/ref/contrib/staticfiles.html#django.contrib.staticfiles.views.serve'},
'django.core.cache.utils': { 'django.core.cache.utils.make_template_fragment_key': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.core.cache.utils.make_template_fragment_key'},
'django.core.serializers': { 'django.core.serializers.get_serializer': 'http://django.readthedocs.org/en/latest/topics/serialization.html#django.core.serializers.get_serializer'},
'django.db.connection.creation': { 'django.db.connection.creation.create_test_db': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.db.connection.creation.create_test_db',
'django.db.connection.creation.destroy_test_db': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.db.connection.creation.destroy_test_db'},
'django.db.transaction': { 'django.db.transaction.atomic': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.atomic',
'django.db.transaction.clean_savepoints': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.clean_savepoints',
'django.db.transaction.commit': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.commit',
'django.db.transaction.get_autocommit': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.get_autocommit',
'django.db.transaction.get_rollback': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.get_rollback',
'django.db.transaction.non_atomic_requests': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.non_atomic_requests',
'django.db.transaction.on_commit': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.on_commit',
'django.db.transaction.rollback': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.rollback',
'django.db.transaction.savepoint': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.savepoint',
'django.db.transaction.savepoint_commit': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.savepoint_commit',
'django.db.transaction.savepoint_rollback': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.savepoint_rollback',
'django.db.transaction.set_autocommit': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.set_autocommit',
'django.db.transaction.set_rollback': 'http://django.readthedocs.org/en/latest/topics/db/transactions.html#django.db.transaction.set_rollback'},
'django.shortcuts': { 'django.shortcuts.get_list_or_404': 'http://django.readthedocs.org/en/latest/topics/http/shortcuts.html#django.shortcuts.get_list_or_404',
'django.shortcuts.get_object_or_404': 'http://django.readthedocs.org/en/latest/topics/http/shortcuts.html#django.shortcuts.get_object_or_404',
'django.shortcuts.redirect': 'http://django.readthedocs.org/en/latest/topics/http/shortcuts.html#django.shortcuts.redirect',
'django.shortcuts.render': 'http://django.readthedocs.org/en/latest/topics/http/shortcuts.html#django.shortcuts.render'},
'django.template.context_processors': { 'django.template.context_processors.debug': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.context_processors.debug',
'django.template.context_processors.i18n': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.context_processors.i18n',
'django.template.context_processors.static': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.context_processors.static',
'django.template.context_processors.tz': 'http://django.readthedocs.org/en/latest/ref/templates/api.html#django.template.context_processors.tz'},
'django.template.loader': { 'django.template.loader.get_template': 'http://django.readthedocs.org/en/latest/topics/templates.html#django.template.loader.get_template',
'django.template.loader.render_to_string': 'http://django.readthedocs.org/en/latest/topics/templates.html#django.template.loader.render_to_string',
'django.template.loader.select_template': 'http://django.readthedocs.org/en/latest/topics/templates.html#django.template.loader.select_template'},
'django.test.utils': { 'django.test.utils.isolate_apps': 'http://django.readthedocs.org/en/latest/internals/contributing/writing-code/unit-tests.html#django.test.utils.isolate_apps',
'django.test.utils.setup_databases': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.utils.setup_databases',
'django.test.utils.setup_test_environment': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.utils.setup_test_environment',
'django.test.utils.teardown_databases': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.utils.teardown_databases',
'django.test.utils.teardown_test_environment': 'http://django.readthedocs.org/en/latest/topics/testing/advanced.html#django.test.utils.teardown_test_environment'},
'django.utils.cache': { 'django.utils.cache.add_never_cache_headers': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.cache.add_never_cache_headers',
'django.utils.cache.get_cache_key': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.cache.get_cache_key',
'django.utils.cache.get_max_age': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.cache.get_max_age',
'django.utils.cache.learn_cache_key': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.cache.learn_cache_key',
'django.utils.cache.patch_cache_control': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.cache.patch_cache_control',
'django.utils.cache.patch_response_headers': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.cache.patch_response_headers',
'django.utils.cache.patch_vary_headers': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.cache.patch_vary_headers'},
'django.utils.dateparse': { 'django.utils.dateparse.parse_date': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.dateparse.parse_date',
'django.utils.dateparse.parse_datetime': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.dateparse.parse_datetime',
'django.utils.dateparse.parse_duration': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.dateparse.parse_duration',
'django.utils.dateparse.parse_time': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.dateparse.parse_time'},
'django.utils.decorators': { 'django.utils.decorators.async_only_middleware': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.decorators.async_only_middleware',
'django.utils.decorators.decorator_from_middleware': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.decorators.decorator_from_middleware',
'django.utils.decorators.decorator_from_middleware_with_args': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.decorators.decorator_from_middleware_with_args',
'django.utils.decorators.method_decorator': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.decorators.method_decorator',
'django.utils.decorators.sync_and_async_middleware': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.decorators.sync_and_async_middleware',
'django.utils.decorators.sync_only_middleware': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.decorators.sync_only_middleware'},
'django.utils.encoding': { 'django.utils.encoding.escape_uri_path': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.encoding.escape_uri_path',
'django.utils.encoding.filepath_to_uri': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.encoding.filepath_to_uri',
'django.utils.encoding.force_bytes': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.encoding.force_bytes',
'django.utils.encoding.force_str': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.encoding.force_str',
'django.utils.encoding.iri_to_uri': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.encoding.iri_to_uri',
'django.utils.encoding.is_protected_type': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.encoding.is_protected_type',
'django.utils.encoding.smart_bytes': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.encoding.smart_bytes',
'django.utils.encoding.smart_str': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.encoding.smart_str',
'django.utils.encoding.uri_to_iri': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.encoding.uri_to_iri'},
'django.utils.html': { 'django.utils.html.conditional_escape': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.html.conditional_escape',
'django.utils.html.escape': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.html.escape',
'django.utils.html.format_html': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.html.format_html',
'django.utils.html.format_html_join': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.html.format_html_join',
'django.utils.html.html_safe': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.html.html_safe',
'django.utils.html.strip_tags': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.html.strip_tags'},
'django.utils.http': { 'django.utils.http.base36_to_int': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.http.base36_to_int',
'django.utils.http.http_date': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.http.http_date',
'django.utils.http.int_to_base36': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.http.int_to_base36',
'django.utils.http.urlencode': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.http.urlencode',
'django.utils.http.urlsafe_base64_decode': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.http.urlsafe_base64_decode',
'django.utils.http.urlsafe_base64_encode': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.http.urlsafe_base64_encode'},
'django.utils.module_loading': { 'django.utils.module_loading.import_string': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.module_loading.import_string'},
'django.utils.text': { 'django.utils.text.format_lazy': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.text.format_lazy',
'django.utils.text.slugify': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.text.slugify'},
'django.utils.timezone': { 'django.utils.timezone.activate': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.activate',
'django.utils.timezone.deactivate': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.deactivate',
'django.utils.timezone.get_current_timezone': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.get_current_timezone',
'django.utils.timezone.get_current_timezone_name': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.get_current_timezone_name',
'django.utils.timezone.get_default_timezone': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.get_default_timezone',
'django.utils.timezone.get_default_timezone_name': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.get_default_timezone_name',
'django.utils.timezone.get_fixed_timezone': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.get_fixed_timezone',
'django.utils.timezone.is_aware': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.is_aware',
'django.utils.timezone.is_naive': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.is_naive',
'django.utils.timezone.localdate': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.localdate',
'django.utils.timezone.localtime': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.localtime',
'django.utils.timezone.make_aware': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.make_aware',
'django.utils.timezone.make_naive': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.make_naive',
'django.utils.timezone.now': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.now',
'django.utils.timezone.override': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.timezone.override'},
'django.utils.translation': { 'django.utils.translation.activate': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.activate',
'django.utils.translation.check_for_language': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.check_for_language',
'django.utils.translation.deactivate': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.deactivate',
'django.utils.translation.deactivate_all': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.deactivate_all',
'django.utils.translation.get_language': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.get_language',
'django.utils.translation.get_language_bidi': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.get_language_bidi',
'django.utils.translation.get_language_from_request': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.get_language_from_request',
'django.utils.translation.get_language_info': 'http://django.readthedocs.org/en/latest/topics/i18n/translation.html#django.utils.translation.get_language_info',
'django.utils.translation.get_supported_language_variant': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.get_supported_language_variant',
'django.utils.translation.gettext': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.gettext',
'django.utils.translation.gettext_lazy': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.gettext_lazy',
'django.utils.translation.gettext_noop': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.gettext_noop',
'django.utils.translation.ngettext': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.ngettext',
'django.utils.translation.ngettext_lazy': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.ngettext_lazy',
'django.utils.translation.npgettext': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.npgettext',
'django.utils.translation.npgettext_lazy': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.npgettext_lazy',
'django.utils.translation.override': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.override',
'django.utils.translation.pgettext': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.pgettext',
'django.utils.translation.pgettext_lazy': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.pgettext_lazy',
'django.utils.translation.templatize': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.templatize',
'django.utils.translation.to_locale': 'http://django.readthedocs.org/en/latest/ref/utils.html#django.utils.translation.to_locale'},
'django.views.decorators.cache': { 'django.views.decorators.cache.cache_control': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.cache.cache_control',
'django.views.decorators.cache.cache_page': 'http://django.readthedocs.org/en/latest/topics/cache.html#django.views.decorators.cache.cache_page',
'django.views.decorators.cache.never_cache': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.cache.never_cache'},
'django.views.decorators.common': { 'django.views.decorators.common.no_append_slash': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.common.no_append_slash'},
'django.views.decorators.csrf': { 'django.views.decorators.csrf.csrf_exempt': 'http://django.readthedocs.org/en/latest/ref/csrf.html#django.views.decorators.csrf.csrf_exempt',
'django.views.decorators.csrf.csrf_protect': 'http://django.readthedocs.org/en/latest/ref/csrf.html#django.views.decorators.csrf.csrf_protect',
'django.views.decorators.csrf.ensure_csrf_cookie': 'http://django.readthedocs.org/en/latest/ref/csrf.html#django.views.decorators.csrf.ensure_csrf_cookie',
'django.views.decorators.csrf.requires_csrf_token': 'http://django.readthedocs.org/en/latest/ref/csrf.html#django.views.decorators.csrf.requires_csrf_token'},
'django.views.decorators.debug': { 'django.views.decorators.debug.sensitive_post_parameters': 'http://django.readthedocs.org/en/latest/howto/error-reporting.html#django.views.decorators.debug.sensitive_post_parameters',
'django.views.decorators.debug.sensitive_variables': 'http://django.readthedocs.org/en/latest/howto/error-reporting.html#django.views.decorators.debug.sensitive_variables'},
'django.views.decorators.gzip': { 'django.views.decorators.gzip.gzip_page': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.gzip.gzip_page'},
'django.views.decorators.http': { 'django.views.decorators.http.condition': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.http.condition',
'django.views.decorators.http.etag': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.http.etag',
'django.views.decorators.http.last_modified': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.http.last_modified',
'django.views.decorators.http.require_GET': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.http.require_GET',
'django.views.decorators.http.require_POST': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.http.require_POST',
'django.views.decorators.http.require_http_methods': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.http.require_http_methods',
'django.views.decorators.http.require_safe': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.http.require_safe'},
'django.views.decorators.vary': { 'django.views.decorators.vary.vary_on_cookie': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.vary.vary_on_cookie',
'django.views.decorators.vary.vary_on_headers': 'http://django.readthedocs.org/en/latest/topics/http/decorators.html#django.views.decorators.vary.vary_on_headers'},
'django.views.defaults': { 'django.views.defaults.bad_request': 'http://django.readthedocs.org/en/latest/ref/views.html#django.views.defaults.bad_request',
'django.views.defaults.page_not_found': 'http://django.readthedocs.org/en/latest/ref/views.html#django.views.defaults.page_not_found',
'django.views.defaults.permission_denied': 'http://django.readthedocs.org/en/latest/ref/views.html#django.views.defaults.permission_denied',
'django.views.defaults.server_error': 'http://django.readthedocs.org/en/latest/ref/views.html#django.views.defaults.server_error'},
'django.views.static': {'django.views.static.serve': 'http://django.readthedocs.org/en/latest/ref/views.html#django.views.static.serve'}},
'settings': {'lib_path': 'nbdev_django'}}
| 195.138427 | 309 | 0.662601 | 38,903 | 352,420 | 5.945788 | 0.033905 | 0.07825 | 0.163599 | 0.186971 | 0.932912 | 0.822882 | 0.762729 | 0.69664 | 0.656896 | 0.642971 | 0 | 0.00072 | 0.199696 | 352,420 | 1,805 | 310 | 195.246537 | 0.8194 | 0.000096 | 0 | 0 | 1 | 0.999445 | 0.766817 | 0.213372 | 0 | 0 | 0 | 0 | 0.011093 | 1 | 0 | false | 0.021631 | 0.003328 | 0 | 0.003328 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
7c9229dc9a6546ec6accd9910cc1960e2d5a1a3a | 427 | py | Python | datagym/__init__.py | datagym-ai/datagym-python | 65b5b2edaba1d401b08007e86a787585a88b33ae | [
"BSD-3-Clause"
] | 4 | 2020-04-28T23:00:15.000Z | 2022-03-21T19:34:10.000Z | datagym/__init__.py | datagym-ai/datagym-python | 65b5b2edaba1d401b08007e86a787585a88b33ae | [
"BSD-3-Clause"
] | 2 | 2022-02-07T10:34:33.000Z | 2022-02-07T10:53:46.000Z | datagym/__init__.py | datagym-ai/datagym-python | 65b5b2edaba1d401b08007e86a787585a88b33ae | [
"BSD-3-Clause"
] | null | null | null | from datagym.client import Client # noqa
from datagym.importers.coco import Coco # noqa
from datagym.exceptions.exceptions import ClientException, APIException # noqa
from datagym.models.label_config import LabelConfig # noqa
from datagym.models.image import Image # noqa
from datagym.models.video import Video # noqa
from datagym.models.project import Project # noqa
from datagym.models.dataset import Dataset # noqa | 53.375 | 80 | 0.807963 | 57 | 427 | 6.035088 | 0.315789 | 0.255814 | 0.305233 | 0.305233 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.135831 | 427 | 8 | 81 | 53.375 | 0.932249 | 0.091335 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
7cc63cf82414555454888e2464d1d34081c6e18f | 266 | py | Python | covertutils/covertutils/shells/impl/__init__.py | aidden-laoch/sabre | 0940aa51dfc5074291df9d29db827ddb4010566d | [
"MIT"
] | 2 | 2020-11-23T23:54:32.000Z | 2021-05-25T12:28:05.000Z | covertutils/covertutils/shells/impl/__init__.py | aidden-laoch/sabre | 0940aa51dfc5074291df9d29db827ddb4010566d | [
"MIT"
] | 1 | 2021-03-20T05:43:02.000Z | 2021-03-20T05:43:02.000Z | covertutils/covertutils/shells/impl/__init__.py | aidden-laoch/sabre | 0940aa51dfc5074291df9d29db827ddb4010566d | [
"MIT"
] | null | null | null |
from covertutils.shells.impl.standardshell import StandardShell
from covertutils.shells.impl.meterpretershell import MeterpreterShell
from covertutils.shells.impl.simpleshell import SimpleShell
from covertutils.shells.impl.extendableshell import ExtendableShell
| 29.555556 | 69 | 0.879699 | 28 | 266 | 8.357143 | 0.321429 | 0.25641 | 0.358974 | 0.42735 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075188 | 266 | 8 | 70 | 33.25 | 0.95122 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
7ce9bc3d949427f6de010427a77843112c30b205 | 179,244 | py | Python | pypureclient/pure1/Pure1_1_0/client.py | bcai-ps/py-pure-client | d23de5cde4f4db17b85b1ba137235ae368a59c8c | [
"BSD-2-Clause"
] | null | null | null | pypureclient/pure1/Pure1_1_0/client.py | bcai-ps/py-pure-client | d23de5cde4f4db17b85b1ba137235ae368a59c8c | [
"BSD-2-Clause"
] | 1 | 2021-11-17T18:59:56.000Z | 2021-11-17T18:59:56.000Z | pypureclient/pure1/Pure1_1_0/client.py | bcai-ps/py-pure-client | d23de5cde4f4db17b85b1ba137235ae368a59c8c | [
"BSD-2-Clause"
] | 1 | 2021-11-02T21:47:34.000Z | 2021-11-02T21:47:34.000Z | import json
import os
import platform
import time
from typing import List, Optional
from ...exceptions import PureError
from ...keywords import Headers, Responses
from ...responses import ValidResponse, ErrorResponse, ApiError, ItemIterator
from ...token_manager import TokenManager
from .api_client import ApiClient
from .rest import ApiException
from . import api
from . import models
class Client(object):
    """
    A client for making REST API calls to Pure1.
    """
    # Keyword-argument names accepted by __init__, paired with the
    # environment variables that supply their defaults.
    APP_ID_KEY = 'app_id'
    APP_ID_ENV = 'PURE1_APP_ID'
    ID_TOKEN_KEY = 'id_token'
    ID_TOKEN_ENV = 'PURE1_ID_TOKEN'
    PRIVATE_KEY_FILE_KEY = 'private_key_file'
    PRIVATE_KEY_FILE_ENV = 'PURE1_PRIVATE_KEY_FILE'
    PRIVATE_KEY_PASSWORD_KEY = 'private_key_password'
    PRIVATE_KEY_PASSWORD_ENV = 'PURE1_PRIVATE_KEY_PASSWORD'
    # Retry count applied when the caller does not pass `retries`.
    RETRIES_KEY = 'retries'
    RETRIES_DEFAULT = 5
    # OAuth2 token-exchange endpoint handed to the TokenManager.
    TOKEN_ENDPOINT = 'https://api.pure1.purestorage.com/oauth2/1.0/token'
    # Request timeout (seconds) applied when the caller does not pass `timeout`.
    TIMEOUT_KEY = 'timeout'
    TIMEOUT_DEFAULT = 15.0
    # Format: client/client_version/endpoint/endpoint_version/system/release
    USER_AGENT = ('pypureclient/1.11.0/Pure1/1.0/{sys}/{rel}'
                  .format(sys=platform.system(), rel=platform.release()))
def __init__(self, **kwargs):
"""
Initialize a Pure1 Client.
Keyword args:
app_id (str, optional): The registered App ID for Pure1 to use.
Defaults to the set environment variable under PURE1_APP_ID.
id_token (str, optional): The ID token to use. Overrides given
App ID and private key. Defaults to environment variable set
under PURE1_ID_TOKEN.
private_key_file (str, optional): The path of the private key to
use. Defaults to the set environment variable under
PURE1_PRIVATE_KEY_FILE.
private_key_password (str, optional): The password of the private
key, if encrypted. Defaults to the set environment variable
under PURE1_PRIVATE_KEY_FILE. Defaults to None.
retries (int, optional): The number of times to retry an API call if
it failed for a non-blocking reason. Defaults to 5.
timeout (float or (float, float), optional): The timeout
duration in seconds, either in total time or (connect and read)
times. Defaults to 15.0 total.
Raises:
PureError: If it could not create an ID or access token
"""
app_id = (kwargs.get(self.APP_ID_KEY)
if self.APP_ID_KEY in kwargs
else os.getenv(self.APP_ID_ENV))
private_key_file = (kwargs.get(self.PRIVATE_KEY_FILE_KEY)
if self.PRIVATE_KEY_FILE_KEY in kwargs
else os.getenv(self.PRIVATE_KEY_FILE_ENV))
private_key_password = (kwargs.get(self.PRIVATE_KEY_PASSWORD_KEY)
if self.PRIVATE_KEY_PASSWORD_KEY in kwargs
else os.getenv(self.PRIVATE_KEY_PASSWORD_ENV))
id_token = (kwargs.get(self.ID_TOKEN_KEY)
if self.ID_TOKEN_KEY in kwargs
else os.getenv(self.ID_TOKEN_ENV))
self._token_man = TokenManager(self.TOKEN_ENDPOINT,
id_token=id_token,
private_key_file=private_key_file,
private_key_password=private_key_password,
payload={'iss': app_id})
# Read timeout and retries from kwargs
self._retries = (kwargs.get(self.RETRIES_KEY)
if self.RETRIES_KEY in kwargs
else self.RETRIES_DEFAULT)
self._timeout = (kwargs.get(self.TIMEOUT_KEY)
if (self.TIMEOUT_KEY in kwargs and
isinstance(kwargs.get(self.TIMEOUT_KEY), (tuple, float)))
else self.TIMEOUT_DEFAULT)
# Instantiate the client and authorize it
self._api_client = ApiClient()
self._api_client.configuration.host = "https://api.pure1.purestorage.com"
self._set_agent_header()
self._set_auth_header()
# Instantiate APIs
self._alerts_api = api.AlertsApi(self._api_client)
self._arrays_api = api.ArraysApi(self._api_client)
self._audits_api = api.AuditsApi(self._api_client)
self._blades_api = api.BladesApi(self._api_client)
self._bucket_replica_links_api = api.BucketReplicaLinksApi(self._api_client)
self._buckets_api = api.BucketsApi(self._api_client)
self._controllers_api = api.ControllersApi(self._api_client)
self._directories_api = api.DirectoriesApi(self._api_client)
self._drives_api = api.DrivesApi(self._api_client)
self._file_system_replica_links_api = api.FileSystemReplicaLinksApi(self._api_client)
self._file_system_snapshots_api = api.FileSystemSnapshotsApi(self._api_client)
self._file_systems_api = api.FileSystemsApi(self._api_client)
self._hardware_api = api.HardwareApi(self._api_client)
self._hardware_connectors_api = api.HardwareConnectorsApi(self._api_client)
self._metrics_api = api.MetricsApi(self._api_client)
self._network_interfaces_api = api.NetworkInterfacesApi(self._api_client)
self._object_store_accounts_api = api.ObjectStoreAccountsApi(self._api_client)
self._pod_replica_links_api = api.PodReplicaLinksApi(self._api_client)
self._pods_api = api.PodsApi(self._api_client)
self._policies_api = api.PoliciesApi(self._api_client)
self._ports_api = api.PortsApi(self._api_client)
self._subscriptions_api = api.SubscriptionsApi(self._api_client)
self._targets_api = api.TargetsApi(self._api_client)
self._volume_snapshots_api = api.VolumeSnapshotsApi(self._api_client)
self._volumes_api = api.VolumesApi(self._api_client)
def get_access_token(self, refresh=False):
"""
Get the last used access token.
Args:
refresh (bool, optional):
Whether to retrieve a new access token. Defaults to False.
Returns:
str
Raises:
PureError: If there was an error retrieving an access token.
"""
return self._token_man.get_access_token(refresh)
def get_alerts(
self,
references=None, # type: List[models.ReferenceType]
authorization=None, # type: str
x_request_id=None, # type: str
continuation_token=None, # type: str
filter=None, # type: str
ids=None, # type: List[str]
limit=None, # type: int
names=None, # type: List[str]
offset=None, # type: int
sort=None, # type: List[str]
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.AlertsGetResponse
"""
Retrieves information about alerts generated by Pure1-monitored appliances.
Args:
references (list[FixedReference], optional):
A list of references to query for. Overrides ids and names keyword arguments.
x_request_id (str, optional):
A header to provide to track the API call. Generated by the server if not
provided.
continuation_token (str, optional):
An opaque token to iterate over a collection of resources.
filter (Filter, optional):
A filter to include only resources that match the specified criteria.
ids (list[str], optional):
A list of resource IDs. If there is not at least one resource that matches each
`id` element, an error is returned.
limit (int, optional):
Limit the number of resources in the response. If not specified, defaults to
1000.
names (list[str], optional):
A list of resource names. If there is not at least one resource that matches
each `name` element, an error is returned.
offset (int, optional):
The offset of the first resource to return from a collection.
sort (list[Property], optional):
Sort the response by the specified Properties. Can also be a single element.
async_req (bool, optional):
Request runs in separate thread and method returns
multiprocessing.pool.ApplyResult.
_return_http_data_only (bool, optional):
Returns only data field.
_preload_content (bool, optional):
Response is converted into objects.
_request_timeout (int, optional):
Total request timeout in seconds.
Returns:
ValidResponse: If the call was successful.
ErrorResponse: If the call was not successful.
Raises:
PureError: If calling the API fails.
ValueError: If a parameter is of an invalid type.
TypeError: If invalid or missing parameters are used.
"""
kwargs = dict(
authorization=authorization,
x_request_id=x_request_id,
continuation_token=continuation_token,
filter=filter,
ids=ids,
limit=limit,
names=names,
offset=offset,
sort=sort,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
)
kwargs = {k: v for k, v in kwargs.items() if v is not None}
endpoint = self._alerts_api.api10_alerts_get_with_http_info
_process_references(references, ['ids', 'names'], kwargs)
return self._call_api(endpoint, kwargs)
def get_arrays(
self,
references=None, # type: List[models.ReferenceType]
authorization=None, # type: str
x_request_id=None, # type: str
continuation_token=None, # type: str
filter=None, # type: str
ids=None, # type: List[str]
limit=None, # type: int
names=None, # type: List[str]
offset=None, # type: int
sort=None, # type: List[str]
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.ArrayGetResponse
"""
Retrieves information about FlashArray and FlashBlade storage appliances.
Args:
references (list[FixedReference], optional):
A list of references to query for. Overrides ids and names keyword arguments.
x_request_id (str, optional):
A header to provide to track the API call. Generated by the server if not
provided.
continuation_token (str, optional):
An opaque token to iterate over a collection of resources.
filter (Filter, optional):
A filter to include only resources that match the specified criteria.
ids (list[str], optional):
A list of resource IDs. If there is not at least one resource that matches each
`id` element, an error is returned.
limit (int, optional):
Limit the number of resources in the response. If not specified, defaults to
1000.
names (list[str], optional):
A list of resource names. If there is not at least one resource that matches
each `name` element, an error is returned.
offset (int, optional):
The offset of the first resource to return from a collection.
sort (list[Property], optional):
Sort the response by the specified Properties. Can also be a single element.
async_req (bool, optional):
Request runs in separate thread and method returns
multiprocessing.pool.ApplyResult.
_return_http_data_only (bool, optional):
Returns only data field.
_preload_content (bool, optional):
Response is converted into objects.
_request_timeout (int, optional):
Total request timeout in seconds.
Returns:
ValidResponse: If the call was successful.
ErrorResponse: If the call was not successful.
Raises:
PureError: If calling the API fails.
ValueError: If a parameter is of an invalid type.
TypeError: If invalid or missing parameters are used.
"""
kwargs = dict(
authorization=authorization,
x_request_id=x_request_id,
continuation_token=continuation_token,
filter=filter,
ids=ids,
limit=limit,
names=names,
offset=offset,
sort=sort,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
)
kwargs = {k: v for k, v in kwargs.items() if v is not None}
endpoint = self._arrays_api.api10_arrays_get_with_http_info
_process_references(references, ['ids', 'names'], kwargs)
return self._call_api(endpoint, kwargs)
def get_arrays_support_contracts(
self,
resources=None, # type: List[models.ReferenceType]
authorization=None, # type: str
x_request_id=None, # type: str
continuation_token=None, # type: str
filter=None, # type: str
limit=None, # type: int
offset=None, # type: int
resource_ids=None, # type: List[str]
resource_names=None, # type: List[str]
sort=None, # type: List[str]
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.SupportContractGetResponse
"""
Retrieves the support contracts associated with arrays.
Args:
resources (list[FixedReference], optional):
A list of resources to query for. Overrides resource_ids and resource_names keyword arguments.
x_request_id (str, optional):
A header to provide to track the API call. Generated by the server if not
provided.
continuation_token (str, optional):
An opaque token to iterate over a collection of resources.
filter (Filter, optional):
A filter to include only resources that match the specified criteria.
limit (int, optional):
Limit the number of resources in the response. If not specified, defaults to
1000.
offset (int, optional):
The offset of the first resource to return from a collection.
resource_ids (list[str], optional):
A list of resource IDs. If there is not at least one resource that matches each
`resource_id` element, an error is returned.
resource_names (list[str], optional):
A list of resource names. If there is not at least one resource that matches
each `resource_name` element, an error is returned.
sort (list[Property], optional):
Sort the response by the specified Properties. Can also be a single element.
async_req (bool, optional):
Request runs in separate thread and method returns
multiprocessing.pool.ApplyResult.
_return_http_data_only (bool, optional):
Returns only data field.
_preload_content (bool, optional):
Response is converted into objects.
_request_timeout (int, optional):
Total request timeout in seconds.
Returns:
ValidResponse: If the call was successful.
ErrorResponse: If the call was not successful.
Raises:
PureError: If calling the API fails.
ValueError: If a parameter is of an invalid type.
TypeError: If invalid or missing parameters are used.
"""
kwargs = dict(
authorization=authorization,
x_request_id=x_request_id,
continuation_token=continuation_token,
filter=filter,
limit=limit,
offset=offset,
resource_ids=resource_ids,
resource_names=resource_names,
sort=sort,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
)
kwargs = {k: v for k, v in kwargs.items() if v is not None}
endpoint = self._arrays_api.api10_arrays_support_contracts_get_with_http_info
_process_references(resources, ['resource_ids', 'resource_names'], kwargs)
return self._call_api(endpoint, kwargs)
def put_arrays_tags(
    self,
    resources=None,  # type: List[models.ReferenceType]
    tag=None,  # type: List[models.TagPut]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    namespaces=None,  # type: List[str]
    resource_ids=None,  # type: List[str]
    resource_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.TagResponse
    """
    Creates or updates array tags contextual to Pure1 only.

    Args:
        resources (list[FixedReference], optional):
            Resources to operate on; overrides the `resource_ids` and
            `resource_names` keyword arguments.
        tag (list[TagPut], required):
            The tags to be upserted.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        namespaces (list[str], optional):
            A list of namespaces.
        resource_ids (list[str], optional):
            Resource IDs. Either `resource_ids` or `resource_names` is
            required. An error is returned if any `resource_id` element
            matches no resource.
        resource_names (list[str], optional):
            Resource names. Either `resource_ids` or `resource_names` is
            required. An error is returned if any `resource_name` element
            matches no resource.
        async_req (bool, optional):
            When True, the request runs in a separate thread and the
            method returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            When True, returns only the data field.
        _preload_content (bool, optional):
            When True, the response is converted into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'tag': tag,
        'authorization': authorization,
        'x_request_id': x_request_id,
        'namespaces': namespaces,
        'resource_ids': resource_ids,
        'resource_names': resource_names,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Omit unset (None) parameters so they are not sent with the request.
    params = {name: value for name, value in params.items() if value is not None}
    api_call = self._arrays_api.api10_arrays_tags_batch_put_with_http_info
    # Expand `resources` references into the resource_ids/resource_names params.
    _process_references(resources, ['resource_ids', 'resource_names'], params)
    return self._call_api(api_call, params)
def delete_arrays_tags(
    self,
    resources=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    keys=None,  # type: List[str]
    namespaces=None,  # type: List[str]
    resource_ids=None,  # type: List[str]
    resource_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> None
    """
    Deletes array tags from Pure1.

    Args:
        resources (list[FixedReference], optional):
            Resources to operate on; overrides the `resource_ids` and
            `resource_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        keys (list[str], optional):
            A list of tag keys.
        namespaces (list[str], optional):
            A list of namespaces.
        resource_ids (list[str], optional):
            Resource IDs. Either `resource_ids` or `resource_names` is
            required. An error is returned if any `resource_id` element
            matches no resource.
        resource_names (list[str], optional):
            Resource names. Either `resource_ids` or `resource_names` is
            required. An error is returned if any `resource_name` element
            matches no resource.
        async_req (bool, optional):
            When True, the request runs in a separate thread and the
            method returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            When True, returns only the data field.
        _preload_content (bool, optional):
            When True, the response is converted into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'keys': keys,
        'namespaces': namespaces,
        'resource_ids': resource_ids,
        'resource_names': resource_names,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Omit unset (None) parameters so they are not sent with the request.
    params = {name: value for name, value in params.items() if value is not None}
    api_call = self._arrays_api.api10_arrays_tags_delete_with_http_info
    # Expand `resources` references into the resource_ids/resource_names params.
    _process_references(resources, ['resource_ids', 'resource_names'], params)
    return self._call_api(api_call, params)
def get_arrays_tags(
    self,
    resources=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    keys=None,  # type: List[str]
    limit=None,  # type: int
    namespaces=None,  # type: List[str]
    offset=None,  # type: int
    resource_ids=None,  # type: List[str]
    resource_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.TagGetResponse
    """
    Retrieves the tags associated with specified arrays.

    Args:
        resources (list[FixedReference], optional):
            Resources to query for; overrides the `resource_ids` and
            `resource_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of
            resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        keys (list[str], optional):
            A list of tag keys.
        limit (int, optional):
            Maximum number of resources in the response; defaults to
            1000 when unspecified.
        namespaces (list[str], optional):
            A list of namespaces.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        resource_ids (list[str], optional):
            Resource IDs. An error is returned if any `resource_id`
            element matches no resource.
        resource_names (list[str], optional):
            Resource names. An error is returned if any `resource_name`
            element matches no resource.
        async_req (bool, optional):
            When True, the request runs in a separate thread and the
            method returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            When True, returns only the data field.
        _preload_content (bool, optional):
            When True, the response is converted into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'keys': keys,
        'limit': limit,
        'namespaces': namespaces,
        'offset': offset,
        'resource_ids': resource_ids,
        'resource_names': resource_names,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Omit unset (None) parameters so they are not sent with the request.
    params = {name: value for name, value in params.items() if value is not None}
    api_call = self._arrays_api.api10_arrays_tags_get_with_http_info
    # Expand `resources` references into the resource_ids/resource_names params.
    _process_references(resources, ['resource_ids', 'resource_names'], params)
    return self._call_api(api_call, params)
def get_audits(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.AuditsGetResponse
    """
    Retrieves audit objects.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of
            resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned if any `id` element
            matches no resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to
            1000 when unspecified.
        names (list[str], optional):
            Resource names. An error is returned if any `name` element
            matches no resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            When True, the request runs in a separate thread and the
            method returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            When True, returns only the data field.
        _preload_content (bool, optional):
            When True, the response is converted into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Omit unset (None) parameters so they are not sent with the request.
    params = {name: value for name, value in params.items() if value is not None}
    api_call = self._audits_api.api10_audits_get_with_http_info
    # Expand `references` into the ids/names query parameters.
    _process_references(references, ['ids', 'names'], params)
    return self._call_api(api_call, params)
def get_blades(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.BladeGetResponse
    """
    Retrieves information about FlashBlade blades.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of
            resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned if any `id` element
            matches no resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to
            1000 when unspecified.
        names (list[str], optional):
            Resource names. An error is returned if any `name` element
            matches no resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            When True, the request runs in a separate thread and the
            method returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            When True, returns only the data field.
        _preload_content (bool, optional):
            When True, the response is converted into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Omit unset (None) parameters so they are not sent with the request.
    params = {name: value for name, value in params.items() if value is not None}
    api_call = self._blades_api.api10_blades_get_with_http_info
    # Expand `references` into the ids/names query parameters.
    _process_references(references, ['ids', 'names'], params)
    return self._call_api(api_call, params)
def get_bucket_replica_links(
    self,
    references=None,  # type: List[models.ReferenceType]
    members=None,  # type: List[models.ReferenceType]
    sources=None,  # type: List[models.ReferenceType]
    targets=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    member_ids=None,  # type: List[str]
    member_names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    source_ids=None,  # type: List[str]
    source_names=None,  # type: List[str]
    target_ids=None,  # type: List[str]
    target_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.BucketReplicaLinkGetResponse
    """
    Retrieves information about bucket replica links.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` keyword
            argument.
        members (list[FixedReference], optional):
            Members to query for; overrides the `member_ids` and
            `member_names` keyword arguments.
        sources (list[FixedReference], optional):
            Sources to query for; overrides the `source_ids` and
            `source_names` keyword arguments.
        targets (list[FixedReference], optional):
            Targets to query for; overrides the `target_ids` and
            `target_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of
            resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned if any `id` element
            matches no resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to
            1000 when unspecified.
        member_ids (list[str], optional):
            Member IDs. IDs joined by `+` must all be present in an
            element; IDs joined by `,` require at least one to be
            present. An error is returned if any `member_id` element
            matches no resource. In Swagger's Try it Out, `+`-joined
            IDs belong in the same item cell.
        member_names (list[str], optional):
            Member names. Names joined by `+` must all be present in an
            element; names joined by `,` require at least one to be
            present. An error is returned if any `member_name` element
            matches no resource. In Swagger's Try it Out, `+`-joined
            names belong in the same item cell.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        source_ids (list[str], optional):
            Source IDs. IDs joined by `+` must all be present in an
            element; IDs joined by `,` require at least one to be
            present. An error is returned if any `source_id` element
            matches no resource. In Swagger's Try it Out, `+`-joined
            IDs belong in the same item cell.
        source_names (list[str], optional):
            Source names. Names joined by `+` must all be present in an
            element; names joined by `,` require at least one to be
            present. An error is returned if any `source_name` element
            matches no resource. In Swagger's Try it Out, `+`-joined
            names belong in the same item cell.
        target_ids (list[str], optional):
            Target IDs. IDs joined by `+` must all be present in an
            element; IDs joined by `,` require at least one to be
            present. An error is returned if any `target_id` element
            matches no resource. In Swagger's Try it Out, `+`-joined
            IDs belong in the same item cell.
        target_names (list[str], optional):
            Target names. Names joined by `+` must all be present in an
            element; names joined by `,` require at least one to be
            present. An error is returned if any `target_name` element
            matches no resource. In Swagger's Try it Out, `+`-joined
            names belong in the same item cell.
        async_req (bool, optional):
            When True, the request runs in a separate thread and the
            method returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            When True, returns only the data field.
        _preload_content (bool, optional):
            When True, the response is converted into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'member_ids': member_ids,
        'member_names': member_names,
        'offset': offset,
        'sort': sort,
        'source_ids': source_ids,
        'source_names': source_names,
        'target_ids': target_ids,
        'target_names': target_names,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Omit unset (None) parameters so they are not sent with the request.
    params = {name: value for name, value in params.items() if value is not None}
    api_call = self._bucket_replica_links_api.api10_bucket_replica_links_get_with_http_info
    # Expand each reference collection into its corresponding id/name params.
    _process_references(references, ['ids'], params)
    _process_references(members, ['member_ids', 'member_names'], params)
    _process_references(sources, ['source_ids', 'source_names'], params)
    _process_references(targets, ['target_ids', 'target_names'], params)
    return self._call_api(api_call, params)
def get_buckets(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.BucketGetResponse
    """
    Retrieves buckets.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of
            resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned if any `id` element
            matches no resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to
            1000 when unspecified.
        names (list[str], optional):
            Resource names. An error is returned if any `name` element
            matches no resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            When True, the request runs in a separate thread and the
            method returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            When True, returns only the data field.
        _preload_content (bool, optional):
            When True, the response is converted into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Omit unset (None) parameters so they are not sent with the request.
    params = {name: value for name, value in params.items() if value is not None}
    api_call = self._buckets_api.api10_buckets_get_with_http_info
    # Expand `references` into the ids/names query parameters.
    _process_references(references, ['ids', 'names'], params)
    return self._call_api(api_call, params)
def get_controllers(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ControllerGetResponse
    """
    Retrieves information about controllers.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of
            resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned if any `id` element
            matches no resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to
            1000 when unspecified.
        names (list[str], optional):
            Resource names. An error is returned if any `name` element
            matches no resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            When True, the request runs in a separate thread and the
            method returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            When True, returns only the data field.
        _preload_content (bool, optional):
            When True, the response is converted into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Omit unset (None) parameters so they are not sent with the request.
    params = {name: value for name, value in params.items() if value is not None}
    api_call = self._controllers_api.api10_controllers_get_with_http_info
    # Expand `references` into the ids/names query parameters.
    _process_references(references, ['ids', 'names'], params)
    return self._call_api(api_call, params)
def get_directories(
    self,
    file_systems=None,  # type: List[models.ReferenceType]
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    file_system_ids=None,  # type: List[str]
    file_system_names=None,  # type: List[str]
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.DirectoryGetResponse
    """
    Retrieves information about FlashArray managed directory objects.

    Args:
        file_systems (list[FixedReference], optional):
            File systems to query for; overrides the `file_system_ids`
            and `file_system_names` keyword arguments.
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of
            resources.
        file_system_ids (list[str], optional):
            File system IDs to operate on, in comma-separated format.
            Cannot be provided together with `file_system_names`.
        file_system_names (list[str], optional):
            File system names to operate on, in comma-separated format,
            e.g. `filesystem1,filesystem2`. Cannot be provided together
            with `file_system_ids`.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned if any `id` element
            matches no resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to
            1000 when unspecified.
        names (list[str], optional):
            Resource names. An error is returned if any `name` element
            matches no resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            When True, the request runs in a separate thread and the
            method returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            When True, returns only the data field.
        _preload_content (bool, optional):
            When True, the response is converted into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'file_system_ids': file_system_ids,
        'file_system_names': file_system_names,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Omit unset (None) parameters so they are not sent with the request.
    params = {name: value for name, value in params.items() if value is not None}
    api_call = self._directories_api.api10_directories_get_with_http_info
    # Expand each reference collection into its corresponding id/name params.
    _process_references(file_systems, ['file_system_ids', 'file_system_names'], params)
    _process_references(references, ['ids', 'names'], params)
    return self._call_api(api_call, params)
def get_drives(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.DriveGetResponse
    """
    Retrieves information about FlashArray drives.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of
            resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned if any `id` element
            matches no resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to
            1000 when unspecified.
        names (list[str], optional):
            Resource names. An error is returned if any `name` element
            matches no resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            When True, the request runs in a separate thread and the
            method returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            When True, returns only the data field.
        _preload_content (bool, optional):
            When True, the response is converted into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Omit unset (None) parameters so they are not sent with the request.
    params = {name: value for name, value in params.items() if value is not None}
    api_call = self._drives_api.api10_drives_get_with_http_info
    # Expand `references` into the ids/names query parameters.
    _process_references(references, ['ids', 'names'], params)
    return self._call_api(api_call, params)
def get_file_system_replica_links(
    self,
    references=None,  # type: List[models.ReferenceType]
    members=None,  # type: List[models.ReferenceType]
    sources=None,  # type: List[models.ReferenceType]
    targets=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    member_ids=None,  # type: List[str]
    member_names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    source_ids=None,  # type: List[str]
    source_names=None,  # type: List[str]
    target_ids=None,  # type: List[str]
    target_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.FileSystemReplicaLinkGetResponse
    """
    Retrieves information about FlashBlade file system replica links.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` keyword argument.
        members (list[FixedReference], optional):
            Members to query for; overrides the `member_ids` and
            `member_names` keyword arguments.
        sources (list[FixedReference], optional):
            Sources to query for; overrides the `source_ids` and
            `source_names` keyword arguments.
        targets (list[FixedReference], optional):
            Targets to query for; overrides the `target_ids` and
            `target_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call. Generated by the server when
            not provided.
        continuation_token (str, optional):
            Opaque token for iterating over a collection of resources.
        filter (Filter, optional):
            Includes only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned unless each `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            when not specified.
        member_ids (list[str], optional):
            Member IDs. IDs joined with `+` must all be present in each
            element; IDs joined with `,` require at least one to be present.
            An error is returned unless each `member_id` element matches at
            least one resource. In Swagger's Try it Out, `+`-joined IDs must
            be entered in the same item cell.
        member_names (list[str], optional):
            Member names. Names joined with `+` must all be present in each
            element; names joined with `,` require at least one to be
            present. An error is returned unless each `member_name` element
            matches at least one resource. In Swagger's Try it Out,
            `+`-joined names must be entered in the same item cell.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Properties to sort the response by. A single element is also
            accepted.
        source_ids (list[str], optional):
            Source IDs. IDs joined with `+` must all be present in each
            element; IDs joined with `,` require at least one to be present.
            An error is returned unless each `source_id` element matches at
            least one resource. In Swagger's Try it Out, `+`-joined IDs must
            be entered in the same item cell.
        source_names (list[str], optional):
            Source names. Names joined with `+` must all be present in each
            element; names joined with `,` require at least one to be
            present. An error is returned unless each `source_name` element
            matches at least one resource. In Swagger's Try it Out,
            `+`-joined names must be entered in the same item cell.
        target_ids (list[str], optional):
            Target IDs. IDs joined with `+` must all be present in each
            element; IDs joined with `,` require at least one to be present.
            An error is returned unless each `target_id` element matches at
            least one resource. In Swagger's Try it Out, `+`-joined IDs must
            be entered in the same item cell.
        target_names (list[str], optional):
            Target names. Names joined with `+` must all be present in each
            element; names joined with `,` require at least one to be
            present. An error is returned unless each `target_name` element
            matches at least one resource. In Swagger's Try it Out,
            `+`-joined names must be entered in the same item cell.
        async_req (bool, optional):
            Run the request in a separate thread and return
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    # Collect every keyword argument, then drop the unset ones so only
    # explicitly provided values are forwarded to the API layer.
    call_kwargs = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'member_ids': member_ids,
        'member_names': member_names,
        'offset': offset,
        'sort': sort,
        'source_ids': source_ids,
        'source_names': source_names,
        'target_ids': target_ids,
        'target_names': target_names,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    call_kwargs = {key: value for key, value in call_kwargs.items() if value is not None}
    # Reference objects, when supplied, replace the corresponding id/name kwargs.
    _process_references(references, ['ids'], call_kwargs)
    _process_references(members, ['member_ids', 'member_names'], call_kwargs)
    _process_references(sources, ['source_ids', 'source_names'], call_kwargs)
    _process_references(targets, ['target_ids', 'target_names'], call_kwargs)
    return self._call_api(
        self._file_system_replica_links_api.api10_file_system_replica_links_get_with_http_info,
        call_kwargs,
    )
def get_file_system_replica_links_policies(
    self,
    members=None,  # type: List[models.ReferenceType]
    policies=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    limit=None,  # type: int
    member_ids=None,  # type: List[str]
    member_names=None,  # type: List[str]
    policy_ids=None,  # type: List[str]
    policy_names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PolicyMembersGetResponse
    """
    Retrieves pairs of FlashBlade file system replica link members and their
    policies.

    Args:
        members (list[FixedReference], optional):
            Members to query for; overrides the `member_ids` and
            `member_names` keyword arguments.
        policies (list[FixedReference], optional):
            Policies to query for; overrides the `policy_ids` and
            `policy_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call. Generated by the server when
            not provided.
        continuation_token (str, optional):
            Opaque token for iterating over a collection of resources.
        filter (Filter, optional):
            Includes only resources that match the specified criteria.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            when not specified.
        member_ids (list[str], optional):
            Member IDs. An error is returned unless each `member_id` element
            matches at least one resource.
        member_names (list[str], optional):
            Member names. An error is returned unless each `member_name`
            element matches at least one resource.
        policy_ids (list[str], optional):
            Policy IDs. An error is returned unless each `policy_id` element
            matches at least one resource.
        policy_names (list[str], optional):
            Policy names. An error is returned unless each `policy_name`
            element matches at least one resource.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Properties to sort the response by. A single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    # Collect every keyword argument, then drop the unset ones so only
    # explicitly provided values are forwarded to the API layer.
    call_kwargs = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'limit': limit,
        'member_ids': member_ids,
        'member_names': member_names,
        'policy_ids': policy_ids,
        'policy_names': policy_names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    call_kwargs = {key: value for key, value in call_kwargs.items() if value is not None}
    # Reference objects, when supplied, replace the corresponding id/name kwargs.
    _process_references(members, ['member_ids', 'member_names'], call_kwargs)
    _process_references(policies, ['policy_ids', 'policy_names'], call_kwargs)
    return self._call_api(
        self._file_system_replica_links_api.api10_file_system_replica_links_policies_get_with_http_info,
        call_kwargs,
    )
def get_file_system_snapshots(
    self,
    references=None,  # type: List[models.ReferenceType]
    sources=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    source_ids=None,  # type: List[str]
    source_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.FileSystemSnapshotGetResponse
    """
    Retrieves snapshots of FlashBlade file systems.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names` keyword
            arguments.
        sources (list[FixedReference], optional):
            Sources to query for; overrides the `source_ids` and
            `source_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call. Generated by the server when
            not provided.
        continuation_token (str, optional):
            Opaque token for iterating over a collection of resources.
        filter (Filter, optional):
            Includes only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned unless each `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            when not specified.
        names (list[str], optional):
            Resource names. An error is returned unless each `name` element
            matches at least one resource.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Properties to sort the response by. A single element is also
            accepted.
        source_ids (list[str], optional):
            IDs for the source of the object. An error is returned unless
            each `source_id` element matches at least one resource.
        source_names (list[str], optional):
            Names for the source of the object. An error is returned unless
            each `source_name` element matches at least one resource.
        async_req (bool, optional):
            Run the request in a separate thread and return
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    # Collect every keyword argument, then drop the unset ones so only
    # explicitly provided values are forwarded to the API layer.
    call_kwargs = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'source_ids': source_ids,
        'source_names': source_names,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    call_kwargs = {key: value for key, value in call_kwargs.items() if value is not None}
    # Reference objects, when supplied, replace the corresponding id/name kwargs.
    _process_references(references, ['ids', 'names'], call_kwargs)
    _process_references(sources, ['source_ids', 'source_names'], call_kwargs)
    return self._call_api(
        self._file_system_snapshots_api.api10_file_system_snapshots_get_with_http_info,
        call_kwargs,
    )
def get_file_system_snapshots_policies(
    self,
    members=None,  # type: List[models.ReferenceType]
    policies=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    limit=None,  # type: int
    member_ids=None,  # type: List[str]
    member_names=None,  # type: List[str]
    policy_ids=None,  # type: List[str]
    policy_names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PolicyMembersGetResponse
    """
    Retrieves pairs of FlashBlade file system snapshot members and their
    policies.

    Args:
        members (list[FixedReference], optional):
            Members to query for; overrides the `member_ids` and
            `member_names` keyword arguments.
        policies (list[FixedReference], optional):
            Policies to query for; overrides the `policy_ids` and
            `policy_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call. Generated by the server when
            not provided.
        continuation_token (str, optional):
            Opaque token for iterating over a collection of resources.
        filter (Filter, optional):
            Includes only resources that match the specified criteria.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            when not specified.
        member_ids (list[str], optional):
            Member IDs. An error is returned unless each `member_id` element
            matches at least one resource.
        member_names (list[str], optional):
            Member names. An error is returned unless each `member_name`
            element matches at least one resource.
        policy_ids (list[str], optional):
            Policy IDs. An error is returned unless each `policy_id` element
            matches at least one resource.
        policy_names (list[str], optional):
            Policy names. An error is returned unless each `policy_name`
            element matches at least one resource.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Properties to sort the response by. A single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    # Collect every keyword argument, then drop the unset ones so only
    # explicitly provided values are forwarded to the API layer.
    call_kwargs = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'limit': limit,
        'member_ids': member_ids,
        'member_names': member_names,
        'policy_ids': policy_ids,
        'policy_names': policy_names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    call_kwargs = {key: value for key, value in call_kwargs.items() if value is not None}
    # Reference objects, when supplied, replace the corresponding id/name kwargs.
    _process_references(members, ['member_ids', 'member_names'], call_kwargs)
    _process_references(policies, ['policy_ids', 'policy_names'], call_kwargs)
    return self._call_api(
        self._file_system_snapshots_api.api10_file_system_snapshots_policies_get_with_http_info,
        call_kwargs,
    )
def get_file_systems(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.FileSystemGetResponse
    """
    Retrieves information about FlashArray and FlashBlade file system objects.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names` keyword
            arguments.
        x_request_id (str, optional):
            Header used to track the API call. Generated by the server when
            not provided.
        continuation_token (str, optional):
            Opaque token for iterating over a collection of resources.
        filter (Filter, optional):
            Includes only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned unless each `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            when not specified.
        names (list[str], optional):
            Resource names. An error is returned unless each `name` element
            matches at least one resource.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Properties to sort the response by. A single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    # Collect every keyword argument, then drop the unset ones so only
    # explicitly provided values are forwarded to the API layer.
    call_kwargs = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    call_kwargs = {key: value for key, value in call_kwargs.items() if value is not None}
    # Reference objects, when supplied, replace the corresponding id/name kwargs.
    _process_references(references, ['ids', 'names'], call_kwargs)
    return self._call_api(
        self._file_systems_api.api10_file_systems_get_with_http_info,
        call_kwargs,
    )
def get_file_systems_policies(
    self,
    members=None,  # type: List[models.ReferenceType]
    policies=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    limit=None,  # type: int
    member_ids=None,  # type: List[str]
    member_names=None,  # type: List[str]
    policy_ids=None,  # type: List[str]
    policy_names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PolicyMembersGetResponse
    """
    Retrieves pairs of FlashBlade file system members and their policies.

    Args:
        members (list[FixedReference], optional):
            Members to query for; overrides the `member_ids` and
            `member_names` keyword arguments.
        policies (list[FixedReference], optional):
            Policies to query for; overrides the `policy_ids` and
            `policy_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call. Generated by the server when
            not provided.
        continuation_token (str, optional):
            Opaque token for iterating over a collection of resources.
        filter (Filter, optional):
            Includes only resources that match the specified criteria.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            when not specified.
        member_ids (list[str], optional):
            Member IDs. An error is returned unless each `member_id` element
            matches at least one resource.
        member_names (list[str], optional):
            Member names. An error is returned unless each `member_name`
            element matches at least one resource.
        policy_ids (list[str], optional):
            Policy IDs. An error is returned unless each `policy_id` element
            matches at least one resource.
        policy_names (list[str], optional):
            Policy names. An error is returned unless each `policy_name`
            element matches at least one resource.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Properties to sort the response by. A single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    # Collect every keyword argument, then drop the unset ones so only
    # explicitly provided values are forwarded to the API layer.
    call_kwargs = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'limit': limit,
        'member_ids': member_ids,
        'member_names': member_names,
        'policy_ids': policy_ids,
        'policy_names': policy_names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    call_kwargs = {key: value for key, value in call_kwargs.items() if value is not None}
    # Reference objects, when supplied, replace the corresponding id/name kwargs.
    _process_references(members, ['member_ids', 'member_names'], call_kwargs)
    _process_references(policies, ['policy_ids', 'policy_names'], call_kwargs)
    return self._call_api(
        self._file_systems_api.api10_file_systems_policies_get_with_http_info,
        call_kwargs,
    )
def get_hardware(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.HardwareGetResponse
    """
    Retrieves information about hardware components.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names` keyword
            arguments.
        x_request_id (str, optional):
            Header used to track the API call. Generated by the server when
            not provided.
        continuation_token (str, optional):
            Opaque token for iterating over a collection of resources.
        filter (Filter, optional):
            Includes only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned unless each `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            when not specified.
        names (list[str], optional):
            Resource names. An error is returned unless each `name` element
            matches at least one resource.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Properties to sort the response by. A single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    # Collect every keyword argument, then drop the unset ones so only
    # explicitly provided values are forwarded to the API layer.
    call_kwargs = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    call_kwargs = {key: value for key, value in call_kwargs.items() if value is not None}
    # Reference objects, when supplied, replace the corresponding id/name kwargs.
    _process_references(references, ['ids', 'names'], call_kwargs)
    return self._call_api(
        self._hardware_api.api10_hardware_get_with_http_info,
        call_kwargs,
    )
def get_hardware_connectors(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.HardwareConnectorGetResponse
    """
    Retrieves information about FlashBlade hardware connectors.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names` keyword
            arguments.
        x_request_id (str, optional):
            Header used to track the API call. Generated by the server when
            not provided.
        continuation_token (str, optional):
            Opaque token for iterating over a collection of resources.
        filter (Filter, optional):
            Includes only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned unless each `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            when not specified.
        names (list[str], optional):
            Resource names. An error is returned unless each `name` element
            matches at least one resource.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Properties to sort the response by. A single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    # Collect every keyword argument, then drop the unset ones so only
    # explicitly provided values are forwarded to the API layer.
    call_kwargs = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    call_kwargs = {key: value for key, value in call_kwargs.items() if value is not None}
    # Reference objects, when supplied, replace the corresponding id/name kwargs.
    _process_references(references, ['ids', 'names'], call_kwargs)
    return self._call_api(
        self._hardware_connectors_api.api10_hardware_connectors_get_with_http_info,
        call_kwargs,
    )
def get_metrics(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    resource_types=None,  # type: List[str]
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.MetricGetResponse
    """
    Retrieves information about metrics that can be queried for.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names` keyword
            arguments.
        x_request_id (str, optional):
            Header used to track the API call. Generated by the server when
            not provided.
        continuation_token (str, optional):
            Opaque token for iterating over a collection of resources.
        filter (Filter, optional):
            Includes only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. An error is returned unless each `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            when not specified.
        names (list[str], optional):
            Resource names. An error is returned unless each `name` element
            matches at least one resource.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        resource_types (list[str], optional):
            Resource types to list the available metrics for. Valid values
            are `arrays`, `volumes`, and `pods`. A metric can belong to a
            combination of resources (e.g. write-iops from array to pod); in
            that case, query by ['arrays', 'pods'].
        sort (list[Property], optional):
            Properties to sort the response by. A single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    # Collect every keyword argument, then drop the unset ones so only
    # explicitly provided values are forwarded to the API layer.
    call_kwargs = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'resource_types': resource_types,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    call_kwargs = {key: value for key, value in call_kwargs.items() if value is not None}
    # Reference objects, when supplied, replace the corresponding id/name kwargs.
    _process_references(references, ['ids', 'names'], call_kwargs)
    return self._call_api(
        self._metrics_api.api10_metrics_get_with_http_info,
        call_kwargs,
    )
def get_metrics_history(
    self,
    references=None,  # type: List[models.ReferenceType]
    resources=None,  # type: List[models.ReferenceType]
    aggregation=None,  # type: str
    end_time=None,  # type: int
    resolution=None,  # type: int
    start_time=None,  # type: int
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    ids=None,  # type: List[str]
    names=None,  # type: List[str]
    resource_ids=None,  # type: List[str]
    resource_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.MetricHistoryGetResponse
    """
    Retrieves historical metric data for resources.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names` keyword
            arguments.
        resources (list[FixedReference], optional):
            Resources to query for; overrides the `resource_ids` and
            `resource_names` keyword arguments.
        aggregation (str, required):
            Aggregation applied to the metric data. Valid values are `avg`
            and `max`.
        end_time (int, required):
            End of the time window, in milliseconds since epoch.
        resolution (int, required):
            Duration between individual data points, in milliseconds.
        start_time (int, required):
            Start of the time window, in milliseconds since epoch.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server if
            not provided.
        ids (list[str], optional):
            REQUIRED: either `ids` or `names`. Object IDs. Every `id`
            element must match at least one resource, otherwise an error is
            returned.
        names (list[str], optional):
            REQUIRED: either `names` or `ids`. Resource names. Every `name`
            element must match at least one resource, otherwise an error is
            returned.
        resource_ids (list[str], optional):
            REQUIRED: either `resource_ids` or `resource_names`. Resource
            IDs. Every `resource_id` element must match at least one
            resource, otherwise an error is returned.
        resource_names (list[str], optional):
            REQUIRED: either `resource_ids` or `resource_names`. Resource
            names. Every `resource_name` element must match at least one
            resource, otherwise an error is returned.
        async_req (bool, optional):
            If True, the request runs in a separate thread and the method
            returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'aggregation': aggregation,
        'end_time': end_time,
        'resolution': resolution,
        'start_time': start_time,
        'authorization': authorization,
        'x_request_id': x_request_id,
        'ids': ids,
        'names': names,
        'resource_ids': resource_ids,
        'resource_names': resource_names,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Drop unset parameters so only explicit values reach the API layer.
    params = {key: value for key, value in params.items() if value is not None}
    _process_references(references, ['ids', 'names'], params)
    _process_references(resources, ['resource_ids', 'resource_names'], params)
    return self._call_api(
        self._metrics_api.api10_metrics_history_get_with_http_info, params)
def get_network_interfaces(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.NetworkInterfaceGetResponse
    """
    Retrieves information about physical and virtual network interface objects.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names` keyword
            arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server if
            not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. Every `id` element must match at least one
            resource, otherwise an error is returned.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            if not specified.
        names (list[str], optional):
            Resource names. Every `name` element must match at least one
            resource, otherwise an error is returned.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Sort the response by the specified Properties. May also be a
            single element.
        async_req (bool, optional):
            If True, the request runs in a separate thread and the method
            returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Drop unset parameters so only explicit values reach the API layer.
    params = {key: value for key, value in params.items() if value is not None}
    _process_references(references, ['ids', 'names'], params)
    return self._call_api(
        self._network_interfaces_api.api10_network_interfaces_get_with_http_info,
        params)
def get_object_store_accounts(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ObjectStoreAccountGetResponse
    """
    Retrieves object store accounts.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names` keyword
            arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server if
            not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. Every `id` element must match at least one
            resource, otherwise an error is returned.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            if not specified.
        names (list[str], optional):
            Resource names. Every `name` element must match at least one
            resource, otherwise an error is returned.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Sort the response by the specified Properties. May also be a
            single element.
        async_req (bool, optional):
            If True, the request runs in a separate thread and the method
            returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Drop unset parameters so only explicit values reach the API layer.
    params = {key: value for key, value in params.items() if value is not None}
    _process_references(references, ['ids', 'names'], params)
    return self._call_api(
        self._object_store_accounts_api.api10_object_store_accounts_get_with_http_info,
        params)
def get_pod_replica_links(
    self,
    references=None,  # type: List[models.ReferenceType]
    members=None,  # type: List[models.ReferenceType]
    sources=None,  # type: List[models.ReferenceType]
    targets=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    member_ids=None,  # type: List[str]
    member_names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    source_ids=None,  # type: List[str]
    source_names=None,  # type: List[str]
    target_ids=None,  # type: List[str]
    target_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PodReplicaLinkGetResponse
    """
    Retrieves information about pod replica links.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` keyword argument.
        members (list[FixedReference], optional):
            Members to query for; overrides the `member_ids` and
            `member_names` keyword arguments.
        sources (list[FixedReference], optional):
            Sources to query for; overrides the `source_ids` and
            `source_names` keyword arguments.
        targets (list[FixedReference], optional):
            Targets to query for; overrides the `target_ids` and
            `target_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server if
            not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. Every `id` element must match at least one
            resource, otherwise an error is returned.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            if not specified.
        member_ids (list[str], optional):
            Member IDs. IDs joined with `+` must all be present in an
            element; IDs joined with `,` mean at least one must be present.
            Every `member_id` element must match at least one resource,
            otherwise an error is returned. In Swagger's Try it Out, a
            `+`-separated list must be entered in a single item cell.
        member_names (list[str], optional):
            Member names. Names joined with `+` must all be present in an
            element; names joined with `,` mean at least one must be
            present. Every `member_name` element must match at least one
            resource, otherwise an error is returned. In Swagger's Try it
            Out, a `+`-separated list must be entered in a single item cell.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Sort the response by the specified Properties. May also be a
            single element.
        source_ids (list[str], optional):
            Source IDs. IDs joined with `+` must all be present in an
            element; IDs joined with `,` mean at least one must be present.
            Every `source_id` element must match at least one resource,
            otherwise an error is returned. In Swagger's Try it Out, a
            `+`-separated list must be entered in a single item cell.
        source_names (list[str], optional):
            Source names. Names joined with `+` must all be present in an
            element; names joined with `,` mean at least one must be
            present. Every `source_name` element must match at least one
            resource, otherwise an error is returned. In Swagger's Try it
            Out, a `+`-separated list must be entered in a single item cell.
        target_ids (list[str], optional):
            Target IDs. IDs joined with `+` must all be present in an
            element; IDs joined with `,` mean at least one must be present.
            Every `target_id` element must match at least one resource,
            otherwise an error is returned. In Swagger's Try it Out, a
            `+`-separated list must be entered in a single item cell.
        target_names (list[str], optional):
            Target names. Names joined with `+` must all be present in an
            element; names joined with `,` mean at least one must be
            present. Every `target_name` element must match at least one
            resource, otherwise an error is returned. In Swagger's Try it
            Out, a `+`-separated list must be entered in a single item cell.
        async_req (bool, optional):
            If True, the request runs in a separate thread and the method
            returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'member_ids': member_ids,
        'member_names': member_names,
        'offset': offset,
        'sort': sort,
        'source_ids': source_ids,
        'source_names': source_names,
        'target_ids': target_ids,
        'target_names': target_names,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Drop unset parameters so only explicit values reach the API layer.
    params = {key: value for key, value in params.items() if value is not None}
    # Note: pod replica links are addressed by id only, so `references`
    # populates just 'ids' (no 'names' field for this endpoint).
    _process_references(references, ['ids'], params)
    _process_references(members, ['member_ids', 'member_names'], params)
    _process_references(sources, ['source_ids', 'source_names'], params)
    _process_references(targets, ['target_ids', 'target_names'], params)
    return self._call_api(
        self._pod_replica_links_api.api10_pod_replica_links_get_with_http_info,
        params)
def get_pods(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PodGetResponse
    """
    Retrieves information about pod objects.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names` keyword
            arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server if
            not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. Every `id` element must match at least one
            resource, otherwise an error is returned.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            if not specified.
        names (list[str], optional):
            Resource names. Every `name` element must match at least one
            resource, otherwise an error is returned.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Sort the response by the specified Properties. May also be a
            single element.
        async_req (bool, optional):
            If True, the request runs in a separate thread and the method
            returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Drop unset parameters so only explicit values reach the API layer.
    params = {key: value for key, value in params.items() if value is not None}
    _process_references(references, ['ids', 'names'], params)
    return self._call_api(
        self._pods_api.api10_pods_get_with_http_info, params)
def get_policies_file_system_replica_links(
    self,
    members=None,  # type: List[models.ReferenceType]
    policies=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    limit=None,  # type: int
    member_ids=None,  # type: List[str]
    member_names=None,  # type: List[str]
    policy_ids=None,  # type: List[str]
    policy_names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PolicyMembersGetResponse
    """
    Retrieves pairs of policy references and their FlashBlade file system
    replica link members.

    Args:
        members (list[FixedReference], optional):
            Members to query for; overrides the `member_ids` and
            `member_names` keyword arguments.
        policies (list[FixedReference], optional):
            Policies to query for; overrides the `policy_ids` and
            `policy_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server if
            not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            if not specified.
        member_ids (list[str], optional):
            Member IDs. Every `member_id` element must match at least one
            resource, otherwise an error is returned.
        member_names (list[str], optional):
            Member names. Every `member_name` element must match at least
            one resource, otherwise an error is returned.
        policy_ids (list[str], optional):
            Policy IDs. Every `policy_id` element must match at least one
            resource, otherwise an error is returned.
        policy_names (list[str], optional):
            Policy names. Every `policy_name` element must match at least
            one resource, otherwise an error is returned.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Sort the response by the specified Properties. May also be a
            single element.
        async_req (bool, optional):
            If True, the request runs in a separate thread and the method
            returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'limit': limit,
        'member_ids': member_ids,
        'member_names': member_names,
        'policy_ids': policy_ids,
        'policy_names': policy_names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Drop unset parameters so only explicit values reach the API layer.
    params = {key: value for key, value in params.items() if value is not None}
    _process_references(members, ['member_ids', 'member_names'], params)
    _process_references(policies, ['policy_ids', 'policy_names'], params)
    return self._call_api(
        self._policies_api.api10_policies_file_system_replica_links_get_with_http_info,
        params)
def get_policies_file_system_snapshots(
    self,
    members=None,  # type: List[models.ReferenceType]
    policies=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    limit=None,  # type: int
    member_ids=None,  # type: List[str]
    member_names=None,  # type: List[str]
    policy_ids=None,  # type: List[str]
    policy_names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PolicyMembersGetResponse
    """
    Retrieves pairs of policy references and their FlashBlade file system
    snapshot members.

    Args:
        members (list[FixedReference], optional):
            Members to query for; overrides the `member_ids` and
            `member_names` keyword arguments.
        policies (list[FixedReference], optional):
            Policies to query for; overrides the `policy_ids` and
            `policy_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server if
            not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            if not specified.
        member_ids (list[str], optional):
            Member IDs. Every `member_id` element must match at least one
            resource, otherwise an error is returned.
        member_names (list[str], optional):
            Member names. Every `member_name` element must match at least
            one resource, otherwise an error is returned.
        policy_ids (list[str], optional):
            Policy IDs. Every `policy_id` element must match at least one
            resource, otherwise an error is returned.
        policy_names (list[str], optional):
            Policy names. Every `policy_name` element must match at least
            one resource, otherwise an error is returned.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Sort the response by the specified Properties. May also be a
            single element.
        async_req (bool, optional):
            If True, the request runs in a separate thread and the method
            returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'limit': limit,
        'member_ids': member_ids,
        'member_names': member_names,
        'policy_ids': policy_ids,
        'policy_names': policy_names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Drop unset parameters so only explicit values reach the API layer.
    params = {key: value for key, value in params.items() if value is not None}
    _process_references(members, ['member_ids', 'member_names'], params)
    _process_references(policies, ['policy_ids', 'policy_names'], params)
    return self._call_api(
        self._policies_api.api10_policies_file_system_snapshots_get_with_http_info,
        params)
def get_policies_file_systems(
    self,
    members=None,  # type: List[models.ReferenceType]
    policies=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    limit=None,  # type: int
    member_ids=None,  # type: List[str]
    member_names=None,  # type: List[str]
    policy_ids=None,  # type: List[str]
    policy_names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PolicyMembersGetResponse
    """
    Retrieves pairs of policy references and their FlashBlade file system
    members.

    Args:
        members (list[FixedReference], optional):
            Members to query for; overrides the `member_ids` and
            `member_names` keyword arguments.
        policies (list[FixedReference], optional):
            Policies to query for; overrides the `policy_ids` and
            `policy_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server if
            not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            if not specified.
        member_ids (list[str], optional):
            Member IDs. Every `member_id` element must match at least one
            resource, otherwise an error is returned.
        member_names (list[str], optional):
            Member names. Every `member_name` element must match at least
            one resource, otherwise an error is returned.
        policy_ids (list[str], optional):
            Policy IDs. Every `policy_id` element must match at least one
            resource, otherwise an error is returned.
        policy_names (list[str], optional):
            Policy names. Every `policy_name` element must match at least
            one resource, otherwise an error is returned.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Sort the response by the specified Properties. May also be a
            single element.
        async_req (bool, optional):
            If True, the request runs in a separate thread and the method
            returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'limit': limit,
        'member_ids': member_ids,
        'member_names': member_names,
        'policy_ids': policy_ids,
        'policy_names': policy_names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Drop unset parameters so only explicit values reach the API layer.
    params = {key: value for key, value in params.items() if value is not None}
    _process_references(members, ['member_ids', 'member_names'], params)
    _process_references(policies, ['policy_ids', 'policy_names'], params)
    return self._call_api(
        self._policies_api.api10_policies_file_systems_get_with_http_info,
        params)
def get_policies(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PolicyGetResponse
    """
    Retrieves policies and their rules.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names` keyword
            arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server if
            not provided.
        continuation_token (str, optional):
            An opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Include only resources that match the specified criteria.
        ids (list[str], optional):
            Resource IDs. Every `id` element must match at least one
            resource, otherwise an error is returned.
        limit (int, optional):
            Maximum number of resources in the response. Defaults to 1000
            if not specified.
        names (list[str], optional):
            Resource names. Every `name` element must match at least one
            resource, otherwise an error is returned.
        offset (int, optional):
            Offset of the first resource to return from a collection.
        sort (list[Property], optional):
            Sort the response by the specified Properties. May also be a
            single element.
        async_req (bool, optional):
            If True, the request runs in a separate thread and the method
            returns a multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    params = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Drop unset parameters so only explicit values reach the API layer.
    params = {key: value for key, value in params.items() if value is not None}
    _process_references(references, ['ids', 'names'], params)
    return self._call_api(
        self._policies_api.api10_policies_get_with_http_info, params)
def get_policies_members(
    self,
    members=None,  # type: List[models.ReferenceType]
    policies=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    limit=None,  # type: int
    member_ids=None,  # type: List[str]
    member_names=None,  # type: List[str]
    policy_ids=None,  # type: List[str]
    policy_names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PolicyMembersGetResponse
    """
    Retrieves pairs of policy references and their members.

    Args:
        members (list[FixedReference], optional):
            Members to query for; overrides the `member_ids` and
            `member_names` keyword arguments.
        policies (list[FixedReference], optional):
            Policies to query for; overrides the `policy_ids` and
            `policy_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            Opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Criteria restricting which resources are included.
        limit (int, optional):
            Maximum number of resources in the response; defaults to 1000.
        member_ids (list[str], optional):
            Member IDs; an error is returned unless every `member_id`
            element matches at least one resource.
        member_names (list[str], optional):
            Member names; an error is returned unless every `member_name`
            element matches at least one resource.
        policy_ids (list[str], optional):
            Policy IDs; an error is returned unless every `policy_id`
            element matches at least one resource.
        policy_names (list[str], optional):
            Policy names; an error is returned unless every `policy_name`
            element matches at least one resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return a
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    opts = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'limit': limit,
        'member_ids': member_ids,
        'member_names': member_names,
        'policy_ids': policy_ids,
        'policy_names': policy_names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Strip unset options so only explicit arguments reach the endpoint.
    opts = {key: value for key, value in opts.items() if value is not None}
    # Reference objects take precedence over raw id/name keyword arguments.
    _process_references(members, ['member_ids', 'member_names'], opts)
    _process_references(policies, ['policy_ids', 'policy_names'], opts)
    return self._call_api(
        self._policies_api.api10_policies_members_get_with_http_info, opts)
def get_ports(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.PortGetResponse
    """
    Retrieves information about FlashArray ports.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            Opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Criteria restricting which resources are included.
        ids (list[str], optional):
            Resource IDs; an error is returned unless every `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to 1000.
        names (list[str], optional):
            Resource names; an error is returned unless every `name`
            element matches at least one resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return a
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    opts = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Strip unset options so only explicit arguments reach the endpoint.
    opts = {key: value for key, value in opts.items() if value is not None}
    # Reference objects take precedence over raw id/name keyword arguments.
    _process_references(references, ['ids', 'names'], opts)
    return self._call_api(self._ports_api.api10_ports_get_with_http_info, opts)
def get_subscription_licenses(
    self,
    references=None,  # type: List[models.ReferenceType]
    marketplace_partner_references=None,  # type: List[models.ReferenceType]
    subscriptions=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    marketplace_partner_reference_ids=None,  # type: List[str]
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    subscription_ids=None,  # type: List[str]
    subscription_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.SubscriptionLicenseGetResponse
    """
    Retrieves information about Pure1 subscription licenses.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        marketplace_partner_references (list[FixedReference], optional):
            Marketplace partner references to query for; overrides the
            `marketplace_partner_reference_ids` keyword argument.
        subscriptions (list[FixedReference], optional):
            Subscriptions to query for; overrides the `subscription_ids`
            and `subscription_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            Opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Criteria restricting which resources are included.
        ids (list[str], optional):
            Resource IDs; an error is returned unless every `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to 1000.
        marketplace_partner_reference_ids (list[str], optional):
            Marketplace partner reference IDs; an error is returned unless
            every `marketplace_partner.reference_id` element matches at
            least one resource.
        names (list[str], optional):
            Resource names; an error is returned unless every `name`
            element matches at least one resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        subscription_ids (list[str], optional):
            Subscription IDs; an error is returned unless every
            `subscription.id` element matches at least one resource.
        subscription_names (list[str], optional):
            Subscription names; an error is returned unless every
            `subscription.name` element matches at least one resource.
        async_req (bool, optional):
            Run the request in a separate thread and return a
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    opts = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'marketplace_partner_reference_ids': marketplace_partner_reference_ids,
        'names': names,
        'offset': offset,
        'sort': sort,
        'subscription_ids': subscription_ids,
        'subscription_names': subscription_names,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Strip unset options so only explicit arguments reach the endpoint.
    opts = {key: value for key, value in opts.items() if value is not None}
    # Reference objects take precedence over raw id/name keyword arguments.
    _process_references(references, ['ids', 'names'], opts)
    _process_references(marketplace_partner_references,
                        ['marketplace_partner_reference_ids'], opts)
    _process_references(subscriptions,
                        ['subscription_ids', 'subscription_names'], opts)
    return self._call_api(
        self._subscriptions_api.api10_subscription_licenses_get_with_http_info,
        opts)
def get_subscriptions(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.SubscriptionGetResponse
    """
    Retrieves information about Pure1 subscriptions.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            Opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Criteria restricting which resources are included.
        ids (list[str], optional):
            Resource IDs; an error is returned unless every `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to 1000.
        names (list[str], optional):
            Resource names; an error is returned unless every `name`
            element matches at least one resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return a
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    opts = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Strip unset options so only explicit arguments reach the endpoint.
    opts = {key: value for key, value in opts.items() if value is not None}
    # Reference objects take precedence over raw id/name keyword arguments.
    _process_references(references, ['ids', 'names'], opts)
    return self._call_api(
        self._subscriptions_api.api10_subscriptions_get_with_http_info, opts)
def get_targets(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.TargetGetResponse
    """
    Retrieves information about targets.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            Opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Criteria restricting which resources are included.
        ids (list[str], optional):
            Resource IDs; an error is returned unless every `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to 1000.
        names (list[str], optional):
            Resource names; an error is returned unless every `name`
            element matches at least one resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return a
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    opts = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Strip unset options so only explicit arguments reach the endpoint.
    opts = {key: value for key, value in opts.items() if value is not None}
    # Reference objects take precedence over raw id/name keyword arguments.
    _process_references(references, ['ids', 'names'], opts)
    return self._call_api(
        self._targets_api.api10_targets_get_with_http_info, opts)
def get_volume_snapshots(
    self,
    references=None,  # type: List[models.ReferenceType]
    sources=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    source_ids=None,  # type: List[str]
    source_names=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.VolumeSnapshotGetResponse
    """
    Retrieves information about snapshots of volumes.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        sources (list[FixedReference], optional):
            Sources to query for; overrides the `source_ids` and
            `source_names` keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            Opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Criteria restricting which resources are included.
        ids (list[str], optional):
            Resource IDs; an error is returned unless every `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to 1000.
        names (list[str], optional):
            Resource names; an error is returned unless every `name`
            element matches at least one resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        source_ids (list[str], optional):
            IDs of the source of the object; an error is returned unless
            every `source_id` element matches at least one resource.
        source_names (list[str], optional):
            Names of the source of the object; an error is returned unless
            every `source_name` element matches at least one resource.
        async_req (bool, optional):
            Run the request in a separate thread and return a
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    opts = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'source_ids': source_ids,
        'source_names': source_names,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Strip unset options so only explicit arguments reach the endpoint.
    opts = {key: value for key, value in opts.items() if value is not None}
    # Reference objects take precedence over raw id/name keyword arguments.
    _process_references(references, ['ids', 'names'], opts)
    _process_references(sources, ['source_ids', 'source_names'], opts)
    return self._call_api(
        self._volume_snapshots_api.api10_volume_snapshots_get_with_http_info,
        opts)
def get_volumes(
    self,
    references=None,  # type: List[models.ReferenceType]
    authorization=None,  # type: str
    x_request_id=None,  # type: str
    continuation_token=None,  # type: str
    filter=None,  # type: str
    ids=None,  # type: List[str]
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.VolumeGetResponse
    """
    Retrieves information about volume objects.

    Args:
        references (list[FixedReference], optional):
            References to query for; overrides the `ids` and `names`
            keyword arguments.
        x_request_id (str, optional):
            Header used to track the API call; generated by the server
            when not provided.
        continuation_token (str, optional):
            Opaque token used to iterate over a collection of resources.
        filter (Filter, optional):
            Criteria restricting which resources are included.
        ids (list[str], optional):
            Resource IDs; an error is returned unless every `id` element
            matches at least one resource.
        limit (int, optional):
            Maximum number of resources in the response; defaults to 1000.
        names (list[str], optional):
            Resource names; an error is returned unless every `name`
            element matches at least one resource.
        offset (int, optional):
            Offset of the first resource returned from a collection.
        sort (list[Property], optional):
            Properties to sort the response by; a single element is also
            accepted.
        async_req (bool, optional):
            Run the request in a separate thread and return a
            multiprocessing.pool.ApplyResult.
        _return_http_data_only (bool, optional):
            Return only the data field.
        _preload_content (bool, optional):
            Convert the response into objects.
        _request_timeout (int, optional):
            Total request timeout in seconds.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    opts = {
        'authorization': authorization,
        'x_request_id': x_request_id,
        'continuation_token': continuation_token,
        'filter': filter,
        'ids': ids,
        'limit': limit,
        'names': names,
        'offset': offset,
        'sort': sort,
        'async_req': async_req,
        '_return_http_data_only': _return_http_data_only,
        '_preload_content': _preload_content,
        '_request_timeout': _request_timeout,
    }
    # Strip unset options so only explicit arguments reach the endpoint.
    opts = {key: value for key, value in opts.items() if value is not None}
    # Reference objects take precedence over raw id/name keyword arguments.
    _process_references(references, ['ids', 'names'], opts)
    return self._call_api(
        self._volumes_api.api10_volumes_get_with_http_info, opts)
def _set_agent_header(self):
    """
    Install this client's user-agent string as the default User-Agent
    header of the internal API client.
    """
    agent = self.USER_AGENT
    self._api_client.set_default_header('User-Agent', agent)
def _set_auth_header(self, refresh=False):
    """
    Install the Authorization header on the internal API client using the
    current access token.

    Args:
        refresh (bool, optional): Whether to retrieve a new access token.
            Defaults to False.

    Raises:
        PureError: If there was an error retrieving the access token.
    """
    header_value = self._token_man.get_header(refresh=refresh)
    self._api_client.set_default_header('Authorization', header_value)
def _call_api(self, api_function, kwargs):
    """
    Call the API function and process the response. May call the API
    repeatedly if the request failed for a reason that may not persist in
    the next call.

    Args:
        api_function (function): Swagger-generated function to call.
        kwargs (dict): kwargs to pass to the function.

    Returns:
        ValidResponse: If the call was successful.
        ErrorResponse: If the call was not successful.

    Raises:
        PureError: If calling the API fails.
        ValueError: If a parameter is of an invalid type.
        TypeError: If invalid or missing parameters are used.
    """
    kwargs['_request_timeout'] = self._timeout
    retries = self._retries
    while True:
        try:
            response = api_function(**kwargs)
            # Call was successful (200)
            return self._create_valid_response(response, api_function, kwargs)
        except ApiException as error:
            # If no chance for retries, return the error
            if retries == 0:
                return self._create_error_response(error)
            # If bad request or not found, return the error (it will never work)
            elif error.status in [400, 404]:
                return self._create_error_response(error)
            # If authentication error, reset access token and retry
            elif error.status == 403:
                self._set_auth_header(refresh=True)
            # If rate limit error, wait the proper time and try again
            elif error.status == 429:
                # If the minute limit hit, wait that long
                if (int(error.headers.get(Headers.x_ratelimit_remaining_min))
                        == int(error.headers.get(Headers.x_ratelimit_min))):
                    time.sleep(60)
                else:
                    # Otherwise it was the second limit and only wait a second.
                    # (Previously this sleep ran unconditionally, adding an
                    # extra second on top of the 60-second minute-limit wait.)
                    time.sleep(1)
            # If some internal server error we know nothing about, return
            elif error.status == 500:
                return self._create_error_response(error)
            # If internal server errors that has to do with timeouts, try again
            elif error.status > 500:
                pass
            # If error with the swagger client, raise the error
            else:
                raise PureError(error)
            retries = retries - 1
def _create_valid_response(self, response, endpoint, kwargs):
    """
    Build a ValidResponse out of a raw Swagger response.

    Args:
        response (tuple): Body, status, header tuple as returned from a
            Swagger client.
        endpoint (function): The function of the Swagger client that was
            called.
        kwargs (dict): The processed kwargs that were passed to the
            endpoint function.

    Returns:
        ValidResponse
    """
    body, status, headers = response
    continuation_token = None
    total_item_count = None
    items = None
    if body is not None:
        continuation_token = getattr(body, "continuation_token", None)
        total_item_count = getattr(body, "total_item_count", None)
        # Only *-get-response models declare a "continuation_token"
        # attribute; those are the paged responses. For them the flag is
        # None so ItemIterator ignores it; other models are never paged.
        paged = "continuation_token" in body.attribute_map
        more_items_remaining = None if paged else False
        items = iter(ItemIterator(
            client=self,
            api_endpoint=endpoint,
            kwargs=kwargs,
            continuation_token=continuation_token,
            total_item_count=total_item_count,
            items=body.items,
            x_request_id=headers.get(Headers.x_request_id),
            more_items_remaining=more_items_remaining,
        ))
    return ValidResponse(
        status_code=status,
        continuation_token=continuation_token,
        total_item_count=total_item_count,
        items=items,
        headers=headers,
    )
def _create_error_response(self, error):
    """
    Create an ErrorResponse from a Swagger error.

    Args:
        error (ApiException):
            Error returned by Swagger client.

    Returns:
        ErrorResponse
    """
    status = error.status
    body = json.loads(error.body)
    if status in [403, 429]:
        # Parse differently if the error message came from kong
        errors = [ApiError(None, body.get(Responses.message, None))]
    else:
        # Default to an empty list: a body without an "errors" field
        # previously made this iterate over None and raise a TypeError
        # while reporting the original failure.
        errors = [ApiError(err.get(Responses.context, None),
                           err.get(Responses.message, None))
                  for err in body.get(Responses.errors, [])]
    return ErrorResponse(status, errors, headers=error.headers)
def _process_references(references, params, kwargs):
"""
Process reference objects into a list of ids or names.
Removes ids and names arguments.
Args:
references (list[FixedReference]):
The references from which to extract ids or names.
params (list[Parameter]):
The parameters to be overridden.
kwargs (dict):
The kwargs to process.
Raises:
PureError: If a reference does not have an id or name.
"""
if references is not None:
if not isinstance(references, list):
references = [references]
for param in params:
kwargs.pop(param, None)
all_have_id = all(getattr(ref, 'id', None) is not None for ref in references)
all_have_name = all(getattr(ref, 'name', None) is not None for ref in references)
id_param = [param for param in params if param.endswith("ids")]
name_param = [param for param in params if param.endswith("names")]
if all_have_id and len(id_param) > 0:
kwargs[id_param[0]] = [getattr(ref, 'id') for ref in references]
elif all_have_name and len(name_param) > 0:
kwargs[name_param[0]] = [getattr(ref, 'name') for ref in references]
else:
raise PureError('Invalid reference for {}'.format(", ".join(params)))
| 46.460342 | 133 | 0.601459 | 20,895 | 179,244 | 5.001579 | 0.024408 | 0.036361 | 0.024917 | 0.024544 | 0.905127 | 0.896984 | 0.888678 | 0.882784 | 0.87955 | 0.876603 | 0 | 0.002424 | 0.332441 | 179,244 | 3,857 | 134 | 46.472388 | 0.87098 | 0.571813 | 0 | 0.817412 | 0 | 0 | 0.022739 | 0.002007 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027811 | false | 0.004232 | 0.00786 | 0 | 0.071947 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6b2b6e3b8d343074a21cd03c18be07856a777875 | 2,738 | py | Python | tests/test_gemm_fusion.py | openppl-public/ppq | 0fdea7d4982bc57feb6bb8548c7f012707fbd607 | [
"Apache-2.0"
] | 100 | 2021-12-31T09:34:06.000Z | 2022-03-25T02:54:51.000Z | tests/test_gemm_fusion.py | openppl-public/ppq | 0fdea7d4982bc57feb6bb8548c7f012707fbd607 | [
"Apache-2.0"
] | 12 | 2021-12-31T10:28:15.000Z | 2022-03-31T07:08:44.000Z | tests/test_gemm_fusion.py | openppl-public/ppq | 0fdea7d4982bc57feb6bb8548c7f012707fbd607 | [
"Apache-2.0"
] | 21 | 2021-12-31T09:51:02.000Z | 2022-03-30T12:21:55.000Z | from ppq import *
from ppq.IR.morph import GraphMerger
from ppq.api import *
import torch
graph = BaseGraph(name='test', built_from=NetworkFramework.ONNX)
matmul = \
graph.create_operation(op_type='Matmul', name='matmul',
platform=TargetPlatform.UNSPECIFIED,
inputs=[graph.create_variable(), graph.create_variable(is_parameter=True, value=torch.zeros(size=[10, 10]))],
outputs=[graph.create_variable()])
graph.create_operation(op_type='Add', name='add', platform=TargetPlatform.UNSPECIFIED,
inputs=[matmul.outputs[0], graph.create_variable(is_parameter=True, value=torch.zeros(size=[10, ]))],
outputs=[graph.create_variable()])
processor = GraphMerger(graph)
processor.fuse_gemm()
assert len(graph.operations) == 1
assert len(graph.operations['matmul'].inputs) == 3
assert graph.operations['matmul'].type == 'Gemm'
graph = BaseGraph(name='test', built_from=NetworkFramework.ONNX)
matmul = \
graph.create_operation(op_type='Matmul', name='matmul',
platform=TargetPlatform.UNSPECIFIED,
inputs=[graph.create_variable(), graph.create_variable(is_parameter=True, value=torch.zeros(size=[10, 10]))],
outputs=[graph.create_variable()])
test = \
graph.create_operation(op_type='Test', name='test', platform=TargetPlatform.UNSPECIFIED,
inputs=[], outputs=[graph.create_variable()])
graph.create_operation(op_type='Add', name='add', platform=TargetPlatform.UNSPECIFIED,
inputs=[matmul.outputs[0], test.outputs[0]],
outputs=[graph.create_variable()])
processor = GraphMerger(graph)
processor.fuse_gemm()
assert len(graph.operations) == 3
assert len(graph.operations['matmul'].inputs) == 2
assert graph.operations['matmul'].type == 'Gemm'
graph = BaseGraph(name='test', built_from=NetworkFramework.ONNX)
matmul = \
graph.create_operation(op_type='Matmul', name='matmul',
platform=TargetPlatform.UNSPECIFIED,
inputs=[graph.create_variable(), graph.create_variable(is_parameter=True, value=torch.zeros(size=[10, 10]))],
outputs=[graph.create_variable()])
graph.create_operation(op_type='Add', name='add', platform=TargetPlatform.UNSPECIFIED,
inputs=[matmul.outputs[0], graph.create_variable(is_parameter=True, value=torch.zeros(size=[1, ]))],
outputs=[graph.create_variable()])
processor = GraphMerger(graph)
processor.fuse_gemm()
assert len(graph.operations) == 2
assert len(graph.operations['matmul'].inputs) == 2
assert graph.operations['matmul'].type == 'Gemm' | 48.892857 | 132 | 0.665084 | 306 | 2,738 | 5.820261 | 0.143791 | 0.135879 | 0.160022 | 0.086468 | 0.919708 | 0.905109 | 0.884896 | 0.884896 | 0.884896 | 0.884896 | 0 | 0.011292 | 0.191381 | 2,738 | 56 | 133 | 48.892857 | 0.793135 | 0 | 0 | 0.714286 | 0 | 0 | 0.044542 | 0 | 0 | 0 | 0 | 0 | 0.183673 | 1 | 0 | false | 0 | 0.081633 | 0 | 0.081633 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8672d71454d8b9d7ccb09311a57918bffcddaa27 | 5,397 | py | Python | src/copter.py | k-fligen/drone_contest | 219babe20dad01d41a0f0cc3d790c01cb5ecf43d | [
"MIT"
] | null | null | null | src/copter.py | k-fligen/drone_contest | 219babe20dad01d41a0f0cc3d790c01cb5ecf43d | [
"MIT"
] | 3 | 2019-10-12T16:21:37.000Z | 2019-10-12T16:23:57.000Z | src/copter.py | k-fligen/drone_contest | 219babe20dad01d41a0f0cc3d790c01cb5ecf43d | [
"MIT"
] | 2 | 2019-10-17T04:17:17.000Z | 2020-02-17T04:30:31.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from mode import mode
from dronekit import connect, VehicleMode
from servo import servo
import time
from detect import detect
from scheduler import Scheduler
from rocking_wings import rocking_wings
from led import led
class copter():
    """Flight-controller glue: ties the DroneKit vehicle link to camera
    detection, LED feedback and the rocking-wings motion thread, driven
    from a fixed-period polling loop.

    Two operating variants exist:
      * ``setup`` / ``loop``                    -- *play* the rocking-wings motion
      * ``setup_read_motion`` / ``loop_read_motion`` -- *record* it
    """

    def setup(self):
        """Connect and initialize; the rocking-wings thread plays the motion."""
        self._init_common(read_motion=False)

    def setup_read_motion(self):
        """Connect and initialize; the rocking-wings thread records the motion."""
        self._init_common(read_motion=True)

    def _init_common(self, read_motion):
        """Shared initialization for both setup variants.

        :param bool read_motion: when True schedule ``ROCK.read_motion``
            (recording) instead of ``ROCK.run`` (playback).
        """
        print("Connecting")
        # Serial telemetry link to the flight controller.
        self.vehicle = connect('/dev/ttyS0', wait_ready=True, baud=57600,
                               rate=2, use_native=True, heartbeat_timeout=-1)
        self.MODE = mode(self.vehicle)
        self.CAMERA = detect()
        self.led = led()
        self.ROCK = rocking_wings(self.vehicle)
        self.count = 0        # consecutive RC-safety iterations seen so far
        self.flag = 0         # becomes True after flag_count safety iterations
        self.flag_count = 25  # with dt=0.1 this is ~2.5 s of RC-safety
        self.dt = 0.1         # target loop period in seconds
        self.RED_CIRCLE = 0
        self.BLUE_SQUARE = 0
        self.mode_thread = Scheduler(self.MODE.updateMode, 0.5)
        rock_target = self.ROCK.read_motion if read_motion else self.ROCK.run
        self.rock_thread = Scheduler(rock_target, 0.25)
        self.mode_thread.start()
        print("Complete Initial Setup")  # fixed typo: was "Complite"
        self.led.flash_second(3)

    def loop(self):
        """One polling iteration in playback mode; returns the safety flag."""
        return self._loop_step(read_motion=False)

    def loop_read_motion(self):
        """One polling iteration in recording mode; returns the safety flag."""
        return self._loop_step(read_motion=True)

    def _loop_step(self, read_motion):
        """Shared loop body for both variants.

        Handles camera detection, LED state, starting/stopping the
        rocking-wings thread, and the RC-safety counter.  Sleeps so each
        iteration takes at least ``self.dt`` seconds.

        :return: ``self.flag`` (truthy once the safety condition persisted).
        """
        t = time.time()
        if self.MODE.CAMERA and not self.MODE.ROCKING_WINGS:
            # Detect circle and square markers; mirror the result on the LEDs.
            self.RED_CIRCLE, self.BLUE_SQUARE = self.CAMERA.detect_all()
            self.led.blink(self.RED_CIRCLE, self.BLUE_SQUARE)
        if not self.MODE.CAMERA and not self.MODE.ROCKING_WINGS:
            self.led.off_both()
        if (not self.MODE.CAMERA and self.MODE.ROCKING_WINGS
                and self.rock_thread.state == 0):
            print("Rocking Wings!!")
            self.led.blink_all()
            self.rock_thread.start()
        if not self.MODE.ROCKING_WINGS and self.rock_thread.state == 1:
            print("End Rocking Wings")
            self.led.off_both()
            self.rock_thread.stop()
            if read_motion:
                # Recording mode keeps the captured data; just re-arm the thread.
                self.rock_thread = Scheduler(self.ROCK.read_motion, 0.25)
            else:
                self.ROCK.clear()
                self.rock_thread = Scheduler(self.ROCK.run, 0.25)
        if self.MODE.RCSAFETY == 1:
            # Disarm while RC safety is asserted; raise the flag once it
            # has persisted for flag_count iterations.
            # NOTE(review): nesting of count/flag under RCSAFETY inferred
            # from the whitespace-mangled original -- confirm.
            self.vehicle.armed = False
            self.count = self.count + 1
            if self.count > self.flag_count:
                self.flag = True
        # Pad the iteration to the target period.
        remaining = self.dt - (time.time() - t)
        if remaining > 0:
            time.sleep(remaining)
        return self.flag

    def end(self):
        """Shut down threads, persist recorded motion data, release hardware.

        Note: the original class defined ``end`` twice with identical
        bodies; the duplicate has been removed.
        """
        print("End Start")
        self.led.flash_second(3)
        self.mode_thread.stop()
        if self.rock_thread.state == 1:
            self.rock_thread.stop()
        if len(self.ROCK.motion_read_data) > 3:
            self.ROCK.save_motion()
        self.vehicle.close()
        self.CAMERA.cam.release()
        print("Completed")
if __name__ == "__main__":
    COPTER = copter()
    COPTER.setup()
    # Poll until the RC-safety flag trips or the operator interrupts.
    while True:
        try:
            if COPTER.loop():
                print("Safety")
                break
        except KeyboardInterrupt:
            print("KeyboardInterrupt")
            break
COPTER.end() | 31.934911 | 124 | 0.59181 | 710 | 5,397 | 4.371831 | 0.146479 | 0.069588 | 0.072165 | 0.051546 | 0.833119 | 0.833119 | 0.833119 | 0.833119 | 0.833119 | 0.821198 | 0 | 0.018917 | 0.284973 | 5,397 | 169 | 125 | 31.934911 | 0.785437 | 0.063739 | 0 | 0.807692 | 0 | 0 | 0.042633 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.046154 | false | 0 | 0.061538 | 0 | 0.130769 | 0.107692 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
86897cf3ba220bcd75cf9ba21b8113cbfc1202b8 | 166 | py | Python | vbio/handlers/__init__.py | yilbegan/vbio | 5041a1ac6e554a107f57433372bcdfad6a6a4e0b | [
"MIT"
] | 19 | 2019-04-14T14:52:27.000Z | 2022-01-03T19:16:42.000Z | vbio/handlers/__init__.py | yilbegan/vbio | 5041a1ac6e554a107f57433372bcdfad6a6a4e0b | [
"MIT"
] | 3 | 2019-05-04T15:41:35.000Z | 2019-09-09T22:42:25.000Z | vbio/handlers/__init__.py | yilbegan/vbio | 5041a1ac6e554a107f57433372bcdfad6a6a4e0b | [
"MIT"
] | 4 | 2019-08-20T10:33:01.000Z | 2021-08-31T07:57:41.000Z | # -*- encoding: utf-8 -*-
from vbio.handlers.longpoll import LongPollClient
# FlaskServer is an optional handler: when its import fails with
# ImportError the package still exposes the long-poll client.
# NOTE(review): presumably the web framework (Flask) is the optional
# dependency here -- confirm against vbio.handlers.flask.
try:
    from vbio.handlers.flask import FlaskServer
except ImportError:
    pass
| 20.75 | 50 | 0.710843 | 19 | 166 | 6.210526 | 0.789474 | 0.135593 | 0.271186 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007519 | 0.198795 | 166 | 7 | 51 | 23.714286 | 0.879699 | 0.138554 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.2 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
86904553e6803531ce1289d58f19f48ff43d049c | 128 | py | Python | extra_envs/extra_envs/envs/__init__.py | Fanshaoliu/safe_rl | 16ab54bebb70a86a80e1bfadb62656afb1547965 | [
"MIT"
] | 13 | 2021-06-19T03:19:36.000Z | 2022-03-29T10:44:37.000Z | extra_envs/extra_envs/envs/__init__.py | Fanshaoliu/safe_rl | 16ab54bebb70a86a80e1bfadb62656afb1547965 | [
"MIT"
] | 5 | 2021-06-16T20:06:51.000Z | 2021-12-14T22:55:54.000Z | extra_envs/extra_envs/envs/__init__.py | Fanshaoliu/safe_rl | 16ab54bebb70a86a80e1bfadb62656afb1547965 | [
"MIT"
] | 4 | 2021-11-03T13:30:08.000Z | 2022-01-05T11:16:47.000Z | from extra_envs.envs.point import PointEnv
from extra_envs.envs.half_cheetah import HalfCheetahEnv, HalfCheetahUnconstrainedEnv
| 42.666667 | 84 | 0.890625 | 16 | 128 | 6.9375 | 0.625 | 0.162162 | 0.234234 | 0.306306 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.070313 | 128 | 2 | 85 | 64 | 0.932773 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
86b75a075b8011a4635842316bef8d84057207ad | 19,611 | py | Python | isi_sdk_8_0/isi_sdk_8_0/api/zones_api.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 24 | 2018-06-22T14:13:23.000Z | 2022-03-23T01:21:26.000Z | isi_sdk_8_0/isi_sdk_8_0/api/zones_api.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 46 | 2018-04-30T13:28:22.000Z | 2022-03-21T21:11:07.000Z | isi_sdk_8_0/isi_sdk_8_0/api/zones_api.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 29 | 2018-06-19T00:14:04.000Z | 2022-02-08T17:51:19.000Z | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 3
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from isi_sdk_8_0.api_client import ApiClient
class ZonesApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen

    The five endpoint pairs were byte-for-byte copies of the same request
    pipeline; it is consolidated into :meth:`_invoke`.  Every public
    signature, docstring, error message and ``call_api`` argument is
    unchanged.
    """

    # Keyword arguments accepted by every endpoint in addition to its own
    # named parameters.
    _COMMON_KWARGS = ('async_req', '_return_http_data_only',
                      '_preload_content', '_request_timeout')

    def __init__(self, api_client=None):
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def _invoke(self, method_name, required, resource_path, http_method,
                response_type, params, body_param=None, path_param_map=None):
        """Validate parameters and perform the HTTP call shared by all
        zone endpoints.

        :param str method_name: public endpoint name, used in error messages.
        :param list required: names of parameters that must be present and
            not None (checked in order).
        :param str resource_path: URL template, e.g.
            '/platform/3/zones/{ZoneId}'.
        :param str http_method: HTTP verb ('GET', 'POST', 'PUT', 'DELETE').
        :param response_type: deserialization target class name, or None.
        :param dict params: the calling method's ``locals()`` (positional
            arguments plus the ``kwargs`` dict).
        :param str body_param: name of the parameter sent as request body.
        :param dict path_param_map: parameter name -> placeholder in
            ``resource_path``.
        :return: result of ``api_client.call_api``.
        :raises TypeError: on an unexpected keyword argument.
        :raises ValueError: when a required parameter is missing or None.
        """
        all_params = list(required) + list(self._COMMON_KWARGS)
        # .items() behaves identically to six.iteritems here on py2 and py3.
        for key, val in params['kwargs'].items():
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
            params[key] = val
        del params['kwargs']

        for name in required:
            if name not in params or params[name] is None:
                raise ValueError(
                    "Missing the required parameter `%s` when calling `%s`"
                    % (name, method_name))  # noqa: E501

        path_params = {}
        for name, placeholder in (path_param_map or {}).items():
            if name in params:
                path_params[placeholder] = params[name]

        body_params = params.get(body_param) if body_param else None

        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),  # noqa: E501
            'Content-Type': self.api_client.select_header_content_type(
                ['application/json']),  # noqa: E501
        }

        return self.api_client.call_api(
            resource_path, http_method,
            path_params,
            [],  # query_params: no zone endpoint uses them
            header_params,
            body=body_params,
            post_params=[],  # form_params
            files={},  # local_var_files
            response_type=response_type,
            auth_settings=['basicAuth'],
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    def create_zone(self, zone, **kwargs):  # noqa: E501
        """create_zone  # noqa: E501

        Create a new access zone.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_zone(zone, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ZoneCreateParams zone: (required)
        :return: CreateResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Both branches of the generated async_req test returned the same
        # call result, so a single return is equivalent.
        kwargs['_return_http_data_only'] = True
        return self.create_zone_with_http_info(zone, **kwargs)  # noqa: E501

    def create_zone_with_http_info(self, zone, **kwargs):  # noqa: E501
        """create_zone  # noqa: E501

        Create a new access zone.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_zone_with_http_info(zone, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param ZoneCreateParams zone: (required)
        :return: CreateResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._invoke('create_zone', ['zone'], '/platform/3/zones',
                            'POST', 'CreateResponse', locals(),
                            body_param='zone')

    def delete_zone(self, zone_id, **kwargs):  # noqa: E501
        """delete_zone  # noqa: E501

        Delete the access zone.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_zone(zone_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int zone_id: Delete the access zone. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.delete_zone_with_http_info(zone_id, **kwargs)  # noqa: E501

    def delete_zone_with_http_info(self, zone_id, **kwargs):  # noqa: E501
        """delete_zone  # noqa: E501

        Delete the access zone.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_zone_with_http_info(zone_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int zone_id: Delete the access zone. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._invoke('delete_zone', ['zone_id'],
                            '/platform/3/zones/{ZoneId}', 'DELETE', None,
                            locals(), path_param_map={'zone_id': 'ZoneId'})

    def get_zone(self, zone_id, **kwargs):  # noqa: E501
        """get_zone  # noqa: E501

        Retrieve the access zone information.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_zone(zone_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int zone_id: Retrieve the access zone information. (required)
        :return: Zones
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.get_zone_with_http_info(zone_id, **kwargs)  # noqa: E501

    def get_zone_with_http_info(self, zone_id, **kwargs):  # noqa: E501
        """get_zone  # noqa: E501

        Retrieve the access zone information.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_zone_with_http_info(zone_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int zone_id: Retrieve the access zone information. (required)
        :return: Zones
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._invoke('get_zone', ['zone_id'],
                            '/platform/3/zones/{ZoneId}', 'GET', 'Zones',
                            locals(), path_param_map={'zone_id': 'ZoneId'})

    def list_zones(self, **kwargs):  # noqa: E501
        """list_zones  # noqa: E501

        List all access zones.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_zones(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: ZonesExtended
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.list_zones_with_http_info(**kwargs)  # noqa: E501

    def list_zones_with_http_info(self, **kwargs):  # noqa: E501
        """list_zones  # noqa: E501

        List all access zones.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_zones_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: ZonesExtended
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._invoke('list_zones', [], '/platform/3/zones', 'GET',
                            'ZonesExtended', locals())

    def update_zone(self, zone, zone_id, **kwargs):  # noqa: E501
        """update_zone  # noqa: E501

        Modify the access zone. All input fields are optional, but one or more must be supplied.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_zone(zone, zone_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param Zone zone: (required)
        :param int zone_id: Modify the access zone. All input fields are optional, but one or more must be supplied. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.update_zone_with_http_info(zone, zone_id, **kwargs)  # noqa: E501

    def update_zone_with_http_info(self, zone, zone_id, **kwargs):  # noqa: E501
        """update_zone  # noqa: E501

        Modify the access zone. All input fields are optional, but one or more must be supplied.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_zone_with_http_info(zone, zone_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param Zone zone: (required)
        :param int zone_id: Modify the access zone. All input fields are optional, but one or more must be supplied. (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._invoke('update_zone', ['zone', 'zone_id'],
                            '/platform/3/zones/{ZoneId}', 'PUT', None,
                            locals(), body_param='zone',
                            path_param_map={'zone_id': 'ZoneId'})
| 37.001887 | 127 | 0.59793 | 2,288 | 19,611 | 4.878934 | 0.075612 | 0.056616 | 0.025083 | 0.032249 | 0.949386 | 0.944549 | 0.934695 | 0.923408 | 0.920451 | 0.913822 | 0 | 0.018536 | 0.30952 | 19,611 | 529 | 128 | 37.071834 | 0.805849 | 0.327061 | 0 | 0.802867 | 1 | 0 | 0.172533 | 0.033633 | 0 | 0 | 0 | 0 | 0 | 1 | 0.039427 | false | 0 | 0.014337 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
86c208a54ef266781c7d4c94de38ee996842c54b | 138 | py | Python | gmailutils/src/gmailutils/easygmail.py | t-igu/vscode-remote-container-example | 7022cd6fe662eb0422f23e55fc4f6d1a49f3f722 | [
"MIT"
] | null | null | null | gmailutils/src/gmailutils/easygmail.py | t-igu/vscode-remote-container-example | 7022cd6fe662eb0422f23e55fc4f6d1a49f3f722 | [
"MIT"
] | null | null | null | gmailutils/src/gmailutils/easygmail.py | t-igu/vscode-remote-container-example | 7022cd6fe662eb0422f23e55fc4f6d1a49f3f722 | [
"MIT"
] | null | null | null | from .easygmail_get import EasyGmailGet
from .easygmail_send import EasyGmailSend
class EasyGmail(EasyGmailGet, EasyGmailSend):
pass
| 23 | 45 | 0.833333 | 15 | 138 | 7.533333 | 0.6 | 0.230089 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.123188 | 138 | 5 | 46 | 27.6 | 0.933884 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.25 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
810918ea729f425c46cdbb6eb908c5261e29ee15 | 167 | py | Python | tools/__init__.py | celbig/finer_package_settings | 9c49bd070df8eb5e464c85ec1d3c9b5ef29ea1e4 | [
"CC-BY-4.0"
] | null | null | null | tools/__init__.py | celbig/finer_package_settings | 9c49bd070df8eb5e464c85ec1d3c9b5ef29ea1e4 | [
"CC-BY-4.0"
] | null | null | null | tools/__init__.py | celbig/finer_package_settings | 9c49bd070df8eb5e464c85ec1d3c9b5ef29ea1e4 | [
"CC-BY-4.0"
] | null | null | null | from .settings import load_current_settings
from .settings import process_package_settings
from .settings import clear_all_locks
from .exceptions import InvalidConfig
| 33.4 | 46 | 0.88024 | 22 | 167 | 6.409091 | 0.545455 | 0.255319 | 0.382979 | 0.368794 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.095808 | 167 | 4 | 47 | 41.75 | 0.933775 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
8118868fb38dfe07eb97b371a141a286ca2fa4c1 | 11,326 | py | Python | admin_tests/rdm_addons/api_v1/test_views.py | yuanyuan-deng/RDM-osf.io | e1c54e97c898d26406d71129db7e4baf82802224 | [
"Apache-2.0"
] | null | null | null | admin_tests/rdm_addons/api_v1/test_views.py | yuanyuan-deng/RDM-osf.io | e1c54e97c898d26406d71129db7e4baf82802224 | [
"Apache-2.0"
] | 8 | 2018-11-09T05:57:09.000Z | 2019-07-25T10:27:55.000Z | admin_tests/rdm_addons/api_v1/test_views.py | yuanyuan-deng/RDM-osf.io | e1c54e97c898d26406d71129db7e4baf82802224 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import json
from nose import tools as nt
from django.test import RequestFactory
from django.http import Http404
from tests.base import AdminTestCase
from osf_tests.factories import (
AuthUserFactory,
InstitutionFactory,
ExternalAccountFactory,
)
from admin_tests.utilities import setup_user_view
from admin.rdm_addons.api_v1 import views
from admin_tests.rdm_addons import factories as rdm_addon_factories
class TestOAuthView(AdminTestCase):
    """Permission and deletion tests for ``views.OAuthView``."""

    def setUp(self):
        super(TestOAuthView, self).setUp()
        self.user = AuthUserFactory()
        self.external_account = ExternalAccountFactory()

        option = rdm_addon_factories.RdmAddonOptionFactory()
        option.provider = self.external_account.provider
        option.external_accounts.add(self.external_account)
        option.save()
        self.rdm_addon_option = option

        self.user.affiliated_institutions.add(option.institution)
        self.user.external_accounts.add(self.external_account)
        self.user.save()

        self.request = RequestFactory().get('/fake_path')
        self.view = setup_user_view(views.OAuthView(), self.request, user=self.user)
        self.view.kwargs = dict(
            external_account_id=self.external_account._id,
            institution_id=option.institution.id,
        )

    def tearDown(self):
        super(TestOAuthView, self).tearDown()
        option = self.rdm_addon_option
        account = self.external_account
        institution = option.institution

        self.user.affiliated_institutions.remove(institution)
        if self.user.external_accounts.filter(pk=account.id).exists():
            self.user.external_accounts.remove(account)
        self.user.delete()

        if option.external_accounts.filter(pk=account.id).exists():
            option.external_accounts.remove(account)
        option.delete()
        institution.delete()
        account.delete()

    def test_super_admin_login(self):
        """A superuser passes the permission check."""
        self.request.user.is_superuser = True
        nt.assert_true(self.view.test_func())

    def test_admin_login(self):
        """An institution administrator (staff, non-superuser) passes."""
        self.request.user.is_staff = True
        self.request.user.is_superuser = False
        nt.assert_true(self.view.test_func())

    def test_non_admin_login(self):
        """A plain user is rejected."""
        self.request.user.is_staff = False
        self.request.user.is_superuser = False
        nt.assert_equal(self.view.test_func(), False)

    def test_non_active_user_login(self):
        """A deactivated user is rejected."""
        self.request.user.is_active = False
        nt.assert_equal(self.view.test_func(), False)

    def test_non_registered_user_login(self):
        """An unregistered user is rejected."""
        self.request.user.is_registered = False
        nt.assert_equal(self.view.test_func(), False)

    def test_non_affiliated_institution_user_login(self):
        """Staff of a different institution are rejected."""
        self.request.user.is_staff = True
        self.request.user.is_superuser = False
        original_id = self.rdm_addon_option.institution.id
        self.view.kwargs['institution_id'] = original_id + 1
        nt.assert_equal(self.view.test_func(), False)
        self.view.kwargs['institution_id'] = original_id

    def test_delete(self, *args, **kwargs):
        """DELETE detaches the account from both the user and the option."""
        self.request.user.is_staff = True
        self.request.user.is_superuser = False
        nt.assert_equal(self.user.external_accounts.count(), 1)
        nt.assert_equal(self.rdm_addon_option.external_accounts.count(), 1)
        self.view.delete(self.request, *args, **self.view.kwargs)
        nt.assert_equal(self.user.external_accounts.count(), 0)
        nt.assert_equal(self.rdm_addon_option.external_accounts.count(), 0)

    def test_delete_dummy(self, *args, **kwargs):
        """DELETE with a nonexistent account id raises 404."""
        real_id = self.external_account._id
        self.view.kwargs['external_account_id'] = real_id + 'dummy'
        with self.assertRaises(Http404):
            self.view.delete(self.request, *args, **self.view.kwargs)
        self.view.kwargs['external_account_id'] = real_id

    def test_delete_empty(self, *args, **kwargs):
        """DELETE when the option holds no accounts raises 404."""
        self.rdm_addon_option.external_accounts.remove(self.external_account)
        with self.assertRaises(Http404):
            self.view.delete(self.request, *args, **self.view.kwargs)
class TestSettingsView(AdminTestCase):
    """Permission and response tests for ``views.SettingsView``."""

    def setUp(self):
        super(TestSettingsView, self).setUp()
        self.user = AuthUserFactory()
        self.institution = InstitutionFactory()
        self.user.affiliated_institutions.add(self.institution)

        self.request = RequestFactory().get('/fake_path')
        self.view = setup_user_view(views.SettingsView(), self.request, user=self.user)
        self.view.kwargs = dict(
            addon_name='dataverse',
            institution_id=self.institution.id,
        )

    def tearDown(self):
        super(TestSettingsView, self).tearDown()
        self.user.affiliated_institutions.remove()
        self.user.delete()
        self.institution.delete()

    def test_super_admin_login(self):
        """A superuser passes the permission check."""
        self.request.user.is_superuser = True
        nt.assert_true(self.view.test_func())

    def test_admin_login(self):
        """An institution administrator (staff, non-superuser) passes."""
        self.request.user.is_staff = True
        self.request.user.is_superuser = False
        nt.assert_true(self.view.test_func())

    def test_non_admin_login(self):
        """A plain user is rejected."""
        self.request.user.is_staff = False
        self.request.user.is_superuser = False
        nt.assert_equal(self.view.test_func(), False)

    def test_non_active_user_login(self):
        """A deactivated user is rejected."""
        self.request.user.is_active = False
        nt.assert_equal(self.view.test_func(), False)

    def test_non_registered_user_login(self):
        """An unregistered user is rejected."""
        self.request.user.is_registered = False
        nt.assert_equal(self.view.test_func(), False)

    def test_non_affiliated_institution_user_login(self):
        """Staff of a different institution are rejected."""
        self.request.user.is_staff = True
        self.request.user.is_superuser = False
        self.view.kwargs = {'institution_id': self.institution.id + 1}
        nt.assert_equal(self.view.test_func(), False)

    def test_get_dataverse(self, *args, **kwargs):
        """GET returns a payload containing 'result' for dataverse."""
        self.request.user.is_staff = True
        self.request.user.is_superuser = False
        res = self.view.get(self.request, *args, **self.view.kwargs)
        nt.assert_equal(res.status_code, 200)
        nt.assert_true('result' in res.content)

    def test_get_dummy_addon(self, *args, **kwargs):
        """GET for an unknown addon returns an empty JSON object."""
        self.request.user.is_staff = True
        self.request.user.is_superuser = False
        self.view.kwargs['addon_name'] = 'dummy'
        res = self.view.get(self.request, *args, **self.view.kwargs)
        nt.assert_equal(res.status_code, 200)
        self.assertJSONEqual(res.content, {})
class TestAccountsView(AdminTestCase):
    """Permission and response tests for the institution add-on AccountsView."""
    def setUp(self):
        super(TestAccountsView, self).setUp()
        # Wire an external account into an RdmAddonOption and affiliate the
        # user with the option's institution so test_func() authorizes them.
        self.user = AuthUserFactory()
        self.external_account = ExternalAccountFactory()
        self.rdm_addon_option = rdm_addon_factories.RdmAddonOptionFactory()
        self.rdm_addon_option.provider = self.external_account.provider
        self.rdm_addon_option.external_accounts.add(self.external_account)
        self.rdm_addon_option.save()
        self.user.affiliated_institutions.add(self.rdm_addon_option.institution)
        self.user.external_accounts.add(self.external_account)
        self.user.save()
        self.request = RequestFactory().get('/fake_path')
        self.view = views.AccountsView()
        self.view = setup_user_view(self.view, self.request, user=self.user)
        self.view.kwargs = {
            'addon_name': self.external_account.provider,
            'institution_id': self.rdm_addon_option.institution.id,
        }
    def tearDown(self):
        super(TestAccountsView, self).tearDown()
        # Detach M2M links before deleting; the membership checks guard
        # against tests that already removed the account. Deletion order
        # matters (user/option before institution/external account).
        institution = self.rdm_addon_option.institution
        self.user.affiliated_institutions.remove(institution)
        if self.user.external_accounts.filter(pk=self.external_account.id).exists():
            self.user.external_accounts.remove(self.external_account)
        self.user.delete()
        if self.rdm_addon_option.external_accounts.filter(pk=self.external_account.id).exists():
            self.rdm_addon_option.external_accounts.remove(self.external_account)
        self.rdm_addon_option.delete()
        institution.delete()
        self.external_account.delete()
    def test_super_admin_login(self):
        """test superuser login"""
        self.request.user.is_superuser = True
        nt.assert_true(self.view.test_func())
    def test_admin_login(self):
        """test institution administrator login"""
        self.request.user.is_superuser = False
        self.request.user.is_staff = True
        nt.assert_true(self.view.test_func())
    def test_non_admin_login(self):
        """test user not superuser or institution administrator login"""
        self.request.user.is_superuser = False
        self.request.user.is_staff = False
        nt.assert_equal(self.view.test_func(), False)
    def test_non_active_user_login(self):
        """test invalid user login"""
        self.request.user.is_active = False
        nt.assert_equal(self.view.test_func(), False)
    def test_non_registered_user_login(self):
        """test unregistered user login"""
        self.request.user.is_registered = False
        nt.assert_equal(self.view.test_func(), False)
    def test_non_affiliated_institution_user_login(self):
        """test user unaffiliated institution login"""
        self.request.user.is_superuser = False
        self.request.user.is_staff = True
        self.view.kwargs = {'institution_id': self.rdm_addon_option.institution.id + 1}
        nt.assert_equal(self.view.test_func(), False)
    def test_get(self, *args, **kwargs):
        """test GET returns the single account linked in setUp"""
        res = self.view.get(self.request, *args, **self.view.kwargs)
        nt.assert_equal(res.status_code, 200)
        content = json.loads(res.content)
        nt.assert_equal(len(content['accounts']), 1)
    def test_post_empty(self, *args, **kwargs):
        """test POST with an empty body is rejected"""
        self.request = RequestFactory().post(
            '/fake',
            data=json.dumps({}),
            content_type='application/json'
        )
        self.view.kwargs['addon_name'] = 'dummy'
        res = self.view.post(self.request, *args, **self.view.kwargs)
        nt.assert_equal(res.status_code, 400)
    def test_post_fake_s3_account(self, *args, **kwargs):
        """test POST with invalid s3 credentials is rejected"""
        self.request = RequestFactory().post(
            '/fake',
            data=json.dumps({'access_key': 'aaa', 'secret_key': 'bbb'}),
            content_type='application/json'
        )
        self.view.kwargs['addon_name'] = 's3'
        res = self.view.post(self.request, *args, **self.view.kwargs)
        nt.assert_equal(res.status_code, 400)
| 40.45 | 96 | 0.679763 | 1,408 | 11,326 | 5.247159 | 0.083807 | 0.059556 | 0.073092 | 0.075934 | 0.871142 | 0.848403 | 0.833514 | 0.833514 | 0.824986 | 0.794532 | 0 | 0.004003 | 0.205898 | 11,326 | 279 | 97 | 40.594982 | 0.817434 | 0.05792 | 0 | 0.704225 | 0 | 0 | 0.032423 | 0 | 0 | 0 | 0 | 0 | 0.150235 | 1 | 0.150235 | false | 0 | 0.042254 | 0 | 0.206573 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
812d2ec4711d3d332185c210c740df186e0ba286 | 36,891 | py | Python | python/src/main/python/pygw/query/vector/filter_factory.py | jhickman-prominent/geowave | fb421588e22a7c68ef13be1e57bc2c674dd1b090 | [
"Apache-2.0"
] | null | null | null | python/src/main/python/pygw/query/vector/filter_factory.py | jhickman-prominent/geowave | fb421588e22a7c68ef13be1e57bc2c674dd1b090 | [
"Apache-2.0"
] | 2 | 2019-09-20T15:39:27.000Z | 2019-12-03T14:07:43.000Z | python/src/main/python/pygw/query/vector/filter_factory.py | jhickman-prominent/geowave | fb421588e22a7c68ef13be1e57bc2c674dd1b090 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2013-2019 Contributors to the Eclipse Foundation
#
# See the NOTICE file distributed with this work for additional information regarding copyright
# ownership. All rights reserved. This program and the accompanying materials are made available
# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
# available at http://www.apache.org/licenses/LICENSE-2.0.txt
#===============================================================================================
from datetime import datetime
from shapely.geometry.base import BaseGeometry
from pygw.base import GeoWaveObject
from pygw.base.type_conversions import GeometryType
from pygw.config import java_gateway
from pygw.config import java_pkg
from pygw.config import reflection_util
def _j_match_action(match_action):
    """Converts a match action name (case-insensitive) into the Java MatchAction enum value."""
    j_enum = java_pkg.org.opengis.filter.MultiValuedFilter.MatchAction
    return j_enum.valueOf(match_action.upper())
# These functions are needed in order to invoke java methods that are named with
# reserved python keywords such as and, or, and not
def _invoke_filter_list_method_by_name(j_filter_factory, name, filters):
    """
    Reflectively invokes a `java.util.List`-taking method on the Java filter factory.

    Needed because some factory method names ("and", "or") are reserved Python
    keywords and cannot be called directly through Py4J.

    Args:
        j_filter_factory: The Java filter factory instance.
        name (str): The Java method name to invoke (e.g. "and", "or").
        filters (list): Java Filter objects passed together as one List argument.

    Returns:
        The Java object returned by the invoked method.
    """
    filter_factory_class = j_filter_factory.getClass()
    list_class = reflection_util.classForName("java.util.List")
    class_array = java_gateway.new_array(java_pkg.java.lang.Class, 1)
    class_array[0] = list_class
    method = filter_factory_class.getMethod(name, class_array)
    filter_list = java_pkg.java.util.ArrayList()
    # Renamed loop variable: the original shadowed the `filter` builtin.
    for j_filter in filters:
        filter_list.append(j_filter)
    objects_array = java_gateway.new_array(java_pkg.java.lang.Object, 1)
    objects_array[0] = filter_list
    return method.invoke(j_filter_factory, objects_array)
def _invoke_filter_method_by_name(j_filter_factory, name, j_filter):
    """
    Reflectively invokes a Filter-taking method on the Java filter factory.

    Needed because the factory method name "not" is a reserved Python keyword
    and cannot be called directly through Py4J. The parameter was renamed from
    `filter` to avoid shadowing the builtin; this private helper is only
    called positionally within this module.

    Args:
        j_filter_factory: The Java filter factory instance.
        name (str): The Java method name to invoke (e.g. "not").
        j_filter: The Java Filter object to pass as the single argument.

    Returns:
        The Java object returned by the invoked method.
    """
    filter_factory_class = j_filter_factory.getClass()
    filter_class = reflection_util.classForName("org.opengis.filter.Filter")
    class_array = java_gateway.new_array(java_pkg.java.lang.Class, 1)
    class_array[0] = filter_class
    method = filter_factory_class.getMethod(name, class_array)
    objects_array = java_gateway.new_array(java_pkg.java.lang.Object, 1)
    objects_array[0] = j_filter
    return method.invoke(j_filter_factory, objects_array)
class FilterFactory(GeoWaveObject):
"""
Filter factory for constructing filters to be used in vector queries. Methods
of this factory generally return either a Filter or Expression which can be used
in additional method calls.
"""
def __init__(self):
j_filter_factory = java_pkg.org.geotools.filter.FilterFactoryImpl()
super().__init__(j_filter_factory)
def id(self, fids):
"""
Constructs a filter that matches a set of feature IDs.
Args:
fids (list of str): The list of feature IDs to match.
Returns:
A Filter with the given feature IDs.
"""
j_fids = java_gateway.new_array(java_pkg.org.opengis.filter.identity.FeatureId, len(fids))
for idx, fid in enumerate(fids):
if isinstance(fid, str):
j_fids[idx] = self.feature_id(fid)
else:
j_fids[idx] = fid
return self._java_ref.id(j_fids)
def feature_id(self, id):
"""
Constructs a filter that matches a specific feature ID.
Args:
id (str): The feature ID.
Returns:
A Filter with the given feature ID.
"""
return self._java_ref.featureId(id)
def gml_object_id(self, id):
"""
Constructs a filter that matches a specific gml object ID.
Args:
id (str): The gml object ID.
Returns:
A Filter with the given gml object ID.
"""
return self._java_ref.gmlObjectId(id)
def property(self, name):
"""
Constructs an expression that references the given property name.
Args:
name (str): The property name.
Returns:
An Expression with the given property name.
"""
return self._java_ref.property(name)
    def literal(self, value):
        """
        Constructs an expression with the given literal value.

        Datetimes are serialized to an ISO-like string; strings are passed
        through Java reflection to force the Object overload; shapely
        geometries are converted to their Java equivalents.

        Args:
            value (any): The literal value to use.
        Returns:
            An Expression with the given literal value.
        """
        if isinstance(value, datetime):
            # Convert the date to a string (no timezone component is emitted)
            value = value.strftime("%Y-%m-%dT%H:%M:%S")
        if isinstance(value, str):
            # Prevent Py4J from assuming the string matches up with the char variant method:
            # resolve literal(java.lang.Object) explicitly and invoke it reflectively.
            filter_factory_class = self._java_ref.getClass()
            object_class = reflection_util.classForName("java.lang.Object")
            class_array = java_gateway.new_array(java_pkg.java.lang.Class, 1)
            class_array[0] = object_class
            method = filter_factory_class.getMethod("literal", class_array)
            objects_array = java_gateway.new_array(java_pkg.java.lang.Object, 1)
            objects_array[0] = value
            return method.invoke(self._java_ref, objects_array)
        if isinstance(value, BaseGeometry):
            # Shapely geometry -> Java geometry before handing off to the factory
            return self._java_ref.literal(GeometryType().to_java(value))
        return self._java_ref.literal(value)
def add(self, expr1, expr2):
"""
Constructs an expression which adds two other expressions.
Args:
expr1 (Expression): The first expression.
expr2 (Expression): The second expression.
Returns:
An Expression which represents [expr1 + expr2].
"""
return self._java_ref.add(expr1, expr2)
def subtract(self, expr1, expr2):
"""
Constructs an expression which subtracts one expression from another.
Args:
expr1 (Expression): The expression to subtract from.
expr2 (Expression): The expression to subtract.
Returns:
An Expression which represents [expr1 - expr2].
"""
return self._java_ref.subtract(expr1, expr2)
def multiply(self, expr1, expr2):
"""
Constructs an expression which multiplies two other expressions.
Args:
expr1 (Expression): The first expression.
expr2 (Expression): The second expression.
Returns:
An Expression which represents [expr1 * expr2].
"""
return self._java_ref.multiply(expr1, expr2)
def divide(self, expr1, expr2):
"""
Constructs an expression which divides one expression by another.
Args:
expr1 (Expression): The expression to divide.
expr2 (Expression): The expression to divide by.
Returns:
An Expression which represents [expr1 / expr2].
"""
return self._java_ref.divide(expr1, expr2)
def function(self, name, expressions):
"""
Constructs an expression by passing a set of expressions to an expression function.
Args:
name (str): The name of the function.
expressions (list of Expression): The expressions to use in the function.
Returns:
An Expression which represents the result of the function.
"""
j_expressions = java_gateway.new_array(java_pkg.org.opengis.filter.expression.Expression, len(expressions))
for idx, expression in enumerate(expressions):
j_expressions[idx] = expression
return self._java_ref.function(name, j_expressions)
def and_(self, filters):
"""
Constructs a filter which passes when all given filters pass.
Args:
filters (list of Filter): The filters to check.
Returns:
A Filter that passes when all given Filters pass.
"""
return _invoke_filter_list_method_by_name(self._java_ref, "and", filters)
def or_(self, filters):
"""
Constructs a filter which passes when any of the given filters pass.
Args:
filters (list of Filter): The filters to check.
Returns:
A Filter that passes when one of the given Filters pass.
"""
return _invoke_filter_list_method_by_name(self._java_ref, "or", filters)
def not_(self, filter):
"""
Constructs a filter that passes when the given filter does NOT pass.
Args:
filter (Filter): The filter to check.
Returns:
A Filter that passes when the given filter does NOT pass.
"""
return _invoke_filter_method_by_name(self._java_ref, "not", filter)
def between(self, expr, lower, upper, match_action=None):
"""
Constructs a filter that passes when the given expression falls between a
lower and upper expression.
Args:
expr (Expression): The expression to check.
lower (Expression): The lower bound.
upper (Expression): The upper bound.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the given expression falls between a
lower and upper expression.
"""
if match_action is None:
return self._java_ref.between(expr, lower, upper)
else:
return self._java_ref.between(expr, lower, upper, _j_match_action(match_action))
def equals(self, expr1, expr2):
"""
Constructs a filter that passes when the given expressions are equal.
Args:
expr1 (Expression): The first expression.
expr2 (Expression): The second expression.
Returns:
A Filter that passes when the given expressions are equal.
"""
return self._java_ref.equals(expr1, expr2)
def equal(self, expr1, expr2, match_case, match_action=None):
"""
Constructs a filter that passes when the given expressions are equal.
Args:
expr1 (Expression): The first expression.
expr2 (Expression): The second expression.
match_case (bool): Whether or not to match case with strings.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the given expressions are equal.
"""
if match_action is None:
return self._java_ref.equal(expr1, expr2, match_case)
else:
return self._java_ref.equal(expr1, expr2, match_case, _j_match_action(match_action))
def not_equals(self, expr1, expr2):
"""
Constructs a filter that passes when the given expressions are NOT equal.
Args:
expr1 (Expression): The first expression.
expr2 (Expression): The second expression.
Returns:
A Filter that passes when the given expressions are NOT equal.
"""
return self._java_ref.notEqual(expr1, expr2)
def not_equal(self, expr1, expr2, match_case, match_action=None):
"""
Constructs a filter that passes when the given expressions are NOT equal.
Args:
expr1 (Expression): The first expression.
expr2 (Expression): The second expression.
match_case (bool): Whether or not to match case with strings.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the given expressions are NOT equal.
"""
if match_action is None:
return self._java_ref.notEqual(expr1, expr2, match_case)
else:
return self._java_ref.notEqual(expr1, expr2, match_case, _j_match_action(match_action))
def greater(self, expr1, expr2, match_case=None, match_action=None):
"""
Constructs a filter that passes when the first expression is greater than
the second.
Args:
expr1 (Expression): The first expression.
expr2 (Expression): The second expression.
match_case (bool): Whether or not to match case with strings. Default is None.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the first expression is greater than the
second.
"""
if match_case is None:
return self._java_ref.greater(expr1, expr2)
elif match_action is None:
return self._java_ref.greater(expr1, expr2, match_case)
else:
return self._java_ref.greater(expr1, expr2, match_case, _j_match_action(match_action))
def greater_or_equal(self, expr1, expr2, match_case=None, match_action=None):
"""
Constructs a filter that passes when the first expression is greater than
or equal to the second.
Args:
expr1 (Expression): The first expression.
expr2 (Expression): The second expression.
match_case (bool): Whether or not to match case with strings. Default is None.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the first expression is greater than or equal
to the second.
"""
if match_case is None:
return self._java_ref.greaterOrEqual(expr1, expr2)
elif match_action is None:
return self._java_ref.greaterOrEqual(expr1, expr2, match_case)
else:
return self._java_ref.greaterOrEqual(expr1, expr2, match_case, _j_match_action(match_action))
def less(self, expr1, expr2, match_case=None, match_action=None):
"""
Constructs a filter that passes when the first expression is less than
the second.
Args:
expr1 (Expression): The first expression.
expr2 (Expression): The second expression.
match_case (bool): Whether or not to match case with strings. Default is None.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the first expression is less than the
second.
"""
if match_case is None:
return self._java_ref.less(expr1, expr2)
elif match_action is None:
return self._java_ref.less(expr1, expr2, match_case)
else:
return self._java_ref.less(expr1, expr2, match_case, _j_match_action(match_action))
def less_or_equal(self, expr1, expr2, match_case=None, match_action=None):
"""
Constructs a filter that passes when the first expression is les than
or equal to the second.
Args:
expr1 (Expression): The first expression.
expr2 (Expression): The second expression.
match_case (bool): Whether or not to match case with strings. Default is None.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the first expression is less than or equal
to the second.
"""
if match_case is None:
return self._java_ref.lessOrEqual(expr1, expr2)
elif match_action is None:
return self._java_ref.lessOrEqual(expr1, expr2, match_case)
else:
return self._java_ref.lessOrEqual(expr1, expr2, match_case, _j_match_action(match_action))
def like(self, expr, pattern, wildcard=None, single_char=None, escape=None, match_case=None, match_action=None):
"""
Constructs a filter with character string comparison operator with pattern
matching and specified wildcards.
Args:
expr (Expression): The expression to use.
pattern (str): The pattern to match.
wildcard (str): The string to use to match any characters. Default is None.
single_char (str): The string to use to match a single character. Default is None.
escape (str): The string to use to escape a wildcard. Default is None.
match_case (bool): Whether or not to match case with strings. Default is None.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the first expression is greater than the
second.
"""
if wildcard is None:
return self._java_ref.like(expr, pattern)
elif match_case is None:
return self._java_ref.like(expr, pattern, wildcard, single_char, escape)
elif match_action is None:
return self._java_ref.like(expr, pattern, wildcard, single_char, escape, match_case)
else:
return self._java_ref.like(expr, pattern, wildcard, single_char, escape, match_case, _j_match_action(match_action))
def is_null(self, expr):
"""
Constructs a filter that passes when the given expression is null.
Args:
expr (Expression): The expression to check.
Returns:
A Filter that passes when the given epxression is null.
"""
return self._java_ref.isNull(expr)
def bbox(self, geometry_expr, minx, miny, maxx, maxy, srs, match_action=None):
"""
Constructs a filter that passes when the given geometry expression is within
the given bounding box.
Args:
geometry_expr (Expression): An expression which represents a geometry.
minx (float): The minimum X value of the bounding box.
miny (float): The minimum Y value of the bounding box.
maxx (float): The maximum X value of the bounding box.
maxy (float): The maximum Y value of the bounding box.
srs (str): The spatial reference system of the geometry.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the given geometry is within the bounding box.
"""
if match_action is None:
return self._java_ref.bbox(geometry_expr, minx*1.0, miny*1.0, maxx*1.0, maxy*1.0, srs)
else:
return self._java_ref.bbox(geometry_expr, minx*1.0, miny*1.0, maxx*1.0, maxy*1.0, srs, _j_match_action(match_action))
def bbox_expr(self, geometry_expr, bbox_expr):
"""
Constructs a filter that passes when the given geometry expression is within the
given bounding box expression.
Args:
geometry_expr (Expression): An expression which represents a geometry.
bbox_expr (Expression): An expression which represents a bounding box.
Returns:
A Filter that passes when the given geometry is within the bounding box.
"""
return self._java_ref.bbox(geometry_expr, bbox_expr)
def beyond(self, geometry_expr1, geometry_expr2, distance, units, match_action=None):
"""
Constructs a filter that passes when a given geometry is beyond a certain distance from
a second given geometry.
Args:
geometry_expr1 (Expression): An expression which represents a geometry.
geometry_expr2 (Expression): An expression which represents a geometry.
distance (float): The distance to use.
units (str): The distance unit.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the geometry is beyond the distance from the second
geometry.
"""
if match_action is None:
return self._java_ref.beyond(geometry_expr1, geometry_expr2, distance*1.0, units)
else:
return self._java_ref.beyond(geometry_expr1, geometry_expr2, distance*1.0, units, _j_match_action(match_action))
def contains(self, geometry_expr1, geometry_expr2, match_action=None):
"""
Constructs a filter that passes when the first geometry expression contains the
second geometry expression.
Args:
geometry_expr1 (Expression): An expression which represents the geometry to check against.
geometry_expr2 (Expression): An expression which represents the geometry to check.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when the second geometry is contained by the first.
"""
if match_action is None:
return self._java_ref.contains(geometry_expr1, geometry_expr2)
else:
return self._java_ref.contains(geometry_expr1, geometry_expr2, _j_match_action(match_action))
def crosses(self, geometry_expr1, geometry_expr2, match_action=None):
"""
Constructs a filter that passes when a given geometry crosses another given geometry.
Args:
geometry_expr1 (Expression): An expression which represents a geometry.
geometry_expr2 (Expression): An expression which represents a geometry.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given geometry crosses another given geometry.
"""
if match_action is None:
return self._java_ref.crosses(geometry_expr1, geometry_expr2)
else:
return self._java_ref.crosses(geometry_expr1, geometry_expr2, _j_match_action(match_action))
def disjoint(self, geometry_expr1, geometry_expr2, match_action=None):
"""
Constructs a filter that passes when a given geometry is disjoint to another given geometry.
Args:
geometry_expr1 (Expression): An expression which represents a geometry.
geometry_expr2 (Expression): An expression which represents a geometry.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given geometry is disjoint to another given geometry.
"""
if match_action is None:
return self._java_ref.disjoint(geometry_expr1, geometry_expr2)
else:
return self._java_ref.disjoint(geometry_expr1, geometry_expr2, _j_match_action(match_action))
def intersects(self, geometry_expr1, geometry_expr2, match_action=None):
"""
Constructs a filter that passes when a given geometry intersects another given geometry.
Args:
geometry_expr1 (Expression): An expression which represents a geometry.
geometry_expr2 (Expression): An expression which represents a geometry.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given geometry intersects another given geometry.
"""
if match_action is None:
return self._java_ref.intersects(geometry_expr1, geometry_expr2)
else:
return self._java_ref.intersects(geometry_expr1, geometry_expr2, _j_match_action(match_action))
def overlaps(self, geometry_expr1, geometry_expr2, match_action=None):
"""
Constructs a filter that passes when a given geometry overlaps another given geometry.
Args:
geometry_expr1 (Expression): An expression which represents a geometry.
geometry_expr2 (Expression): An expression which represents a geometry.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given geometry overlaps another given geometry.
"""
if match_action is None:
return self._java_ref.overlaps(geometry_expr1, geometry_expr2)
else:
return self._java_ref.overlaps(geometry_expr1, geometry_expr2, _j_match_action(match_action))
def touches(self, geometry_expr1, geometry_expr2, match_action=None):
"""
Constructs a filter that passes when a given geometry touches another given geometry.
Args:
geometry_expr1 (Expression): An expression which represents a geometry.
geometry_expr2 (Expression): An expression which represents a geometry.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given geometry touches another given geometry.
"""
if match_action is None:
return self._java_ref.touches(geometry_expr1, geometry_expr2)
else:
return self._java_ref.touches(geometry_expr1, geometry_expr2, _j_match_action(match_action))
def within(self, geometry_expr1, geometry_expr2, match_action=None):
"""
Constructs a filter that passes when a given geometry is within another given geometry.
Args:
geometry_expr1 (Expression): An expression which represents a geometry.
geometry_expr2 (Expression): An expression which represents a geometry.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given geometry is within another given geometry.
"""
if match_action is None:
return self._java_ref.within(geometry_expr1, geometry_expr2)
else:
return self._java_ref.within(geometry_expr1, geometry_expr2, _j_match_action(match_action))
def dwithin(self, geometry_expr1, geometry_expr2, distance, units, match_action=None):
"""
Constructs a filter that passes when a given geometry is within the specified distance
of the second geometry.
Args:
geometry_expr1 (Expression): An expression which represents a geometry.
geometry_expr2 (Expression): An expression which represents a geometry.
distance (float): The distance to use.
units (str): The unit of distance.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given geometry is within the specified distance of the
second geometry.
"""
if match_action is None:
return self._java_ref.dwithin(geometry_expr1, geometry_expr2, distance*1.0, units)
else:
return self._java_ref.dwithin(geometry_expr1, geometry_expr2, distance*1.0, units, _j_match_action(match_action))
def after(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression occurs after
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression occurs after the
second temporal expression.
"""
if match_action is None:
return self._java_ref.after(expr1, expr2)
else:
return self._java_ref.after(expr1, expr2, _j_match_action(match_action))
def any_interacts(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression interacts with
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression interacts with the
second temporal expression.
"""
if match_action is None:
return self._java_ref.anyInteracts(expr1, expr2)
else:
return self._java_ref.anyInteracts(expr1, expr2, _j_match_action(match_action))
def before(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression occurs before
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression occurs before the
second temporal expression.
"""
if match_action is None:
return self._java_ref.before(expr1, expr2)
else:
return self._java_ref.before(expr1, expr2, _j_match_action(match_action))
def begins(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression begins
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression begins the
second temporal expression.
"""
if match_action is None:
return self._java_ref.begins(expr1, expr2)
else:
return self._java_ref.begins(expr1, expr2, _j_match_action(match_action))
def begun_by(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression is begun by
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression is begun by the
second temporal expression.
"""
if match_action is None:
return self._java_ref.begunBy(expr1, expr2)
else:
return self._java_ref.begunBy(expr1, expr2, _j_match_action(match_action))
def during(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression occurs during
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression occurs during the
second temporal expression.
"""
if match_action is None:
return self._java_ref.during(expr1, expr2)
else:
return self._java_ref.during(expr1, expr2, _j_match_action(match_action))
def ended_by(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression is ended by
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression is ended by the
second temporal expression.
"""
if match_action is None:
return self._java_ref.endedBy(expr1, expr2)
else:
return self._java_ref.endedBy(expr1, expr2, _j_match_action(match_action))
def ends(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression ends
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression ends the
second temporal expression.
"""
if match_action is None:
return self._java_ref.ends(expr1, expr2)
else:
return self._java_ref.ends(expr1, expr2, _j_match_action(match_action))
def meets(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression meets
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression meets the
second temporal expression.
"""
if match_action is None:
return self._java_ref.meets(expr1, expr2)
else:
return self._java_ref.meets(expr1, expr2, _j_match_action(match_action))
def met_by(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression is met by
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression is met by the
second temporal expression.
"""
if match_action is None:
return self._java_ref.metBy(expr1, expr2)
else:
return self._java_ref.metBy(expr1, expr2, _j_match_action(match_action))
def overlapped_by(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression is overlapped by
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression is overlapped by the
second temporal expression.
"""
if match_action is None:
return self._java_ref.overlappedBy(expr1, expr2)
else:
return self._java_ref.overlappedBy(expr1, expr2, _j_match_action(match_action))
def tcontains(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression contains
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression contains the
second temporal expression.
"""
if match_action is None:
return self._java_ref.tcontains(expr1, expr2)
else:
return self._java_ref.tcontains(expr1, expr2, _j_match_action(match_action))
def tequals(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression equals
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression equals the
second temporal expression.
"""
if match_action is None:
return self._java_ref.tequals(expr1, expr2)
else:
return self._java_ref.tequals(expr1, expr2, _j_match_action(match_action))
def toverlaps(self, expr1, expr2, match_action=None):
"""
Constructs a filter that passes when a given temporal expression overlaps
a second temporal expression.
Args:
expr1 (Expression): The first temporal expression.
expr2 (Expression): The second temporal expression.
match_action (str): The match action to use. Default is 'ANY'.
Returns:
A Filter that passes when a given temporal expression overlaps the
second temporal expression.
"""
if match_action is None:
return self._java_ref.toverlaps(expr1, expr2)
else:
return self._java_ref.toverlaps(expr1, expr2, _j_match_action(match_action))
| 42.550173 | 129 | 0.64368 | 4,570 | 36,891 | 5.046827 | 0.062801 | 0.093002 | 0.042924 | 0.062652 | 0.843739 | 0.823708 | 0.80888 | 0.745361 | 0.712279 | 0.655524 | 0 | 0.012652 | 0.286574 | 36,891 | 866 | 130 | 42.599307 | 0.863673 | 0.498767 | 0 | 0.30916 | 0 | 0 | 0.005936 | 0.001706 | 0 | 0 | 0 | 0 | 0 | 1 | 0.20229 | false | 0 | 0.026718 | 0.003817 | 0.583969 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
814129cc029389df27db41fb569b3e51f6546f41 | 5,470 | py | Python | napari/_vispy/_tests/test_vispy_camera.py | ddawsari/napari | bf0c7d081b644c1c19488fc69df5f03460275f3e | [
"BSD-3-Clause"
] | 1 | 2021-04-04T21:25:04.000Z | 2021-04-04T21:25:04.000Z | napari/_vispy/_tests/test_vispy_camera.py | ddawsari/napari | bf0c7d081b644c1c19488fc69df5f03460275f3e | [
"BSD-3-Clause"
] | null | null | null | napari/_vispy/_tests/test_vispy_camera.py | ddawsari/napari | bf0c7d081b644c1c19488fc69df5f03460275f3e | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
def test_camera(make_test_viewer):
"""Test vispy camera creation in 2D."""
viewer = make_test_viewer()
vispy_camera = viewer.window.qt_viewer.camera
np.random.seed(0)
data = np.random.random((11, 11, 11))
viewer.add_image(data)
# Test default values camera values are used and vispy camera has been
# updated
assert viewer.dims.ndisplay == 2
assert viewer.camera.ndisplay == 2
np.testing.assert_almost_equal(viewer.camera.angles, (0, 0, 90))
np.testing.assert_almost_equal(viewer.camera.center, (5.0, 5.0))
np.testing.assert_almost_equal(viewer.camera.angles, vispy_camera.angles)
np.testing.assert_almost_equal(viewer.camera.center, vispy_camera.center)
np.testing.assert_almost_equal(viewer.camera.zoom, vispy_camera.zoom)
def test_vispy_camera_update_from_model(make_test_viewer):
"""Test vispy camera update from model in 2D."""
viewer = make_test_viewer()
vispy_camera = viewer.window.qt_viewer.camera
np.random.seed(0)
data = np.random.random((11, 11, 11))
viewer.add_image(data)
# Test default values camera values are used and vispy camera has been
# updated
assert viewer.dims.ndisplay == 2
assert viewer.camera.ndisplay == 2
# Update camera center and zoom
viewer.camera.center = (11, 12)
viewer.camera.zoom = 4
np.testing.assert_almost_equal(viewer.camera.angles, (0, 0, 90))
np.testing.assert_almost_equal(viewer.camera.center, (11, 12))
np.testing.assert_almost_equal(viewer.camera.zoom, 4)
np.testing.assert_almost_equal(viewer.camera.angles, vispy_camera.angles)
np.testing.assert_almost_equal(viewer.camera.center, vispy_camera.center)
np.testing.assert_almost_equal(viewer.camera.zoom, vispy_camera.zoom)
def test_camera_model_update_from_vispy(make_test_viewer):
"""Test camera model updates from vispy in 2D."""
viewer = make_test_viewer()
vispy_camera = viewer.window.qt_viewer.camera
np.random.seed(0)
data = np.random.random((11, 11, 11))
viewer.add_image(data)
# Test default values camera values are used and vispy camera has been
# updated
assert viewer.dims.ndisplay == 2
assert viewer.camera.ndisplay == 2
# Update vispy camera center and zoom
vispy_camera.center = (11, 12)
vispy_camera.zoom = 4
vispy_camera.on_draw(None)
np.testing.assert_almost_equal(viewer.camera.angles, (0, 0, 90))
np.testing.assert_almost_equal(viewer.camera.center, (11, 12))
np.testing.assert_almost_equal(viewer.camera.zoom, 4)
np.testing.assert_almost_equal(viewer.camera.angles, vispy_camera.angles)
np.testing.assert_almost_equal(viewer.camera.center, vispy_camera.center)
np.testing.assert_almost_equal(viewer.camera.zoom, vispy_camera.zoom)
def test_3D_camera(make_test_viewer):
"""Test vispy camera creation in 3D."""
viewer = make_test_viewer()
vispy_camera = viewer.window.qt_viewer.camera
np.random.seed(0)
data = np.random.random((11, 11, 11))
viewer.add_image(data)
viewer.dims.ndisplay = 3
assert viewer.camera.ndisplay == 3
# Test camera values have updated
np.testing.assert_almost_equal(viewer.camera.angles, (0, 0, 90))
np.testing.assert_almost_equal(viewer.camera.center, (5.0, 5.0, 5.0))
np.testing.assert_almost_equal(viewer.camera.angles, vispy_camera.angles)
np.testing.assert_almost_equal(viewer.camera.center, vispy_camera.center)
np.testing.assert_almost_equal(viewer.camera.zoom, vispy_camera.zoom)
def test_vispy_camera_update_from_model_3D(make_test_viewer):
"""Test vispy camera update from model in 3D."""
viewer = make_test_viewer()
vispy_camera = viewer.window.qt_viewer.camera
np.random.seed(0)
data = np.random.random((11, 11, 11))
viewer.add_image(data)
viewer.dims.ndisplay = 3
assert viewer.camera.ndisplay == 3
# Update camera angles, center, and zoom
viewer.camera.angles = (24, 12, -19)
viewer.camera.center = (11, 12, 15)
viewer.camera.zoom = 4
np.testing.assert_almost_equal(viewer.camera.angles, (24, 12, -19))
np.testing.assert_almost_equal(viewer.camera.center, (11, 12, 15))
np.testing.assert_almost_equal(viewer.camera.zoom, 4)
np.testing.assert_almost_equal(viewer.camera.angles, vispy_camera.angles)
np.testing.assert_almost_equal(viewer.camera.center, vispy_camera.center)
np.testing.assert_almost_equal(viewer.camera.zoom, vispy_camera.zoom)
def test_camera_model_update_from_vispy_3D(make_test_viewer):
"""Test camera model updates from vispy in 3D."""
viewer = make_test_viewer()
vispy_camera = viewer.window.qt_viewer.camera
np.random.seed(0)
data = np.random.random((11, 11, 11))
viewer.add_image(data)
viewer.dims.ndisplay = 3
assert viewer.camera.ndisplay == 3
# Update vispy camera angles, center, and zoom
viewer.camera.angles = (24, 12, -19)
vispy_camera.center = (11, 12, 15)
vispy_camera.zoom = 4
vispy_camera.on_draw(None)
np.testing.assert_almost_equal(viewer.camera.angles, (24, 12, -19))
np.testing.assert_almost_equal(viewer.camera.center, (11, 12, 15))
np.testing.assert_almost_equal(viewer.camera.zoom, 4)
np.testing.assert_almost_equal(viewer.camera.angles, vispy_camera.angles)
np.testing.assert_almost_equal(viewer.camera.center, vispy_camera.center)
np.testing.assert_almost_equal(viewer.camera.zoom, vispy_camera.zoom)
| 37.210884 | 77 | 0.731444 | 812 | 5,470 | 4.729064 | 0.070197 | 0.1625 | 0.132813 | 0.185938 | 0.977604 | 0.95599 | 0.94974 | 0.94974 | 0.94974 | 0.927344 | 0 | 0.034826 | 0.154845 | 5,470 | 146 | 78 | 37.465753 | 0.795804 | 0.119744 | 0 | 0.863158 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.452632 | 1 | 0.063158 | false | 0 | 0.010526 | 0 | 0.073684 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
d48d11710bf7c6c9b844fa72894d8dc500bb0576 | 143 | py | Python | src/handlers/test_error.py | AriyaOk/study_python | dd2b24dc5a94d2cb100463f49739453d4e7b6203 | [
"MIT"
] | null | null | null | src/handlers/test_error.py | AriyaOk/study_python | dd2b24dc5a94d2cb100463f49739453d4e7b6203 | [
"MIT"
] | 10 | 2020-10-20T18:09:16.000Z | 2021-09-22T19:45:32.000Z | src/handlers/test_error.py | AriyaOk/study_python | dd2b24dc5a94d2cb100463f49739453d4e7b6203 | [
"MIT"
] | null | null | null | from framework.types import RequestT
from framework.types import ResponseT
def make_error(_request: RequestT = None) -> ResponseT:
1 / 0
| 20.428571 | 55 | 0.762238 | 19 | 143 | 5.631579 | 0.684211 | 0.242991 | 0.336449 | 0.448598 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016807 | 0.167832 | 143 | 6 | 56 | 23.833333 | 0.882353 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0.5 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
d4a5cd8e84f105ace06220eec20809962f7e1fc7 | 7,962 | py | Python | tests/unit/ppr/test_renewal_statement.py | doug-lovett/test-schemas-dl | a05e87b983f2c3559c081dd65aff05e2c67e6186 | [
"Apache-2.0"
] | null | null | null | tests/unit/ppr/test_renewal_statement.py | doug-lovett/test-schemas-dl | a05e87b983f2c3559c081dd65aff05e2c67e6186 | [
"Apache-2.0"
] | null | null | null | tests/unit/ppr/test_renewal_statement.py | doug-lovett/test-schemas-dl | a05e87b983f2c3559c081dd65aff05e2c67e6186 | [
"Apache-2.0"
] | null | null | null | # Copyright © 2020 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Suite to ensure the PPR Renewal Statement (request and response) schema is valid.
"""
import copy
from registry_schemas import validate
from registry_schemas.example_data.ppr import RENEWAL_STATEMENT
def test_valid_renewal_request():
"""Assert that the schema is performing as expected for a renewal request."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['courtOrderInformation']
del statement['createDateTime']
del statement['renewalRegistrationNumber']
del statement['payment']
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert is_valid
def test_valid_renewal_RL_request():
"""Assert that the schema is performing as expected for a repairer's lien renewal request."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['expiryDate']
del statement['createDateTime']
del statement['renewalRegistrationNumber']
del statement['payment']
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert is_valid
def test_valid_renewal_response():
"""Assert that the schema is performing as expected for an renewal response."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert is_valid
def test_invalid_renewal_baseregnum():
"""Assert that an invalid renewal statement fails - base registration number too long."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
statement['baseRegistrationNumber'] = 'B0000123456789'
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_clientref():
"""Assert that an invalid renewal statement fails - client reference number too long."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
statement['clientReferenceId'] = 'RSXXXXXXXX00001234567'
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_courtorder():
"""Assert that an invalid renewal statement fails - court order court name is missing."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
del statement['courtOrderInformation']['courtName']
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_expiry():
"""Assert that an invalid renewal statement fails - expiry date format is invalid."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
statement['expiryDate'] = 'XXXXXXXX'
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_timestamp():
"""Assert that an invalid renewal statement fails - create timestamp format is invalid."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
statement['createDateTime'] = 'XXXXXXXXXXXX'
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_regnum():
"""Assert that an invalid renewal statement fails - registration number too long."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
statement['renewalRegistrationNumber'] = 'D000012345678'
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_missing_debtor_first():
"""Assert that an invalid renewal statement fails - base debtor name is missing."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['createDateTime']
del statement['renewalRegistrationNumber']
del statement['payment']
del statement['baseDebtor']['businessName']
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_missing_regparty_address():
"""Assert that an invalid renewal statement fails - registering party address is missing."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
del statement['registeringParty']['address']
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_missing_basereg():
"""Assert that an invalid renewal statement fails - base registration number is missing."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
del statement['baseRegistrationNumber']
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_missing_regparty():
"""Assert that an invalid renewal statement fails - registering party is missing."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
del statement['registeringParty']
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_missing_debtor():
"""Assert that an invalid renewal statement fails - base debtor and statement reg number are missing."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['baseDebtor']
del statement['createDateTime']
del statement['renewalRegistrationNumber']
del statement['payment']
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
def test_invalid_renewal_missing_expiry():
"""Assert that an invalid renewal statement fails - expiry date and court order information are missing."""
statement = copy.deepcopy(RENEWAL_STATEMENT)
del statement['expiryDate']
del statement['courtOrderInformation']
del statement['createDateTime']
del statement['renewalRegistrationNumber']
del statement['payment']
is_valid, errors = validate(statement, 'renewalStatement', 'ppr')
if errors:
for err in errors:
print(err.message)
print(errors)
assert not is_valid
| 29.380074 | 111 | 0.704974 | 920 | 7,962 | 5.990217 | 0.159783 | 0.076211 | 0.057158 | 0.076211 | 0.802032 | 0.802032 | 0.799492 | 0.770459 | 0.759753 | 0.686445 | 0 | 0.006954 | 0.20535 | 7,962 | 270 | 112 | 29.488889 | 0.863917 | 0.240517 | 0 | 0.828221 | 0 | 0 | 0.160618 | 0.046707 | 0 | 0 | 0 | 0 | 0.092025 | 1 | 0.092025 | false | 0 | 0.018405 | 0 | 0.110429 | 0.184049 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d4f13f72c985ffbc3a1eb9a4e4490c8f6aec73a8 | 9,817 | py | Python | courses/migrations/0015_auto_20190906_2129.py | xni06/wagtail-CMS | defe0f46e8109e96d6d5e9fd4cf002790fbcd54b | [
"MIT"
] | 4 | 2019-06-04T07:18:44.000Z | 2020-06-15T22:27:36.000Z | courses/migrations/0015_auto_20190906_2129.py | jaspotsangbam/wagtail-CMS | 2ec0dd05ba1f9339b705ce529588131049aa9bc7 | [
"MIT"
] | 38 | 2019-05-09T13:14:56.000Z | 2022-03-12T00:54:57.000Z | courses/migrations/0015_auto_20190906_2129.py | jaspotsangbam/wagtail-CMS | 2ec0dd05ba1f9339b705ce529588131049aa9bc7 | [
"MIT"
] | 3 | 2019-09-26T14:32:36.000Z | 2021-05-06T15:48:01.000Z | # Generated by Django 2.2.4 on 2019-09-06 21:29
from django.db import migrations
import wagtail.core.blocks
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('courses', '0014_coursemanagepage_one_selected_text'),
]
operations = [
migrations.AlterField(
model_name='coursecomparisonpage',
name='accordions',
field=wagtail.core.fields.StreamField([('satisfaction_panel', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), ('lead_text', wagtail.core.blocks.CharBlock(required=False)), ('intro_body', wagtail.core.blocks.RichTextBlock(blank=True)), ('teaching_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('learning_opportunities_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('assessment_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('support_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('organisation_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('learning_resources_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('learning_community_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('student_voice_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('nhs_placement_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('data_source', wagtail.core.blocks.RichTextBlock(blank=True))], icon='collapse-down', required=True)), ('entry_information_panel', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), ('qualification_heading', wagtail.core.blocks.CharBlock(required=False)), ('qualification_intro', wagtail.core.blocks.CharBlock(required=False)), ('qualification_label_explanation_heading', wagtail.core.blocks.CharBlock(required=False)), ('qualification_label_explanation_body', wagtail.core.blocks.RichTextBlock(blank=True)), ('qualification_data_source', wagtail.core.blocks.RichTextBlock(blank=True)), ('tariffs_heading', wagtail.core.blocks.CharBlock(required=False)), ('tariffs_intro', wagtail.core.blocks.CharBlock(required=False)), ('tariffs_data_source', wagtail.core.blocks.RichTextBlock(blank=True))], icon='collapse-down', required=True)), ('after_one_year_panel', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), 
('section_heading', wagtail.core.blocks.CharBlock(required=False)), ('intro', wagtail.core.blocks.CharBlock(required=False)), ('lead', wagtail.core.blocks.CharBlock(required=False)), ('label_explanation_heading', wagtail.core.blocks.CharBlock(required=False)), ('label_explanation_body', wagtail.core.blocks.RichTextBlock(blank=True)), ('data_source', wagtail.core.blocks.RichTextBlock(blank=True))], icon='collapse-down', required=True)), ('after_course_panel', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), ('section_heading', wagtail.core.blocks.CharBlock(required=False)), ('intro', wagtail.core.blocks.RichTextBlock(blank=True)), ('six_month_earnings_heading', wagtail.core.blocks.CharBlock(required=False)), ('six_month_earnings_explanation', wagtail.core.blocks.RichTextBlock(blank=True)), ('six_month_earnings_salary_range_heading', wagtail.core.blocks.CharBlock(required=False)), ('six_month_earnings_data_source', wagtail.core.blocks.RichTextBlock(blank=True)), ('three_years_earnings_heading', wagtail.core.blocks.CharBlock(required=False)), ('three_years_earnings_explanation', wagtail.core.blocks.RichTextBlock(blank=True)), ('three_years_earnings_salary_range_heading', wagtail.core.blocks.CharBlock(required=False)), ('three_years_earnings_data_source', wagtail.core.blocks.RichTextBlock(blank=True)), ('six_month_employment_heading', wagtail.core.blocks.CharBlock(required=False)), ('six_month_employment_intro', wagtail.core.blocks.CharBlock(required=False)), ('six_month_employment_lead', wagtail.core.blocks.CharBlock(required=False)), ('six_month_employment_data_source', wagtail.core.blocks.RichTextBlock(blank=True)), ('six_month_employment_roles_heading', wagtail.core.blocks.CharBlock(required=False)), ('six_month_employment_roles_intro', wagtail.core.blocks.CharBlock(required=False)), ('six_month_employment_roles_label_explanation_heading', wagtail.core.blocks.CharBlock(required=False)), 
('six_month_employment_roles_label_explanation_body', wagtail.core.blocks.RichTextBlock(blank=True)), ('six_month_employment_roles_data_source', wagtail.core.blocks.RichTextBlock(blank=True)), ('common_jobs_heading', wagtail.core.blocks.CharBlock(required=False)), ('common_jobs_intro', wagtail.core.blocks.CharBlock(required=False)), ('common_jobs_data_source', wagtail.core.blocks.RichTextBlock(blank=True))], icon='collapse-down', required=True)), ('accreditation_panel', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), ('section_heading', wagtail.core.blocks.CharBlock(required=False))], icon='collapse-down', required=True))]),
),
migrations.AlterField(
model_name='coursedetailpage',
name='accordions',
field=wagtail.core.fields.StreamField([('satisfaction_panel', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), ('lead_text', wagtail.core.blocks.CharBlock(required=False)), ('intro_body', wagtail.core.blocks.RichTextBlock(blank=True)), ('teaching_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('learning_opportunities_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('assessment_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('support_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('organisation_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('learning_resources_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('learning_community_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('student_voice_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('nhs_placement_stats_header', wagtail.core.blocks.CharBlock(required=False)), ('data_source', wagtail.core.blocks.RichTextBlock(blank=True))], icon='collapse-down', required=True)), ('entry_information_panel', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), ('qualification_heading', wagtail.core.blocks.CharBlock(required=False)), ('qualification_intro', wagtail.core.blocks.CharBlock(required=False)), ('qualification_label_explanation_heading', wagtail.core.blocks.CharBlock(required=False)), ('qualification_label_explanation_body', wagtail.core.blocks.RichTextBlock(blank=True)), ('qualification_data_source', wagtail.core.blocks.RichTextBlock(blank=True)), ('tariffs_heading', wagtail.core.blocks.CharBlock(required=False)), ('tariffs_intro', wagtail.core.blocks.CharBlock(required=False)), ('tariffs_data_source', wagtail.core.blocks.RichTextBlock(blank=True))], icon='collapse-down', required=True)), ('after_one_year_panel', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), 
('section_heading', wagtail.core.blocks.CharBlock(required=False)), ('intro', wagtail.core.blocks.CharBlock(required=False)), ('lead', wagtail.core.blocks.CharBlock(required=False)), ('label_explanation_heading', wagtail.core.blocks.CharBlock(required=False)), ('label_explanation_body', wagtail.core.blocks.RichTextBlock(blank=True)), ('data_source', wagtail.core.blocks.RichTextBlock(blank=True))], icon='collapse-down', required=True)), ('after_course_panel', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), ('section_heading', wagtail.core.blocks.CharBlock(required=False)), ('intro', wagtail.core.blocks.RichTextBlock(blank=True)), ('six_month_earnings_heading', wagtail.core.blocks.CharBlock(required=False)), ('six_month_earnings_explanation', wagtail.core.blocks.RichTextBlock(blank=True)), ('six_month_earnings_salary_range_heading', wagtail.core.blocks.CharBlock(required=False)), ('six_month_earnings_data_source', wagtail.core.blocks.RichTextBlock(blank=True)), ('three_years_earnings_heading', wagtail.core.blocks.CharBlock(required=False)), ('three_years_earnings_explanation', wagtail.core.blocks.RichTextBlock(blank=True)), ('three_years_earnings_salary_range_heading', wagtail.core.blocks.CharBlock(required=False)), ('three_years_earnings_data_source', wagtail.core.blocks.RichTextBlock(blank=True)), ('six_month_employment_heading', wagtail.core.blocks.CharBlock(required=False)), ('six_month_employment_intro', wagtail.core.blocks.CharBlock(required=False)), ('six_month_employment_lead', wagtail.core.blocks.CharBlock(required=False)), ('six_month_employment_data_source', wagtail.core.blocks.RichTextBlock(blank=True)), ('six_month_employment_roles_heading', wagtail.core.blocks.CharBlock(required=False)), ('six_month_employment_roles_intro', wagtail.core.blocks.CharBlock(required=False)), ('six_month_employment_roles_label_explanation_heading', wagtail.core.blocks.CharBlock(required=False)), 
('six_month_employment_roles_label_explanation_body', wagtail.core.blocks.RichTextBlock(blank=True)), ('six_month_employment_roles_data_source', wagtail.core.blocks.RichTextBlock(blank=True)), ('common_jobs_heading', wagtail.core.blocks.CharBlock(required=False)), ('common_jobs_intro', wagtail.core.blocks.CharBlock(required=False)), ('common_jobs_data_source', wagtail.core.blocks.RichTextBlock(blank=True))], icon='collapse-down', required=True)), ('accreditation_panel', wagtail.core.blocks.StructBlock([('heading', wagtail.core.blocks.CharBlock(required=False)), ('section_heading', wagtail.core.blocks.CharBlock(required=False))], icon='collapse-down', required=True))]),
),
]
| 377.576923 | 4,642 | 0.799328 | 1,181 | 9,817 | 6.415749 | 0.077053 | 0.177115 | 0.266992 | 0.260789 | 0.960538 | 0.960538 | 0.960538 | 0.960538 | 0.960538 | 0.960538 | 0 | 0.002021 | 0.042274 | 9,817 | 25 | 4,643 | 392.68 | 0.803872 | 0.004584 | 0 | 0.421053 | 1 | 0 | 0.297646 | 0.209928 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.157895 | 0 | 0.315789 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
d4f5ef01240b911c6f9e6fb0e72b37b0992379b0 | 32,724 | py | Python | src/sampling.py | ymori206226/test | 001639831ea28814e071ba6a23e8c35da85b6908 | [
"Apache-2.0"
] | null | null | null | src/sampling.py | ymori206226/test | 001639831ea28814e071ba6a23e8c35da85b6908 | [
"Apache-2.0"
] | null | null | null | src/sampling.py | ymori206226/test | 001639831ea28814e071ba6a23e8c35da85b6908 | [
"Apache-2.0"
] | null | null | null | """
#######################
# quket #
#######################
sampling.py
Functions related to sampling simulations.
(2020/12/06) Currently disabled.
"""
import time
import csv
import numpy as np
from qulacs.observable import create_observable_from_openfermion_text
from qulacs import QuantumState
from qulacs import QuantumCircuit
from qulacs.gate import P0, P1
from openfermion.ops import QubitOperator
from . import utils
from .fileio import prints, print_state
from .ucclib import set_circuit_uccsd
from .hflib import set_circuit_rhf, set_circuit_uhf
from .phflib import set_circuit_rhfZ, set_circuit_uhfZ, controlled_Ug
def sample_observable(state, obs, n_sample):
    """Estimate the expectation value of an observable by sampling.

    Each Pauli term of ``obs`` is measured independently with ``n_sample``
    shots: the state is copied, rotated into the measurement basis of the
    term (H for X, S-dagger followed by H for Y, nothing for Z), sampled
    in the computational basis, and the parity of the sampled bits on the
    qubits the term acts on is averaged.

    Args:
        state (qulacs.QuantumState): State to be measured.
        obs (qulacs.Observable): Observable whose expectation value is
            estimated.
        n_sample (int): Number of samples taken for each Pauli term.

    Return:
        :float: sampled expectation value of the observable

    Author(s): Takashi Tsuchimochi
    """
    n_term = obs.get_term_count()
    n_qubits = obs.get_qubit_count()
    exp = 0
    buf_state = QuantumState(n_qubits)
    for i in range(n_term):
        pauli_term = obs.get_term(i)
        coef = pauli_term.get_coef()
        pauli_id = pauli_term.get_pauli_id_list()
        pauli_index = pauli_term.get_index_list()
        if not pauli_id:  # identity term: contributes its coefficient exactly
            exp += coef
            continue
        buf_state.load(state)
        # Rotate every measured qubit into the Z (computational) basis:
        # pauli_id 1 = X -> H, 2 = Y -> Sdag then H, 3 = Z -> no rotation.
        measurement_circuit = QuantumCircuit(n_qubits)
        for single_pauli, index in zip(pauli_id, pauli_index):
            if single_pauli == 1:
                measurement_circuit.add_H_gate(index)
            elif single_pauli == 2:
                measurement_circuit.add_Sdag_gate(index)
                measurement_circuit.add_H_gate(index)
        measurement_circuit.update_quantum_state(buf_state)
        samples = buf_state.sampling(n_sample)
        # Bitmask with bit q set iff qubit q is acted on by this term
        # (the string is built most-significant qubit first, hence the
        # n_qubits - 1 - k reversal before int(..., 2)).
        mask = int("".join("1" if n_qubits - 1 - k in pauli_index else "0"
                           for k in range(n_qubits)), 2)
        # Average parity of the masked bits gives <term>; weight by coef.
        exp += (coef
                * sum((-1)**bin(x & mask).count("1") for x in samples)
                / n_sample)
    return exp
def adaptive_sample_observable(state, obs, n_sample):
"""
Args:
state (qulacs.QuantumState):
obs (qulacs.Observable)
n_sample (int): number of samples for each observable
Return:
:float: sampled expectation value of the observable
"""
n_term = obs.get_term_count()
n_qubits = obs.get_qubit_count()
exp = 0
buf_state = QuantumState(n_qubits)
### check the coefficients for each term...
coef_list = np.array([abs(obs.get_term(i).get_coef())
for i in range(n_term)])
sum_coef = np.sum(coef_list)
### sort
sorted_indices = np.argsort(-coef_list)
coef_list.sort()
#sorted_coef_list = [coef_list[i] for i in sorted_indices]
### determine sampling wight
n_sample_total = n_sample*n_term
n_sample_list = n_sample_total*coef_list//sum_coef
n_count = np.sum(n_sample_list)
n_rest = n_sample_total - n_count
n_sample_list[sorted_indices[:n_rest]] += 1
j = 0
for i in range(n_term):
if n_sample_list[i] == 0:
continue
j += n_sample_list[i]
pauli_term = obs.get_term(i)
coef = pauli_term.get_coef()
pauli_id = pauli_term.get_pauli_id_list()
pauli_index = pauli_term.get_index_list()
if len(pauli_id) == 0: # means identity
exp += coef
continue
buf_state.load(state)
measurement_circuit = QuantumCircuit(n_qubits)
mask = "".join(["1" if n_qubits - 1 - k in pauli_index else "0"
for k in range(n_qubits)])
for single_pauli, index in zip(pauli_id, pauli_index):
if single_pauli == 1:
measurement_circuit.add_H_gate(index)
elif single_pauli == 2:
measurement_circuit.add_Sdag_gate(index)
measurement_circuit.add_H_gate(index)
measurement_circuit.update_quantum_state(buf_state)
samples = buf_state.sampling(n_sample_list[i])
mask = int(mask, 2)
exp += (coef
*sum(list(map(lambda x: (-1)**(bin(x & mask).count("1")),
samples)))
/n_sample_list[i])
return exp
def test_observable(state, obs, obsZ, n_sample):
"""Function
Args:
state (qulacs.QuantumState): This includes entangled ancilla
(n_qubits = n_qubit_system + 1)
obs (qulacs.Observable): This does not include ancilla Z
(n_qubit_system)
obsZ (qulacs.Observable): Single Pauli Z for ancilla (1)
poststate0 (qulacs.QuantumState): post-measurement state
when ancilla = 0 (n_qubit_system)
poststate1 (qulacs.QuantumState): post-measurement state
when ancilla = 1 (n_qubit_system)
n_sample (int): number of samples for each observable
Return:
:float: sampled expectation value of the observable
Author(s): Takashi Tsuchimochi
"""
n_term = obs.get_term_count()
n_qubits = obs.get_qubit_count()
p0 = state.get_zero_probability(n_qubits)
p1 = 1 - p0
opt = f"0{n_qubits}b"
expH = 0
exp = []
coef = []
buf_state = QuantumState(n_qubits)
for i in range(n_term):
pauli_term = obs.get_term(i)
coef.append(pauli_term.get_coef().real)
pauli_id = pauli_term.get_pauli_id_list()
pauli_index = pauli_term.get_index_list()
if len(pauli_id) == 0: # means identity
exp.extend(coef)
continue
buf_state.load(state)
measurement_circuit = QuantumCircuit(n_qubits)
mask = "".join(["1" if n_qubits - 1 - k in pauli_index else "0"
for k in range(n_qubits)])
measure_observable = QubitOperator((), 1)
for single_pauli, index in zip(pauli_id, pauli_index):
if single_pauli == 1:
### X
measurement_circuit.add_H_gate(index)
measure_observable *= QubitOperator(f"X{index}")
elif single_pauli == 2:
### Y
measurement_circuit.add_Sdag_gate(index)
measurement_circuit.add_H_gate(index)
measure_observable *= QubitOperator(f"Y{index}")
elif single_pauli == 3:
### Z
measure_observable *= QubitOperator(f"Z{index}")
qulacs_measure_observable \
= create_observable_from_openfermion_text(
str(measure_observable))
measurement_circuit.update_quantum_state(buf_state)
#exp.append(obsZ.get_expectation_value(buf_state).real)
samples = buf_state.sampling(n_sample)
#print(f"samples? {format(samples[0], opt)}")
#print(f"I = {i:5d} h_I = {coef[i]:10.5f} <P_I> = {exp[i]:10.5f}")
#mask = int(mask, 2)
#print(sum(list(map(lambda x: (-1)**(bin(x & mask).count('1')),
# samples))))
#print(coef*sum(list(map(lambda x: (-1)**(bin(x & mask).count('1')),
# samples))))
expH += coef[i]*exp[i]
samples = buf_state.sampling(n_sample)
mask = int(mask, 2)
prob = (sum(list(map(lambda x: (-1)**(bin(x & mask).count("1")),
samples)))
/n_sample)
measure_list = list(map(int, np.ones(n_qubits)*2))
for j in pauli_index:
measure_list[j] = 1
#print(qulacs_measure_observable.get_expectation_value(state))
expH += coef[i]*prob
#print(f"coef: {coef[i]:10.5f} prob: {prob:10.5f}")
return expH
def sample_observable_noisy_circuit(circuit, initial_state, obs,
n_circuit_sample=1000,
n_sample_per_circuit=1):
"""Function
Args:
circuit (:class:`qulacs.QuantumCircuit`)
initial_state (:class:`qulacs.QuantumState`)
obs (:class:`qulacs.Observable`)
n_circuit_sample (:class:`int`): number of circuit samples
n_sample (:class:`int`): number of samples per one circuit samples
Return:
:float: sampled expectation value of the observable
Author(s): Unknown
"""
exp = 0
state = QuantumState(obs.get_qubit_count())
for _ in range(n_circuit_sample):
state.load(initial_state)
circuit.update_quantum_state(state)
exp += sample_observable(state, obs, n_sample_per_circuit)
exp /= n_circuit_sample
return exp
def test_transition_observable(state, obs, poststate0, poststate1, n_sample):
"""
Args:
state (qulacs.QuantumState): This includes entangled ancilla
(n_qubits = n_qubit_system + 1)
obs (qulacs.Observable): This does not include ancilla Z
(n_qubit_system)
poststate0 (qulacs.QuantumState): post-measurement state
when ancilla = 0 (n_qubit_system)
poststate1 (qulacs.QuantumState): post-measurement state
when ancilla = 1 (n_qubit_system)
n_sample (int): number of samples for each observable
Return:
:float: sampled expectation value of the observable
"""
n_term = obs.get_term_count()
n_qubits = obs.get_qubit_count()
p0 = state.get_zero_probability(n_qubits - 1)
p1 = 1 - p0
opt = f"0{n_qubits}b"
prints(f"p0: {p0} p1: {p1}")
print_state(poststate0, name="post(0)")
prints("post(1)")
print_state(poststate1, name="post(1)")
expH = 0
exp = []
coef = []
buf_state = QuantumState(n_qubits)
for i in range(n_term):
pauli_term = obs.get_term(i)
coef.append(pauli_term.get_coef().real)
pauli_id = pauli_term.get_pauli_id_list()
pauli_index = pauli_term.get_index_list()
if len(pauli_id) == 0: # means identity
exp.extend(coef)
continue
buf_state.load(state)
measurement_circuit = QuantumCircuit(n_qubits)
mask = "".join(["1" if n_qubits - 1 - k in pauli_index else "0"
for k in range(n_qubits)])
measure_observable = QubitOperator((), 1)
#measure_observable = QubitOperator('Z%d' % n_qubits)
for single_pauli, index in zip(pauli_id, pauli_index):
if single_pauli == 1:
### X
measurement_circuit.add_H_gate(index)
measure_observable *= QubitOperator(f"X{index}")
elif single_pauli == 2:
### Y
measurement_circuit.add_Sdag_gate(index)
measurement_circuit.add_H_gate(index)
measure_observable *= QubitOperator(f"Y{index}")
elif single_pauli == 3:
### Z
measure_observable *= QubitOperator(f"Z{index}")
qulacs_measure_observable \
= create_observable_from_openfermion_text(
str(measure_observable))
### p0 ###
H0 = qulacs_measure_observable.get_expectation_value(poststate0)
### p1 ###
H1 = qulacs_measure_observable.get_expectation_value(poststate1)
prob = p0*H0 - p1*H1
# print(prob, qulacs_measure_observable.get_expectation_value(state), obs.get_expectation_value(state))
prob = qulacs_measure_observable.get_expectation_value(state)
expH += coef[i]*prob
# measurement_circuit.update_quantum_state(buf_state)
# samples = buf_state.sampling(n_sample)
# print('samples? ',format(samples[0],opt))
# print("I = :",'%5d' % i, " h_I ", '%10.5f' % coef[i], " <P_I> ", '%10.5f' % exp[i])
# mask = int(mask, 2)
# print(sum(list(map(lambda x: (-1) **(bin(x & mask).count('1')), samples))))
# print(coef*sum(list(map(lambda x: (-1) **
# (bin(x & mask).count('1')), samples))))
# expH += coef[i] * exp[i]
# samples = buf_state.sampling(n_sample)
# mask = int(mask, 2)
# prob = sum(list(map(lambda x: (-1) **
# (bin(x & mask).count('1')), samples)))/n_sample
# measure_list = list(map(int,np.ones(n_qubits)*2))
# for j in pauli_index:
# measure_list[j] = 1
# print(qulacs_measure_observable.get_expectation_value(state))
# expH += coef[i] * prob
print(f"coef: {coef[i]:10.5f} prob: {prob:10.5f}")
return expH
def sample_observable_noisy_circuit(circuit, initial_state, obs,
n_circuit_sample=1000,
n_sample_per_circuit=1):
"""
Args:
circuit (:class:`qulacs.QuantumCircuit`)
initial_state (:class:`qulacs.QuantumState`)
obs (:class:`qulacs.Observable`)
n_circuit_sample (:class:`int`): number of circuit samples
n_sample (:class:`int`): number of samples per one circuit samples
Return:
:float: sampled expectation value of the observable
Author(s): Unknown
"""
exp = 0
state = QuantumState(obs.get_qubit_count())
for _ in range(n_circuit_sample):
state.load(initial_state)
circuit.update_quantum_state(state)
exp += sample_observable(state, obs, n_sample_per_circuit)
exp /= n_circuit_sample
return exp
def cost_phf_sample(Quket, print_level,
qulacs_hamiltonian, qulacs_hamiltonianZ, qulacs_s2Z,
qulacs_ancZ, coef0_H, coef0_S2, ref, theta_list,
samplelist):
"""Function:
Sample Hamiltonian and S**2 expectation values with PHF and PUCCSD.
Write out the statistics in csv files.
Author(s): Takashi Tsuchimochi
"""
t1 = time.time()
noa = Quket.noa
nob = Quket.nob
nva = Quket.nva
nvb = Quket.nvb
n_electrons = Quket.n_electrons
n_qubit_system = n_qubits
n_qubits = Quket.n_qubits + 1
anc = n_qubit_system
ndim1 = Quket.ndim1
state = QuantumState(n_qubits)
circuit_rhf = set_circuit_rhfZ(n_qubits, n_electrons)
circuit_rhf.update_quantum_state(state)
if ref == "phf":
circuit_uhf = set_circuit_uhfZ(n_qubits, noa, nob, nva, nvb, theta_list)
circuit_uhf.update_quantum_state(state)
print("pHF")
elif ref == "puccsd":
circuit = set_circuit_uccsd(n_qubits, noa, nob, nva, nvb, theta_list,
ndim1)
for i in range(rho):
circuit.update_quantum_state(state)
print("UCCSD")
if print_level > -1:
print("State before projection")
utils.print_state(state, n_qubit_system)
#### Set post-measurement states ####
#poststate0 = state.copy()
#poststate1 = state.copy()
#circuit0 = QuantumCircuit(n_qubits)
#circuit1 = QuantumCircuit(n_qubits)
#### Projection to anc = 0 or anc = 1 ###
#circuit0.add_gate(P0(0))
#circuit1.add_gate(P1(0))
#circuit0.update_quantum_state(poststate0)
#circuit1.update_quantum_state(poststate1)
#### Renormalize each state ###
#norm0 = poststate0.get_squared_norm()
#norm1 = poststate1.get_squared_norm()
#poststate0.normalize(norm0)
#poststate1.normalize(norm1)
### grid loop ###
Ng = 4
beta = [-0.861136311594053, -0.339981043584856,
0.339981043584856, 0.861136311594053]
wg = [0.173927422568724, 0.326072577431273,
0.326072577431273, 0.173927422568724]
Ng = 2
beta = [0.577350269189626, -0.577350269189626]
wg = [0.5, 0.5]
### a list to compute the probability to observe 0 in ancilla qubit
p0_list = np.full(n_qubits, 2)
p0_list[-1] = 0
### Array for <HUg>, <S2Ug>, <Ug>
# samplelist = [10,100,1000,10000,100000,1000000,10000000]
ncyc = 4
prints("", filepath="./log2.txt")
for i_sample in samplelist:
i_sample_x = i_sample
if i_sample == 10000000:
print("OK")
ncyc = ncyc*10
i_sample_x = 1000000
sampleHUg1 = []
sampleHUg2 = []
sampleHUg3 = []
sampleHUg4 = []
sampleS2Ug1 = []
sampleS2Ug2 = []
sampleS2Ug3 = []
sampleS2Ug4 = []
sampleUg1 = []
sampleUg2 = []
sampleUg3 = []
sampleUg4 = []
# sampleEn = []
# sampleS2 = []
sampleHUg = np.zeros((ncyc, Ng))
sampleS2Ug = np.zeros((ncyc, Ng))
sampleUg = np.zeros((ncyc, Ng))
sampleEn = np.zeros((ncyc, 1))
sampleS2 = np.zeros((ncyc, 1))
for icyc in range(ncyc):
prints(f"n_sample = {i_sample_x} ({icyc} / {ncyc})",
filepath="./log2.txt")
HUg = []
S2Ug = []
Ug = []
Ephf = S2 = Norm = 0
for i in range(Ng):
### Copy quantum state of UHF (cannot be done in real device) ###
state_g = QuantumState(n_qubits)
state_g.load(state)
### Construct Ug test
circuit_ug = QuantumCircuit(n_qubits)
### Hadamard on anc
circuit_ug.add_H_gate(anc)
controlled_Ug(circuit_ug, n_qubits, anc, np.arccos(beta[i]))
circuit_ug.add_H_gate(anc)
circuit_ug.update_quantum_state(state_g)
### Set post-measurement states ####
poststate0 = state_g.copy()
poststate1 = state_g.copy()
circuit0 = QuantumCircuit(n_qubits)
circuit1 = QuantumCircuit(n_qubits)
### Projection to anc = 0 or anc = 1 ###
circuit0.add_gate(P0(anc))
circuit1.add_gate(P1(anc))
circuit0.update_quantum_state(poststate0)
circuit1.update_quantum_state(poststate1)
### Renormalize each state ###
norm0 = poststate0.get_squared_norm()
norm1 = poststate1.get_squared_norm()
poststate0.normalize(norm0)
poststate1.normalize(norm1)
### Set ancilla qubit of poststate1 to zero (so that it won't be used) ###
circuit_anc = QuantumCircuit(n_qubits)
circuit_anc.add_X_gate(anc)
circuit_anc.update_quantum_state(poststate1)
print(
test_transition_observable(
state_g, qulacs_hamiltonianZ,
poststate0, poststate1, 100000))
# exit()
### Probabilities for getting 0 and 1 in ancilla qubit ###
p0 = state_g.get_marginal_probability(p0_list)
p1 = 1 - p0
### Compute expectation value <HUg> ###
HUg.append(sample_observable(state_g,
qulacs_hamiltonianZ,
i_sample_x).real)
#HUg.append(adaptive_sample_observable(state_g,
# qulacs_hamiltonianZ,
# i_sample_x).real)
### <S2Ug> ###
S2Ug.append(sample_observable(state_g,
qulacs_s2Z,
i_sample_x).real)
#S2Ug.append(adaptive_sample_observable(state_g,
# qulacs_s2Z,
# i_sample_x).real)
#S2Ug.append(qulacs_s2Z.get_expectation_value(state_g))
#HUg.append(0)
#S2Ug.append(0)
#Ug.append(p0 - p1)
n_term = qulacs_hamiltonianZ.get_term_count()
n_sample_total = i_sample_x * n_term
# in the worst-case scenario,
# Ug is measured as many times as n_sample_total
#(required to evaluate HUg)
Ug.append(sample_observable(state_g,
qulacs_ancZ,
i_sample_x*n_term).real)
#p0_sample = 0
#for j_sample in range(n_sample_total):
# if(p0 > np.random.rand()):
# p0_sample += 1
#Ug.append(2*p0_sample/n_sample_total - 1)
### Norm accumulation ###
Norm += wg[i]*Ug[i]
sampleHUg[icyc, i] = HUg[i]
sampleS2Ug[icyc, i] = S2Ug[i]
sampleUg[icyc, i] = Ug[i]
#print('p0 : ',p0,' p1 : ',p1, ' p0 - p1 : ',p0-p1)
sampleHUg1.append(HUg[0])
sampleHUg2.append(HUg[1])
#sampleHUg3.append(HUg[2])
#sampleHUg4.append(HUg[3])
sampleS2Ug1.append(S2Ug[0])
sampleS2Ug2.append(S2Ug[1])
#sampleS2Ug3.append(S2Ug[2])
#sampleS2Ug4.append(S2Ug[3])
sampleUg1.append(Ug[0])
sampleUg2.append(Ug[1])
#sampleUg3.append(Ug[2])
#sampleUg4.append(Ug[3])
### Energy calculation <HP>/<P> and <S**2P>/<P> ###
Ephf = 0
for i in range(Ng):
Ephf += wg[i]*HUg[i]/Norm
S2 += wg[i]*S2Ug[i]/Norm
# print(" <S**2> = ", S2, '\n')
Ephf += coef0_H
S2 += coef0_S2
sampleEn[icyc, 0] = Ephf
sampleS2[icyc, 0] = S2
# print(" <E[PHF]> (Nsample = ",i_sample,") = ", Ephf)
#print(f"(n_sample = {i_sample}): sample HUg1\n",sampleHUg1)
#print(f"(n_sample = {i_sample}): sample HUg2\n",sampleHUg2)
#print(f"(n_sample = {i_sample}): sample HUg3\n",sampleHUg3)
#print(f"(n_sample = {i_sample}): sample HUg4\n",sampleHUg4)
#print(f"(n_sample = {i_sample}): sample S2Ug1\n",sampleS2Ug1)
#print(f"(n_sample = {i_sample}): sample S2Ug2\n",sampleS2Ug2)
#print(f"(n_sample = {i_sample}): sample S2Ug3\n",sampleS2Ug3)
#print(f"(n_sample = {i_sample}): sample S2Ug4\n",sampleS2Ug4)
#print(f"(n_sample = {i_sample}): sample Ug1\n",sampleUg1)
#print(f"(n_sample = {i_sample}): sample Ug2\n",sampleUg2)
#print(f"(n_sample = {i_sample}): sample Ug3\n",sampleUg3)
#print(f"(n_sample = {i_sample}): sample Ug4\n",sampleUg4)
#print(f"(n_sample = {i_sample}): sample HUg1\n",sampleHUg1)
#print(f"(n_sample = {i_sample}): sample HUg2\n",sampleHUg2)
#print(f"(n_sample = {i_sample}): sample HUg3\n",sampleHUg3)
#print(f"(n_sample = {i_sample}): sample HUg4\n",sampleHUg4)
#print(f"(n_sample = {i_sample}): sample En\n",sampleEn)
#print(f"(n_sample = {i_sample}): sample S2\n",sampleS2)
with open(f"./Ug_{i_sample}.csv", "w") as fUg:
writer = csv.writer(fUg)
writer.writerows(sampleUg)
with open(f"./HUg_{i_sample}.csv", "w") as fHUg:
writer = csv.writer(fHUg)
writer.writerows(sampleHUg)
with open(f"./S2Ug_{i_sample}.csv", "w") as fS2Ug:
writer = csv.writer(fS2Ug)
writer.writerows(sampleS2Ug)
with open(f"./En_{i_sample}.csv", "w") as fEn:
writer = csv.writer(fEn)
writer.writerows(sampleEn)
with open(f"./S2_{i_smaple}.csv", "w") as fS2:
writer = csv.writer(fS2)
writer.writerows(sampleS2)
return Ephf, S2
def cost_uhf_sample(Quket, print_level, qulacs_hamiltonian, qulacs_s2,
kappa_list, samplelist):
"""Function:
Sample Hamiltonian and S**2 expectation values with UHF.
Write out the statistics in csv files.
Author(s): Takashi Tsuchimochi
"""
noa = Quket.noa
nob = Quket.nob
nva = Quket.nva
nvb = Quket.nvb
n_electrons = Quket.n_electrons
n_qubit_system = n_qubits
n_qubits = Quket.n_qubits + 1
anc = n_qubit_system
ncyc = 13
opt = f"0{n_qubit_system}b"
prints("", filepath="./log.txt", opentype="w")
for i_sample in samplelist:
sampleEn = np.zeros((ncyc, 1))
sampleS2 = np.zeros((ncyc, 1))
for icyc in range(ncyc):
prints(f"n_sample = {i_sample} ({icyc:3d} / {ncyc})",
filepath="./log.txt")
state = QuantumState(n_qubit_system)
circuit_rhf = set_circuit_rhf(n_qubit_system, n_electrons)
circuit_rhf.update_quantum_state(state)
circuit = set_circuit_uhf(n_qubit_system, noa, nob, nva, nvb,
kappa_list)
circuit.update_quantum_state(state)
Euhf = sample_observable(state, qulacs_hamiltonian, i_sample).real
#S2 = sample_observable(state, qulacs_s2, i_sample).real
#Euhf = adaptive_sample_observable(state,
# qulacs_hamiltonian,
# i_sample).real
#S2 = adaptive_sample_observable(state, qulacs_s2, i_sample).real
sampleEn[icyc, 0] = Euhf
#sampleS2[icyc,0] = S2
S2 = 0
with open(f"./UEn_{i_sample}.csv", "w") as fEn:
writer = csv.writer(fEn)
writer.writerows(sampleEn)
#with open('./US2_%d.csv' % i_sample, 'w') as fS2:
# writer = csv.writer(fS2)
# writer.writerows(sampleS2)
return Euhf, S2
def cost_phf_sample_oneshot(print_level, qulacs_hamiltonianZ, qulacs_s2Z,
qulacs_ancZ, coef0_H, coef0_S2, kappa_list):
"""Function:
Test function for sampling Hamiltonian and S** expectation values
with PHF just for once.
Author(s): Takashi Tsuchimochi
使われてない?
"""
t1 = time.time()
noa = Quket.noa
nob = Quket.nob
nva = Quket.nva
nvb = Quket.nvb
n_electrons = Quket.n_electrons
n_qubit_system = n_qubits
n_qubits = Quket.n_qubits + 1
anc = n_qubit_system
state = QuantumState(n_qubits)
circuit_rhf = set_circuit_rhfZ(n_qubits, n_electrons)
circuit_rhf.update_quantum_state(state)
circuit_uhf = set_circuit_uhfZ(n_qubits, noa, nob, nva, nvb, kappa_list)
circuit_uhf.update_quantum_state(state)
### Set post-measurement states ####
poststate0 = state.copy()
poststate1 = state.copy()
circuit0 = QuantumCircuit(n_qubits)
circuit1 = QuantumCircuit(n_qubits)
### Projection to anc = 0 or anc = 1 ###
circuit0.add_gate(P0(0))
circuit1.add_gate(P1(0))
circuit0.update_quantum_state(poststate0)
circuit1.update_quantum_state(poststate1)
### Renormalize each state ###
norm0 = poststate0.get_squared_norm()
norm1 = poststate1.get_squared_norm()
poststate0.normalize(norm0)
poststate1.normalize(norm1)
### grid loop ###
Ng = 4
beta = [-0.861136311594053, -0.339981043584856,
0.339981043584856, 0.861136311594053]
wg = [0.173927422568724, 0.326072577431273,
0.326072577431273, 0.173927422568724]
### a list to compute the probability to observe 0 in ancilla qubit
p0_list = np.full(n_qubits, 2)
p0_list[-1] = 0
### Array for <HUg>, <S2Ug>, <Ug>
samplelist = [5, 50, 500, 5000, 50000, 500000, 5000000]
Ng = 4
ncyc = 10
prints("", filepath="./log.txt", opentype="w")
for i_sample in samplelist:
sampleEn = []
sampleS2 = []
for icyc in range(ncyc):
prints(f"n_sample : {i_sample} ({icyc} / {ncyc})",
filepath="./log.txt")
HUg = []
S2Ug = []
Ug = []
Ephf = S2 = Norm = 0
for i in range(Ng):
### Copy quantum state of UHF (cannot be done in real device) ###
state_g = QuantumState(n_qubits)
circuit_rhf.update_quantum_state(state_g)
circuit_uhf.update_quantum_state(state_g)
### Construct Ug test
circuit_ug = QuantumCircuit(n_qubits)
### Hadamard on anc
circuit_ug.add_H_gate(anc)
controlled_Ug(circuit_ug, n_qubits, anc, np.arccos(beta[i]))
circuit_ug.add_H_gate(anc)
circuit_ug.update_quantum_state(state_g)
### Probabilities for getting 0 and 1 in ancilla qubit ###
p0 = state_g.get_marginal_probability(p0_list)
p1 = 1 - p0
### Compute expectation value <HUg> ###
HUg.append(sample_observable(state_g,
qulacs_hamiltonianZ,
i_sample).real)
### <S2Ug> ###
S2Ug.append(sample_observable(state_g,
qulacs_s2Z,
i_sample).real)
#S2Ug.append(qulacs_s2Z.get_expectation_value(state_g))
#Ug.append(p0 - p1)
Ug.append(sample_observable(state_g,
qulacs_ancZ,
i_sample).real)
### Norm accumulation ###
Norm += wg[i]*g[i]
sampleHUg[icyc, i] = HUg[i]
sampleS2Ug[icyc, i] = S2Ug[i]
sampleUg[icyc, i] = Ug[i]
#print(f"{p0=} {p1=} {p0-p1=}")
sampleHUg1.append(HUg[0])
sampleHUg2.append(HUg[1])
sampleHUg3.append(HUg[2])
sampleHUg4.append(HUg[3])
sampleS2Ug1.append(S2Ug[0])
sampleS2Ug2.append(S2Ug[1])
sampleS2Ug3.append(S2Ug[2])
SAMpleS2Ug4.append(S2Ug[3])
sampleUg1.append(Ug[0])
sampleUg2.append(Ug[1])
sampleUg3.append(Ug[2])
sampleUg4.append(Ug[3])
### Energy calculation <HP>/<P> and <S**2P>/<P> ###
Ephf = 0
for i in range(Ng):
Ephf += wg[i]*HUg[i]/Norm
S2 += wg[i]*S2Ug[i]/Norm
#print(f" E[PHF] = {Ephf} <S**2> = {S2} (Nsample = {i_sample})")
Ephf += coef0_H
S2 += coef0_S2
sampleEn[icyc, 0] = Ephf
sampleS2[icyc, 0] = S2
#print(f"(n_sample = {i_sample}): sample HUg1\n", sampleHUg1)
#print(f"(n_sample = {i_sample}): sample HUg2\n", sampleHUg2)
#print(f"(n_sample = {i_sample}): sample HUg3\n", sampleHUg3)
#print(f"(n_sample = {i_sample}): sample HUg4\n", sampleHUg4)
#print(f"(n_sample = {i_sample}): sample S2Ug1\n", sampleS2Ug1)
#print(f"(n_sample = {i_sample}): sample S2Ug2\n", sampleS2Ug2)
#print(f"(n_sample = {i_sample}): sample S2Ug3\n", sampleS2Ug3)
#print(f"(n_sample = {i_sample}): sample S2Ug4\n", sampleS2Ug4)
#print(f"(n_sample = {i_sample}): sample Ug1\n", sampleUg1)
#print(f"(n_sample = {i_sample}): sample Ug2\n", sampleUg2)
#print(f"(n_sample = {i_sample}): sample Ug3\n", sampleUg3)
#print(f"(n_sample = {i_sample}): sample Ug4\n", sampleUg4)
#print(f"(n_sample = {i_sample}): sample HUg1\n", sampleHUg1)
#print(f"(n_sample = {i_sample}): sample HUg2\n", sampleHUg2)
#print(f"(n_sample = {i_sample}): sample HUg3\n", sampleHUg3)
#print(f"(n_sample = {i_sample}): sample HUg4\n", sampleHUg4)
#print(f"(n_sample = {i_sample}): sample En\n", sampleEn)
#print(f"(n_sample = {i_sample}): sample S2\n", sampleS2)
with open(f"./HUg_{i_sample}.csv", "w") as fHUg:
writer = csv.writer(fHUg)
writer.writerows(sampleHUg)
with open(f"./S2Ug_{i_sample}.csv", "w") as fS2Ug:
writer = csv.writer(fS2Ug)
writer.writerows(sampleS2Ug)
with open(f"./Ug_{i_sample}.csv", "w") as fUg:
writer = csv.writer(fUg)
writer.writerows(sampleUg)
with open(f"./En_{i_sample}.csv", "w") as fEn:
writer = csv.writer(fEn)
writer.writerows(sampleEn)
with open(f"./S2_{i_sample}.csv", "w") as fS2:
writer = csv.writer(fS2)
writer.writerows(sampleS2)
return Ephf, S2
| 38.864608 | 111 | 0.557634 | 3,888 | 32,724 | 4.475823 | 0.087706 | 0.030571 | 0.017929 | 0.02017 | 0.834847 | 0.82071 | 0.801115 | 0.782669 | 0.772785 | 0.761981 | 0 | 0.04309 | 0.326274 | 32,724 | 841 | 112 | 38.91082 | 0.746224 | 0.30305 | 0 | 0.708502 | 0 | 0 | 0.028646 | 0.001901 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018219 | false | 0 | 0.026316 | 0 | 0.062753 | 0.044534 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d4fe5b37f3fb0de8b309053711c57b222545cba8 | 20,155 | py | Python | tests/plugins/product/sync/test_capabilities.py | cloudblue/product-sync | 0e1754967830b19673c1625b82ae1535658ec3bc | [
"Apache-2.0"
] | null | null | null | tests/plugins/product/sync/test_capabilities.py | cloudblue/product-sync | 0e1754967830b19673c1625b82ae1535658ec3bc | [
"Apache-2.0"
] | null | null | null | tests/plugins/product/sync/test_capabilities.py | cloudblue/product-sync | 0e1754967830b19673c1625b82ae1535658ec3bc | [
"Apache-2.0"
] | null | null | null | from copy import deepcopy
import pytest
from connect.cli.plugins.shared.sync_stats import SynchronizerStats
from connect.cli.plugins.product.sync.capabilities import CapabilitiesSynchronizer
from connect.client import ConnectClient
def test_no_action(get_sync_capabilities_env):
    """Syncing an unmodified capabilities sheet must not touch anything.

    All 9 rows are expected to be counted as skipped.
    """
    stats = SynchronizerStats()
    client = ConnectClient(
        use_specs=False,
        api_key='ApiKey SU:123',
        endpoint='https://localhost/public/v1',
    )
    synchronizer = CapabilitiesSynchronizer(client=client, silent=True, stats=stats)

    synchronizer.open('./tests/fixtures/capabilities_sync.xlsx', 'Capabilities')
    synchronizer.sync()

    expected_counts = {
        'processed': 9, 'created': 0, 'updated': 0,
        'deleted': 0, 'skipped': 9, 'errors': 0,
    }
    assert stats['Capabilities'].get_counts_as_dict() == expected_counts
def test_invalid_capability(fs, get_sync_capabilities_env):
    """An unknown capability name in the sheet is reported as a row error."""
    sheet = get_sync_capabilities_env['Capabilities']
    sheet['A2'].value = 'Invented'
    sheet['B2'].value = 'update'
    get_sync_capabilities_env.save(f'{fs.root_path}/test.xlsx')

    stats = SynchronizerStats()
    client = ConnectClient(
        use_specs=False,
        api_key='ApiKey SU:123',
        endpoint='https://localhost/public/v1',
    )
    synchronizer = CapabilitiesSynchronizer(client=client, silent=True, stats=stats)

    synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
    synchronizer.sync()

    assert stats['Capabilities'].get_counts_as_dict() == {
        'processed': 9, 'created': 0, 'updated': 0,
        'deleted': 0, 'skipped': 8, 'errors': 1,
    }
    assert stats['Capabilities']._row_errors == {2: ['Capability Invented is not valid capability']}
def test_invalid_usage_schema(fs, get_sync_capabilities_env):
    """An unsupported Pay-as-you-go schema value yields a row error."""
    sheet = get_sync_capabilities_env['Capabilities']
    sheet['B2'].value = 'update'
    sheet['C2'].value = 'magic'
    get_sync_capabilities_env.save(f'{fs.root_path}/test.xlsx')

    stats = SynchronizerStats()
    client = ConnectClient(
        use_specs=False,
        api_key='ApiKey SU:123',
        endpoint='https://localhost/public/v1',
    )
    synchronizer = CapabilitiesSynchronizer(client=client, silent=True, stats=stats)

    synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
    synchronizer.sync()

    assert stats['Capabilities'].get_counts_as_dict() == {
        'processed': 9, 'created': 0, 'updated': 0,
        'deleted': 0, 'skipped': 8, 'errors': 1,
    }
    assert stats['Capabilities']._row_errors == {2: ['Schema magic is not supported']}
def test_invalid_tier_level(fs, get_sync_capabilities_env):
    """An invalid value for the Reseller Authorization level is rejected."""
    sheet = get_sync_capabilities_env['Capabilities']
    sheet['B8'].value = 'update'
    sheet['C8'].value = 'magic'
    get_sync_capabilities_env.save(f'{fs.root_path}/test.xlsx')

    stats = SynchronizerStats()
    client = ConnectClient(
        use_specs=False,
        api_key='ApiKey SU:123',
        endpoint='https://localhost/public/v1',
    )
    synchronizer = CapabilitiesSynchronizer(client=client, silent=True, stats=stats)

    synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
    synchronizer.sync()

    assert stats['Capabilities'].get_counts_as_dict() == {
        'processed': 9, 'created': 0, 'updated': 0,
        'deleted': 0, 'skipped': 8, 'errors': 1,
    }
    assert stats['Capabilities']._row_errors == {
        8: ['magic is not valid for Reseller Authorization level capability'],
    }
def test_invalid_value(fs, get_sync_capabilities_env):
    """A value other than Enabled/Disabled for Administrative Hold errors out."""
    sheet = get_sync_capabilities_env['Capabilities']
    sheet['B10'].value = 'update'
    sheet['C10'].value = 'magic'
    get_sync_capabilities_env.save(f'{fs.root_path}/test.xlsx')

    stats = SynchronizerStats()
    client = ConnectClient(
        use_specs=False,
        api_key='ApiKey SU:123',
        endpoint='https://localhost/public/v1',
    )
    synchronizer = CapabilitiesSynchronizer(client=client, silent=True, stats=stats)

    synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
    synchronizer.sync()

    assert stats['Capabilities'].get_counts_as_dict() == {
        'processed': 9, 'created': 0, 'updated': 0,
        'deleted': 0, 'skipped': 8, 'errors': 1,
    }
    assert stats['Capabilities']._row_errors == {
        10: ['Administrative Hold may be Enabled or Disabled, but not magic'],
    }
def test_ppu_enable_qt(
    fs,
    get_sync_capabilities_env,
    mocked_responses,
    mocked_product_response,
):
    """Enabling Pay-as-you-go with the QT schema issues one product update."""
    sheet = get_sync_capabilities_env['Capabilities']
    sheet['B2'].value = 'update'
    sheet['C2'].value = 'QT'
    get_sync_capabilities_env.save(f'{fs.root_path}/test.xlsx')

    # The mocked PUT answers with the product carrying ppu enabled.
    product = deepcopy(mocked_product_response)
    product['capabilities']['ppu'] = {'schema': 'QT'}
    mocked_responses.add(
        method='PUT',
        url='https://localhost/public/v1/products/PRD-276-377-545',
        json=product,
    )

    stats = SynchronizerStats()
    client = ConnectClient(
        use_specs=False,
        api_key='ApiKey SU:123',
        endpoint='https://localhost/public/v1',
    )
    synchronizer = CapabilitiesSynchronizer(client=client, silent=True, stats=stats)

    synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
    synchronizer.sync()

    assert stats['Capabilities'].get_counts_as_dict() == {
        'processed': 9, 'created': 0, 'updated': 1,
        'deleted': 0, 'skipped': 8, 'errors': 0,
    }
def test_ppu_change_schema(
    fs,
    get_sync_capabilities_env_ppu_enabled,
    mocked_responses,
    mocked_product_response,
):
    """Switching the Pay-as-you-go schema (QT -> TR) issues one product update."""
    sheet = get_sync_capabilities_env_ppu_enabled['Capabilities']
    sheet['B2'].value = 'update'
    sheet['C2'].value = 'TR'
    get_sync_capabilities_env_ppu_enabled.save(f'{fs.root_path}/test.xlsx')

    product = deepcopy(mocked_product_response)
    product['capabilities']['ppu'] = {'schema': 'QT'}
    mocked_responses.add(
        method='PUT',
        url='https://localhost/public/v1/products/PRD-276-377-545',
        json=product,
    )

    stats = SynchronizerStats()
    client = ConnectClient(
        use_specs=False,
        api_key='ApiKey SU:123',
        endpoint='https://localhost/public/v1',
    )
    synchronizer = CapabilitiesSynchronizer(client=client, silent=True, stats=stats)

    synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
    synchronizer.sync()

    assert stats['Capabilities'].get_counts_as_dict() == {
        'processed': 9, 'created': 0, 'updated': 1,
        'deleted': 0, 'skipped': 8, 'errors': 0,
    }
def test_ppu_disable(
    fs,
    get_sync_capabilities_env_ppu_enabled,
    mocked_responses,
    mocked_product_response,
):
    """Disabling Pay-as-you-go on a ppu-enabled product issues one update."""
    sheet = get_sync_capabilities_env_ppu_enabled['Capabilities']
    sheet['B2'].value = 'update'
    sheet['C2'].value = 'Disabled'
    get_sync_capabilities_env_ppu_enabled.save(f'{fs.root_path}/test.xlsx')

    product = deepcopy(mocked_product_response)
    product['capabilities']['ppu'] = {'schema': 'QT'}
    mocked_responses.add(
        method='PUT',
        url='https://localhost/public/v1/products/PRD-276-377-545',
        json=product,
    )

    stats = SynchronizerStats()
    client = ConnectClient(
        use_specs=False,
        api_key='ApiKey SU:123',
        endpoint='https://localhost/public/v1',
    )
    synchronizer = CapabilitiesSynchronizer(client=client, silent=True, stats=stats)

    synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
    synchronizer.sync()

    assert stats['Capabilities'].get_counts_as_dict() == {
        'processed': 9, 'created': 0, 'updated': 1,
        'deleted': 0, 'skipped': 8, 'errors': 0,
    }
def test_ppu_dynamic_items_no_ppu(
    fs,
    get_sync_capabilities_env,
    mocked_responses,
):
    """Dynamic items cannot be enabled while Pay-as-you-go is off: row error."""
    sheet = get_sync_capabilities_env['Capabilities']
    sheet['B3'].value = 'update'
    sheet['C3'].value = 'Enabled'
    get_sync_capabilities_env.save(f'{fs.root_path}/test.xlsx')

    stats = SynchronizerStats()
    client = ConnectClient(
        use_specs=False,
        api_key='ApiKey SU:123',
        endpoint='https://localhost/public/v1',
    )
    synchronizer = CapabilitiesSynchronizer(client=client, silent=True, stats=stats)

    synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
    synchronizer.sync()

    assert stats['Capabilities'].get_counts_as_dict() == {
        'processed': 9, 'created': 0, 'updated': 0,
        'deleted': 0, 'skipped': 8, 'errors': 1,
    }
    assert stats['Capabilities']._row_errors == {
        3: ["Dynamic items support can't be enabled without Pay-as-you-go support"],
    }
def test_ppu_dynamic_items_no_ppu_no_enabled(
    fs,
    get_sync_capabilities_env,
    mocked_responses,
):
    """Disabling dynamic items without Pay-as-you-go is accepted as an update."""
    sheet = get_sync_capabilities_env['Capabilities']
    sheet['B3'].value = 'update'
    sheet['C3'].value = 'Disabled'
    get_sync_capabilities_env.save(f'{fs.root_path}/test.xlsx')

    stats = SynchronizerStats()
    client = ConnectClient(
        use_specs=False,
        api_key='ApiKey SU:123',
        endpoint='https://localhost/public/v1',
    )
    synchronizer = CapabilitiesSynchronizer(client=client, silent=True, stats=stats)

    synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
    synchronizer.sync()

    assert stats['Capabilities'].get_counts_as_dict() == {
        'processed': 9, 'created': 0, 'updated': 1,
        'deleted': 0, 'skipped': 8, 'errors': 0,
    }
def test_ppu_enable_dynamic(
fs,
get_sync_capabilities_env_ppu_enabled,
mocked_responses,
mocked_product_response,
):
get_sync_capabilities_env_ppu_enabled['Capabilities']['B3'].value = 'update'
get_sync_capabilities_env_ppu_enabled['Capabilities']['C3'].value = 'Enabled'
get_sync_capabilities_env_ppu_enabled.save(f'{fs.root_path}/test.xlsx')
stats = SynchronizerStats()
synchronizer = CapabilitiesSynchronizer(
client=ConnectClient(
use_specs=False,
api_key='ApiKey SU:123',
endpoint='https://localhost/public/v1',
),
silent=True,
stats=stats,
)
response = deepcopy(mocked_product_response)
response['capabilities']['ppu'] = {
'schema': 'QT',
}
mocked_responses.add(
method='PUT',
url='https://localhost/public/v1/products/PRD-276-377-545',
json=response,
)
synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
synchronizer.sync()
assert stats['Capabilities'].get_counts_as_dict() == {
'processed': 9, 'created': 0, 'updated': 1,
'deleted': 0, 'skipped': 8, 'errors': 0,
}
def test_ppu_disable_dynamic(
fs,
get_sync_capabilities_env_ppu_enabled,
mocked_responses,
mocked_product_response,
):
get_sync_capabilities_env_ppu_enabled['Capabilities']['B3'].value = 'update'
get_sync_capabilities_env_ppu_enabled['Capabilities']['C3'].value = 'Disabled'
get_sync_capabilities_env_ppu_enabled.save(f'{fs.root_path}/test.xlsx')
stats = SynchronizerStats()
synchronizer = CapabilitiesSynchronizer(
client=ConnectClient(
use_specs=False,
api_key='ApiKey SU:123',
endpoint='https://localhost/public/v1',
),
silent=True,
stats=stats,
)
response = deepcopy(mocked_product_response)
response['capabilities']['ppu'] = {
'schema': 'QT',
}
mocked_responses.add(
method='PUT',
url='https://localhost/public/v1/products/PRD-276-377-545',
json=response,
)
synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
synchronizer.sync()
assert stats['Capabilities'].get_counts_as_dict() == {
'processed': 9, 'created': 0, 'updated': 1,
'deleted': 0, 'skipped': 8, 'errors': 0,
}
def test_ppu_future_no_ppu(
fs,
get_sync_capabilities_env,
mocked_responses,
):
get_sync_capabilities_env['Capabilities']['B4'].value = 'update'
get_sync_capabilities_env['Capabilities']['C4'].value = 'Enabled'
get_sync_capabilities_env.save(f'{fs.root_path}/test.xlsx')
stats = SynchronizerStats()
synchronizer = CapabilitiesSynchronizer(
client=ConnectClient(
use_specs=False,
api_key='ApiKey SU:123',
endpoint='https://localhost/public/v1',
),
silent=True,
stats=stats,
)
synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
synchronizer.sync()
assert stats['Capabilities'].get_counts_as_dict() == {
'processed': 9, 'created': 0, 'updated': 0,
'deleted': 0, 'skipped': 8, 'errors': 1,
}
assert stats['Capabilities']._row_errors == {
4: ["Report of future charges can't be enabled without Pay-as-you-go support"],
}
def test_ppu_future_no_ppu_no_enabled(
fs,
get_sync_capabilities_env,
mocked_responses,
):
get_sync_capabilities_env['Capabilities']['B4'].value = 'update'
get_sync_capabilities_env['Capabilities']['C4'].value = 'Disabled'
get_sync_capabilities_env.save(f'{fs.root_path}/test.xlsx')
stats = SynchronizerStats()
synchronizer = CapabilitiesSynchronizer(
client=ConnectClient(
use_specs=False,
api_key='ApiKey SU:123',
endpoint='https://localhost/public/v1',
),
silent=True,
stats=stats,
)
synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
synchronizer.sync()
assert stats['Capabilities'].get_counts_as_dict() == {
'processed': 9, 'created': 0, 'updated': 1,
'deleted': 0, 'skipped': 8, 'errors': 0,
}
def test_ppu_enable_future(
fs,
get_sync_capabilities_env_ppu_enabled,
mocked_responses,
mocked_product_response,
):
get_sync_capabilities_env_ppu_enabled['Capabilities']['B4'].value = 'update'
get_sync_capabilities_env_ppu_enabled['Capabilities']['C4'].value = 'Enabled'
get_sync_capabilities_env_ppu_enabled.save(f'{fs.root_path}/test.xlsx')
stats = SynchronizerStats()
synchronizer = CapabilitiesSynchronizer(
client=ConnectClient(
use_specs=False,
api_key='ApiKey SU:123',
endpoint='https://localhost/public/v1',
),
silent=True,
stats=stats,
)
response = deepcopy(mocked_product_response)
response['capabilities']['ppu'] = {
'schema': 'QT',
}
mocked_responses.add(
method='PUT',
url='https://localhost/public/v1/products/PRD-276-377-545',
json=response,
)
synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
synchronizer.sync()
assert stats['Capabilities'].get_counts_as_dict() == {
'processed': 9, 'created': 0, 'updated': 1,
'deleted': 0, 'skipped': 8, 'errors': 0,
}
def test_ppu_disable_future(
fs,
get_sync_capabilities_env_ppu_enabled,
mocked_responses,
mocked_product_response,
):
get_sync_capabilities_env_ppu_enabled['Capabilities']['B4'].value = 'update'
get_sync_capabilities_env_ppu_enabled['Capabilities']['C4'].value = 'Disabled'
get_sync_capabilities_env_ppu_enabled.save(f'{fs.root_path}/test.xlsx')
stats = SynchronizerStats()
synchronizer = CapabilitiesSynchronizer(
client=ConnectClient(
use_specs=False,
api_key='ApiKey SU:123',
endpoint='https://localhost/public/v1',
),
silent=True,
stats=stats,
)
response = deepcopy(mocked_product_response)
response['capabilities']['ppu'] = {
'schema': 'QT',
}
mocked_responses.add(
method='PUT',
url='https://localhost/public/v1/products/PRD-276-377-545',
json=response,
)
synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
synchronizer.sync()
assert stats['Capabilities'].get_counts_as_dict() == {
'processed': 9, 'created': 0, 'updated': 1,
'deleted': 0, 'skipped': 8, 'errors': 0,
}
@pytest.mark.parametrize(
('row_action',),
(
(5,),
(6,),
(7,),
(8,),
(9,),
(10,),
),
)
def test_ppu_disable_feature(
fs,
get_sync_capabilities_env_ppu_enabled,
mocked_responses,
mocked_product_response,
row_action,
):
get_sync_capabilities_env_ppu_enabled['Capabilities'][f'B{row_action}'].value = 'update'
get_sync_capabilities_env_ppu_enabled['Capabilities'][f'C{row_action}'].value = 'Disabled'
get_sync_capabilities_env_ppu_enabled.save(f'{fs.root_path}/test.xlsx')
stats = SynchronizerStats()
synchronizer = CapabilitiesSynchronizer(
client=ConnectClient(
use_specs=False,
api_key='ApiKey SU:123',
endpoint='https://localhost/public/v1',
),
silent=True,
stats=stats,
)
mocked_responses.add(
method='PUT',
url='https://localhost/public/v1/products/PRD-276-377-545',
json=mocked_product_response,
)
synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
synchronizer.sync()
assert stats['Capabilities'].get_counts_as_dict() == {
'processed': 9, 'created': 0, 'updated': 1,
'deleted': 0, 'skipped': 8, 'errors': 0,
}
@pytest.mark.parametrize(
('row_action',),
(
(5,),
(6,),
(7,),
(9,),
(10,),
),
)
def test_features_enable_future(
fs,
get_sync_capabilities_env_ppu_enabled,
mocked_responses,
mocked_product_response,
row_action,
):
get_sync_capabilities_env_ppu_enabled['Capabilities'][f'B{row_action}'].value = 'update'
get_sync_capabilities_env_ppu_enabled['Capabilities'][f'C{row_action}'].value = 'Enabled'
get_sync_capabilities_env_ppu_enabled.save(f'{fs.root_path}/test.xlsx')
stats = SynchronizerStats()
synchronizer = CapabilitiesSynchronizer(
client=ConnectClient(
use_specs=False,
api_key='ApiKey SU:123',
endpoint='https://localhost/public/v1',
),
silent=True,
stats=stats,
)
mocked_responses.add(
method='PUT',
url='https://localhost/public/v1/products/PRD-276-377-545',
json=mocked_product_response,
)
synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
synchronizer.sync()
assert stats['Capabilities'].get_counts_as_dict() == {
'processed': 9, 'created': 0, 'updated': 1,
'deleted': 0, 'skipped': 8, 'errors': 0,
}
@pytest.mark.parametrize(
('tier_level',),
(
(1,),
(2,),
),
)
def test_tier_level_feature(
fs,
get_sync_capabilities_env_ppu_enabled,
mocked_responses,
mocked_product_response,
tier_level,
):
get_sync_capabilities_env_ppu_enabled['Capabilities']['B8'].value = 'update'
get_sync_capabilities_env_ppu_enabled['Capabilities']['C8'].value = tier_level
get_sync_capabilities_env_ppu_enabled.save(f'{fs.root_path}/test.xlsx')
stats = SynchronizerStats()
synchronizer = CapabilitiesSynchronizer(
client=ConnectClient(
use_specs=False,
api_key='ApiKey SU:123',
endpoint='https://localhost/public/v1',
),
silent=True,
stats=stats,
)
mocked_responses.add(
method='PUT',
url='https://localhost/public/v1/products/PRD-276-377-545',
json=mocked_product_response,
)
synchronizer.open(f'{fs.root_path}/test.xlsx', 'Capabilities')
synchronizer.sync()
assert stats['Capabilities'].get_counts_as_dict() == {
'processed': 9, 'created': 0, 'updated': 1,
'deleted': 0, 'skipped': 8, 'errors': 0,
}
| 29.423358 | 100 | 0.63607 | 2,232 | 20,155 | 5.486559 | 0.064516 | 0.096685 | 0.113261 | 0.131145 | 0.945207 | 0.94292 | 0.94096 | 0.932386 | 0.924302 | 0.916871 | 0 | 0.022089 | 0.222823 | 20,155 | 684 | 101 | 29.466374 | 0.759704 | 0 | 0 | 0.784588 | 0 | 0 | 0.240685 | 0.044803 | 0 | 0 | 0 | 0 | 0.043783 | 1 | 0.033275 | false | 0 | 0.008757 | 0 | 0.042032 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
be29d7a0a7df6de9f5543da6170eaf422589607b | 803 | py | Python | cowin_settings/controllers/controllers.py | shangdinvxu/cowinaddons | 4e9d69894cd80e5427ccc9bac6c37b8bd67cadd0 | [
"MIT"
] | null | null | null | cowin_settings/controllers/controllers.py | shangdinvxu/cowinaddons | 4e9d69894cd80e5427ccc9bac6c37b8bd67cadd0 | [
"MIT"
] | null | null | null | cowin_settings/controllers/controllers.py | shangdinvxu/cowinaddons | 4e9d69894cd80e5427ccc9bac6c37b8bd67cadd0 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from odoo import http
# class CowinSettings(http.Controller):
# @http.route('/cowin_settings/cowin_settings/', auth='public')
# def index(self, **kw):
# return "Hello, world"
# @http.route('/cowin_settings/cowin_settings/objects/', auth='public')
# def list(self, **kw):
# return http.request.render('cowin_settings.listing', {
# 'root': '/cowin_settings/cowin_settings',
# 'objects': http.request.env['cowin_settings.cowin_settings'].search([]),
# })
# @http.route('/cowin_settings/cowin_settings/objects/<model("cowin_settings.cowin_settings"):obj>/', auth='public')
# def object(self, obj, **kw):
# return http.request.render('cowin_settings.object', {
# 'object': obj
# }) | 40.15 | 120 | 0.616438 | 89 | 803 | 5.404494 | 0.370787 | 0.378378 | 0.224532 | 0.324324 | 0.474012 | 0.405405 | 0.33264 | 0 | 0 | 0 | 0 | 0.001558 | 0.200498 | 803 | 20 | 121 | 40.15 | 0.747664 | 0.929016 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0794376edd67dea5ca881fdb1cdb4330597647be | 1,025 | py | Python | Stock-Hits/ScreenerPath.py | richpaulyim/Web-Tools | edb1898fefc0ddd1f7f2cd134730b8bbc8293fd8 | [
"MIT"
] | 1 | 2021-11-12T03:08:17.000Z | 2021-11-12T03:08:17.000Z | Stock-Hits/ScreenerPath.py | richpaulyim/Web-Briareus | edb1898fefc0ddd1f7f2cd134730b8bbc8293fd8 | [
"MIT"
] | null | null | null | Stock-Hits/ScreenerPath.py | richpaulyim/Web-Briareus | edb1898fefc0ddd1f7f2cd134730b8bbc8293fd8 | [
"MIT"
] | null | null | null | afterClickHTML = '//*[@id="screener-criteria"]/div[2]/div[1]/div[1]/div/div[2]/div/div[2]'
filters = 'C($tertiaryColor) Mstart(12px) Cur(p) Va(m)'
screen_login = 'https://login.yahoo.com/config/login?.src=finance&.intl=us&.lang=en-US&.done=https%3A%2F%2Ffinance.yahoo.com%2Fscreener'
close_filter = '/html/body/div[1]/div/div/div[1]/div/div[2]/div/div/div[5]/div/div/div/div[2]/div[1]/div[1]/div/div[2]/div/div[3]/button'
intra = '//*[@id="screener-criteria"]/div[2]/div[1]/div[1]/div/div[2]/div/div[2]/div[2]/div/ul/li[26]/label/span'
exdelt = '//*[@id="screener-criteria"]/div[2]/div[1]/div[1]/div[2]/div/div/div[2]/div[10]/div/ul/li[1]/label/span/span'
xdelt = '//*[@id="screener-criteria"]/div[2]/div[1]/div[1]/div[2]/div/div/div[2]/div/div/ul/li[1]/label/span'
close_filter = '/html/body/div[1]/div/div/div[1]/div/div[2]/div/div/div[5]/div/div/div/div[2]/div[1]/div[1]/div[2]/div/div/div[2]/div/div/ul/li[1]/label/span'
searchbar = '//*[@id="screener-criteria"]/div[2]/div[1]/div[1]/div[2]/div/div/div[1]/div[2]/input'
| 93.181818 | 158 | 0.660488 | 211 | 1,025 | 3.194313 | 0.241706 | 0.284866 | 0.218101 | 0.163205 | 0.670623 | 0.664688 | 0.639466 | 0.639466 | 0.639466 | 0.639466 | 0 | 0.05835 | 0.030244 | 1,025 | 10 | 159 | 102.5 | 0.619718 | 0 | 0 | 0 | 0 | 0.888889 | 0.866341 | 0.708293 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
07fa5d8a3046ad751d607e91e74b2b4c553571bc | 74 | py | Python | tutorial/math_func.py | progresivoJS/pytest-playground | 8f2924d7a27cce706f1dc0600a91803f6aab0b7a | [
"MIT"
] | null | null | null | tutorial/math_func.py | progresivoJS/pytest-playground | 8f2924d7a27cce706f1dc0600a91803f6aab0b7a | [
"MIT"
] | null | null | null | tutorial/math_func.py | progresivoJS/pytest-playground | 8f2924d7a27cce706f1dc0600a91803f6aab0b7a | [
"MIT"
] | null | null | null | def add(x, y=2):
return x + y
def product(x, y=2):
return x * y
| 10.571429 | 20 | 0.513514 | 16 | 74 | 2.375 | 0.4375 | 0.210526 | 0.157895 | 0.473684 | 0.578947 | 0.578947 | 0 | 0 | 0 | 0 | 0 | 0.04 | 0.324324 | 74 | 6 | 21 | 12.333333 | 0.72 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
6afec290a6728d80f9d58f3811d93284d2909963 | 14,485 | py | Python | ChessAI.py | nadavleh/Chess_AI | ab607a89926822bee9581b9a64a05c63e4071157 | [
"MIT",
"Unlicense"
] | null | null | null | ChessAI.py | nadavleh/Chess_AI | ab607a89926822bee9581b9a64a05c63e4071157 | [
"MIT",
"Unlicense"
] | null | null | null | ChessAI.py | nadavleh/Chess_AI | ab607a89926822bee9581b9a64a05c63e4071157 | [
"MIT",
"Unlicense"
] | null | null | null | import ChessEngine # importing the ChessEngine file we are writing
# import numpy
import copy
class ChessAI:
def __init__(self):
# self.pieceValue = {'P': 10, 'R': 50, 'N': 30, 'B': 30, 'Q': 90, 'K': 900}
self.pieceValue = {'P': 100, 'R': 500, 'N': 320, 'B': 330, 'Q': 900, 'K': 20000}
self.pieceSquaresTables = {
"bP" :(
(0, 0, 0, 0, 0, 0, 0, 0),
(50, 50, 50, 50, 50, 50, 50, 50),
(10, 10, 20, 30, 30, 20, 10, 10),
(5, 5, 10, 25, 25, 10, 5, 5),
(0, 0, 0, 20, 20, 0, 0, 0),
(5, -5,-10, 0, 0,-10, -5, 5),
(5, 10, 10,-20,-20, 10, 10, 5),
(0, 0, 0, 0, 0, 0, 0, 0)
),
"wP" :(
(0, 0, 0, 0, 0, 0, 0, 0),
(5, 10, 10,-20,-20, 10, 10, 5),
(5, -5,-10, 0, 0,-10, -5, 5),
(0, 0, 0, 20, 20, 0, 0, 0),
(5, 5, 10, 25, 25, 10, 5, 5),
(10, 10, 20, 30, 30, 20, 10, 10),
(50, 50, 50, 50, 50, 50, 50, 50),
(0, 0, 0, 0, 0, 0, 0, 0)
),
"bN" :(
(-50,-40,-30,-30,-30,-30,-40,-50),
(-40,-20, 0, 0, 0, 0,-20,-40),
(-30, 0, 10, 15, 15, 10, 0,-30),
(-30, 5, 15, 20, 20, 15, 5,-30),
(-30, 0, 15, 20, 20, 15, 0,-30),
(-30, 5, 10, 15, 15, 10, 5,-30),
(-40,-20, 0, 5, 5, 0,-20,-40),
(-50,-40,-30,-30,-30,-30,-40,-50)
),
"wN" :(
(-50,-40,-30,-30,-30,-30,-40,-50),
(-40,-20, 0, 5, 5, 0,-20,-40),
(-30, 5, 10, 15, 15, 10, 5,-30),
(-30, 0, 15, 20, 20, 15, 0,-30),
(-30, 5, 15, 20, 20, 15, 5,-30),
(-30, 0, 10, 15, 15, 10, 0,-30),
(-40,-20, 0, 0, 0, 0,-20,-40),
(-50,-40,-30,-30,-30,-30,-40,-50)
),
"bB" :(
(-20,-10,-10,-10,-10,-10,-10,-20),
(-10, 0, 0, 0, 0, 0, 0,-10),
(-10, 0, 5, 10, 10, 5, 0,-10),
(-10, 5, 5, 10, 10, 5, 5,-10),
(-10, 0, 10, 10, 10, 10, 0,-10),
(-10, 10, 10, 10, 10, 10, 10,-10),
(-10, 5, 0, 0, 0, 0, 5,-10),
(-20,-10,-10,-10,-10,-10,-10,-20)
),
"wB" :(
(-20,-10,-10,-10,-10,-10,-10,-20),
(-10, 5, 0, 0, 0, 0, 5,-10),
(-10, 10, 10, 10, 10, 10, 10,-10),
(-10, 0, 10, 10, 10, 10, 0,-10),
(-10, 5, 5, 10, 10, 5, 5,-10),
(-10, 0, 5, 10, 10, 5, 0,-10),
(-10, 0, 0, 0, 0, 0, 0,-10),
(-20,-10,-10,-10,-10,-10,-10,-20)
),
"bR" :(
(0, 0, 0, 0, 0, 0, 0, 0),
(5, 10, 10, 10, 10, 10, 10, 5),
(-5, 0, 0, 0, 0, 0, 0, -5),
(-5, 0, 0, 0, 0, 0, 0, -5),
(-5, 0, 0, 0, 0, 0, 0, -5),
(-5, 0, 0, 0, 0, 0, 0, -5),
(-5, 0, 0, 0, 0, 0, 0, -5),
(0, 0, 0, 5, 5, 0, 0, 0)
),
"wR" :(
(0, 0, 0, 5, 5, 0, 0, 0),
(-5, 0, 0, 0, 0, 0, 0, -5),
(-5, 0, 0, 0, 0, 0, 0, -5),
(-5, 0, 0, 0, 0, 0, 0, -5),
(-5, 0, 0, 0, 0, 0, 0, -5),
(-5, 0, 0, 0, 0, 0, 0, -5),
(5, 10, 10, 10, 10, 10, 10, 5),
(0, 0, 0, 0, 0, 0, 0, 0)
),
"bQ" :(
(-20,-10,-10, -5, -5,-10,-10,-20),
(-10, 0, 0, 0, 0, 0, 0,-10),
(-10, 0, 5, 5, 5, 5, 0,-10),
(-5, 0, 5, 5, 5, 5, 0, -5),
(0, 0, 5, 5, 5, 5, 0, -5),
(-10, 5, 5, 5, 5, 5, 0,-10),
(-10, 0, 5, 0, 0, 0, 0,-10),
(-20,-10,-10, -5, -5,-10,-10,-20)
),
"wQ" :(
(-20,-10,-10, -5, -5,-10,-10,-20),
(-10, 0, 5, 0, 0, 0, 0,-10),
(-10, 5, 5, 5, 5, 5, 0,-10),
(0, 0, 5, 5, 5, 5, 0, -5),
(-5, 0, 5, 5, 5, 5, 0, -5),
(-10, 0, 5, 5, 5, 5, 0,-10),
(-10, 0, 0, 0, 0, 0, 0,-10),
(-20,-10,-10, -5, -5,-10,-10,-20)
),
"bK" :( # the same as bKMiddleGame, this is just to simplify
(-30,-40,-40,-50,-50,-40,-40,-30),
(-30,-40,-40,-50,-50,-40,-40,-30),
(-30,-40,-40,-50,-50,-40,-40,-30),
(-30,-40,-40,-50,-50,-40,-40,-30),
(-20,-30,-30,-40,-40,-30,-30,-20),
(-10,-20,-20,-20,-20,-20,-20,-10),
(20, 20, 0, 0, 0, 0, 20, 20),
(20, 30, 10, 0, 0, 10, 30, 20)
),
"wK" :( # the same as wKMiddleGame, this is just to simplify
(20, 30, 10, 0, 0, 10, 30, 20),
(20, 20, 0, 0, 0, 0, 20, 20),
(-10,-20,-20,-20,-20,-20,-20,-10),
(-20,-30,-30,-40,-40,-30,-30,-20),
(-30,-40,-40,-50,-50,-40,-40,-30),
(-30,-40,-40,-50,-50,-40,-40,-30),
(-30,-40,-40,-50,-50,-40,-40,-30),
(-30,-40,-40,-50,-50,-40,-40,-30)
)
}
def getBestMove(self, position, depth = 3):
# =============================================================================
# this function is essentially a minimax algoithm implementatiom
# =============================================================================
# position is a gameState object (or rather a reference to one)
if depth == 0 or position.checkmate or position.stalemate:
return (self.staticPositionValue(position), None)
else:
if position.WhiteToMove:
bestScore = -float("inf")
bestMove = None
validMoves = position.getValidMoves()
for move in validMoves:
new_position = copy.deepcopy(position)
new_position.makeMove(move)
score, foo = self.getBestMove(new_position,depth-1)
if score > bestScore:
bestScore = score
bestMove = move
return (bestScore, bestMove)
else:
bestScore = float("inf")
bestMove = None
validMoves = position.getValidMoves()
for move in validMoves:
new_position = copy.deepcopy(position)
new_position.makeMove(move)
score, foo = self.getBestMove(new_position,depth-1)
if score < bestScore:
bestScore = score
bestMove = move
return (bestScore, bestMove)
def alphaBeta(self, position, depth = 3 , alpha = -float("inf"), beta = float("inf")):
# print("entered at depth =", depth)
# =============================================================================
# this function is a minimax algoithm implementatiom with alpha-beta prunning
# =============================================================================
# position is a gameState object (or rather a reference to one)
if depth == 0 or position.checkmate or position.stalemate:
return (self.staticPositionValue(position), None)
else:
if position.WhiteToMove:
bestMove = None
validMoves = position.getValidMoves()
for move in validMoves:
position.makeMove(move)
score, foo = self.alphaBeta(position,depth-1)
position.undoMove()
if score > alpha:
alpha = score
bestMove = move
if alpha >= beta:
print("prunning occured w")
break
return (alpha, bestMove)
else:
bestMove = None
validMoves = position.getValidMoves()
for move in validMoves:
position.makeMove(move)
score, foo = self.alphaBeta(position,depth-1)
position.undoMove()
if score < beta:
beta = score
bestMove = move
if alpha >= beta:
print("prunning occured b")
break
return (beta, bestMove)
def staticPositionValue(self, gameState):
# =============================================================================
# this function is evaluates a position given in gameState.board by summing all the pieces values
# (blacks values are the same as whites but negated) aswell as summing the square values of each respected
# piece, available in the pieceSquaresTables dictionary
# =============================================================================
sum = 0
if gameState.checkmate:
if gameState.WhiteToMove:
return float("inf")
else:
return -float("inf")
elif gameState.stalemate:
return 0
for r in range(8):
for c in range(8):
if gameState.board[r][c][0] == 'w':
sum += self.pieceValue[ gameState.board[r][c][1] ]
sum += self.pieceSquaresTables[ gameState.board[r][c] ][r][c]
elif gameState.board[r][c][0] == 'b':
sum -= self.pieceValue[ gameState.board[r][c][1] ]
sum -= self.pieceSquaresTables[ gameState.board[r][c] ][r][c]
return sum
# self.pieceValue = {'P': 100, 'R': 500, 'N': 320, 'B': 330, 'Q': 900, 'K': 20000}
# def alphaBeta(self, position, depth = 3 , alpha = -float("inf"), beta = float("inf")):
# # position is a gameState object (or rather a reference to one)
# if depth == 0 or position.checkmate or position.stalemate:
# return (self.staticPositionValue(position), None)
# else:
# if position.WhiteToMove:
# bestMove = None
# validMoves = position.getValidMoves()
# for move in validMoves:
# new_position = copy.deepcopy(position)
# new_position.makeMove(move)
# score, foo = self.getBestMove(new_position,depth-1)
# if score > alpha:
# alpha = score
# bestMove = move
# if alpha >= beta:
# break
# return (alpha, bestMove)
# else:
# bestMove = None
# validMoves = position.getValidMoves()
# for move in validMoves:
# new_position = copy.deepcopy(position)
# new_position.makeMove(move)
# score, foo = self.getBestMove(new_position,depth-1)
# if score < beta:
# beta = score
# bestMove = move
# if alpha >= beta:
# break
# return (beta, bestMove)
# def getBestMove(self, position, depth = 3):
# # position is a gameState object (or rather a reference to one)
# if depth == 0 or position.checkmate or position.stalemate:
# return [self.staticPositionValue(position), None]
# else:
# if position.WhiteToMove:
# bestScore = -float("inf")
# bestMove = None
# validMoves = position.getValidMoves()
# for move in validMoves:
# new_position = copy.deepcopy(position)
# new_position.makeMove(move)
# v = self.getBestMove(new_position,depth-1)
# if v[0] > bestScore:
# bestScore = v[0]
# bestMove = v[1]
# return [bestScore, bestMove]
# else:
# bestScore = float("inf")
# bestMove = None
# validMoves = position.getValidMoves()
# for move in validMoves:
# new_position = copy.deepcopy(position)
# new_position.makeMove(move)
# v = self.getBestMove(new_position,depth-1)
# if v[0] < bestScore:
# bestScore = v[0]
# bestMove = v[1]
# return [bestScore, bestMove]
# PAWN_TABLE = numpy.array([
# [ 0, 0, 0, 0, 0, 0, 0, 0],
# [ 5, 10, 10,-20,-20, 10, 10, 5],
# [ 5, -5,-10, 0, 0,-10, -5, 5],
# [ 0, 0, 0, 20, 20, 0, 0, 0],
# [ 5, 5, 10, 25, 25, 10, 5, 5],
# [10, 10, 20, 30, 30, 20, 10, 10],
# [50, 50, 50, 50, 50, 50, 50, 50],
# [ 0, 0, 0, 0, 0, 0, 0, 0]
# ])
# KNIGHT_TABLE = numpy.array([
# [-50, -40, -30, -30, -30, -30, -40, -50],
# [-40, -20, 0, 5, 5, 0, -20, -40],
# [-30, 5, 10, 15, 15, 10, 5, -30],
# [-30, 0, 15, 20, 20, 15, 0, -30],
# [-30, 5, 15, 20, 20, 15, 0, -30],
# [-30, 0, 10, 15, 15, 10, 0, -30],
# [-40, -20, 0, 0, 0, 0, -20, -40],
# [-50, -40, -30, -30, -30, -30, -40, -50]
# ])
# BISHOP_TABLE = numpy.array([
# [-20, -10, -10, -10, -10, -10, -10, -20],
# [-10, 5, 0, 0, 0, 0, 5, -10],
# [-10, 10, 10, 10, 10, 10, 10, -10],
# [-10, 0, 10, 10, 10, 10, 0, -10],
# [-10, 5, 5, 10, 10, 5, 5, -10],
# [-10, 0, 5, 10, 10, 5, 0, -10],
# [-10, 0, 0, 0, 0, 0, 0, -10],
# [-20, -10, -10, -10, -10, -10, -10, -20]
# ])
# ROOK_TABLE = numpy.array([
# [ 0, 0, 0, 5, 5, 0, 0, 0],
# [-5, 0, 0, 0, 0, 0, 0, -5],
# [-5, 0, 0, 0, 0, 0, 0, -5],
# [-5, 0, 0, 0, 0, 0, 0, -5],
# [-5, 0, 0, 0, 0, 0, 0, -5],
# [-5, 0, 0, 0, 0, 0, 0, -5],
# [ 5, 10, 10, 10, 10, 10, 10, 5],
# [ 0, 0, 0, 0, 0, 0, 0, 0]
# ])
# QUEEN_TABLE = numpy.array([
# [-20, -10, -10, -5, -5, -10, -10, -20],
# [-10, 0, 5, 0, 0, 0, 0, -10],
# [-10, 5, 5, 5, 5, 5, 0, -10],
# [ 0, 0, 5, 5, 5, 5, 0, -5],
# [ -5, 0, 5, 5, 5, 5, 0, -5],
# [-10, 0, 5, 5, 5, 5, 0, -10],
# [-10, 0, 0, 0, 0, 0, 0, -10],
# [-20, -10, -10, -5, -5, -10, -10, -20]
# ])
| 36.670886 | 111 | 0.37853 | 1,822 | 14,485 | 2.994512 | 0.079034 | 0.085411 | 0.094575 | 0.087243 | 0.853556 | 0.833028 | 0.810301 | 0.810117 | 0.767779 | 0.755315 | 0 | 0.202584 | 0.412151 | 14,485 | 394 | 112 | 36.763959 | 0.438168 | 0.410839 | 0 | 0.726829 | 0 | 0 | 0.010252 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019512 | false | 0 | 0.009756 | 0 | 0.082927 | 0.009756 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
ed0ef48b655c6ff6e9e4e4a5413116852f6c0e06 | 3,862 | py | Python | tests/commands/test_race.py | YusukeKambara/japan_horse_racing | 05c2e06fe265c5744b908b8575df260db18a115b | [
"MIT"
] | null | null | null | tests/commands/test_race.py | YusukeKambara/japan_horse_racing | 05c2e06fe265c5744b908b8575df260db18a115b | [
"MIT"
] | 1 | 2021-12-13T20:32:18.000Z | 2021-12-13T20:32:18.000Z | tests/commands/test_race.py | YusukeKambara/japan_horse_racing | 05c2e06fe265c5744b908b8575df260db18a115b | [
"MIT"
] | null | null | null | import unittest
from datasource.netkeiba import io as netkeiba
from src.commands import race as commands_race
class TestRace(unittest.TestCase):
"""Test class for testing the commands.race module
Arguments:
unittest {[type]} -- [description]
"""
@classmethod
def setUpClass(cls):
print("*" * 80 + "\nStart to test [commands.race] module\n" + "*" * 80)
def test_get_result_with_invaild_params(self):
"""Testing to occurred error if the argument's params are invaild
"""
params = {
netkeiba.url_params.PID: netkeiba.pid_list.RACE_LIST,
netkeiba.url_params.WORD: None,
netkeiba.url_params.START_YEAR: 9999,
netkeiba.url_params.START_MONTH: None,
netkeiba.url_params.END_YEAR: None,
netkeiba.url_params.END_MONTH: None
}
assert commands_race.get_result(params) is None
def test_get_result_with_race_name(self):
"""Testing to get the race result with argument's name
"""
params = {
netkeiba.url_params.PID: netkeiba.pid_list.RACE_LIST,
netkeiba.url_params.WORD: "有馬記念",
netkeiba.url_params.START_YEAR: None,
netkeiba.url_params.START_MONTH: None,
netkeiba.url_params.END_YEAR: None,
netkeiba.url_params.END_MONTH: None
}
df = commands_race.get_result(params)
assert all(["有馬記念" in race_name for race_name in df["race_name"].to_list()])
def test_get_result_with_two_years(self):
"""Testing to get the race result with argument's years
"""
params = {
netkeiba.url_params.PID: netkeiba.pid_list.RACE_LIST,
netkeiba.url_params.WORD: "有馬記念",
netkeiba.url_params.START_YEAR: 2018,
netkeiba.url_params.START_MONTH: None,
netkeiba.url_params.END_YEAR: 2019,
netkeiba.url_params.END_MONTH: None
}
df = commands_race.get_result(params)
assert any([dt.year == 2018 for dt in df["date"].to_list()])
assert any([dt.year == 2019 for dt in df["date"].to_list()])
def test_get_details_with_invaild_params(self):
"""Testing to occurred error if the argument's params are invaild
"""
params = {
netkeiba.url_params.PID: netkeiba.pid_list.RACE_LIST,
netkeiba.url_params.WORD: None,
netkeiba.url_params.START_YEAR: 9999,
netkeiba.url_params.START_MONTH: None,
netkeiba.url_params.END_YEAR: None,
netkeiba.url_params.END_MONTH: None
}
assert commands_race.get_details(params) is None
def test_get_details_with_race_name(self):
"""Testing to get the race result with argument's name
"""
params = {
netkeiba.url_params.PID: netkeiba.pid_list.RACE_LIST,
netkeiba.url_params.WORD: "有馬記念",
netkeiba.url_params.START_YEAR: None,
netkeiba.url_params.START_MONTH: None,
netkeiba.url_params.END_YEAR: None,
netkeiba.url_params.END_MONTH: None
}
df = commands_race.get_details(params)
assert all(["有馬記念" in race_name for race_name in df["race_name"].to_list()])
def test_get_details_with_years(self):
"""Testing to get the race result with argument's year
"""
params = {
netkeiba.url_params.PID: netkeiba.pid_list.RACE_LIST,
netkeiba.url_params.WORD: "有馬記念",
netkeiba.url_params.START_YEAR: 2019,
netkeiba.url_params.START_MONTH: None,
netkeiba.url_params.END_YEAR: 2019,
netkeiba.url_params.END_MONTH: None
}
df = commands_race.get_details(params)
assert any([dt.year == 2019 for dt in df["date"].to_list()]) | 39.814433 | 84 | 0.62869 | 496 | 3,862 | 4.643145 | 0.135081 | 0.17195 | 0.26574 | 0.12766 | 0.877985 | 0.849761 | 0.830656 | 0.810248 | 0.810248 | 0.810248 | 0 | 0.014291 | 0.275246 | 3,862 | 97 | 85 | 39.814433 | 0.808503 | 0.12377 | 0 | 0.666667 | 0 | 0 | 0.02889 | 0 | 0 | 0 | 0 | 0 | 0.097222 | 1 | 0.097222 | false | 0 | 0.041667 | 0 | 0.152778 | 0.013889 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed35f4a434a1ceebeed4bc2b774b1af0979e2400 | 31,963 | py | Python | tests/countminsketch_test.py | dekoza/pyprobables | 9460471d7e391060874d342945b2352d0a35603f | [
"MIT"
] | null | null | null | tests/countminsketch_test.py | dekoza/pyprobables | 9460471d7e391060874d342945b2352d0a35603f | [
"MIT"
] | null | null | null | tests/countminsketch_test.py | dekoza/pyprobables | 9460471d7e391060874d342945b2352d0a35603f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
''' Unittest class '''
from __future__ import (unicode_literals, absolute_import, print_function)
import unittest
import os
from probables import (CountMinSketch, HeavyHitters, StreamThreshold,
CountMeanSketch, CountMeanMinSketch)
from probables.exceptions import (InitializationError, NotSupportedError)
from probables.constants import (INT32_T_MIN, INT32_T_MAX, INT64_T_MAX,
INT64_T_MIN)
from . utilities import(calc_file_md5, different_hash)
class TestCountMinSketch(unittest.TestCase):
''' Test the default count-min sketch implementation '''
def test_cms_init_wd(self):
''' Test count-min sketch initialization using depth and width '''
cms = CountMinSketch(width=1000, depth=5)
self.assertEqual(cms.width, 1000)
self.assertEqual(cms.depth, 5)
self.assertEqual(cms.confidence, 0.96875)
self.assertEqual(cms.error_rate, 0.002)
self.assertEqual(cms.elements_added, 0)
def test_cms_init_ce(self):
''' Test count-min sketch initialization using confidence and error
rate '''
cms = CountMinSketch(confidence=0.96875, error_rate=0.002)
self.assertEqual(cms.width, 1000)
self.assertEqual(cms.depth, 5)
self.assertEqual(cms.confidence, 0.96875)
self.assertEqual(cms.error_rate, 0.002)
self.assertEqual(cms.elements_added, 0)
def test_cms_init_error(self):
''' Test count-min sketch initialization without enough params '''
self.assertRaises(InitializationError,
lambda: CountMinSketch(width=1000))
def test_cms_init_error_msg(self):
''' Test count-min sketch initialization without enough params '''
try:
CountMinSketch(width=1000)
except InitializationError as ex:
msg = ('Must provide one of the following to initialize the '
'Count-Min Sketch:\n'
' A file to load,\n'
' The width and depth,\n'
' OR confidence and error rate')
self.assertEqual(str(ex), msg)
else:
self.assertEqual(True, False)
def test_cms_set_query_type(self):
''' test setting different query types '''
cms = CountMinSketch(width=1000, depth=5)
self.assertEqual(cms.query_type, 'min')
cms.query_type = 'mean-min'
self.assertEqual(cms.query_type, 'mean-min')
cms.query_type = 'mean'
self.assertEqual(cms.query_type, 'mean')
cms.query_type = 'unknown'
self.assertEqual(cms.query_type, 'min')
def test_cms_add_single(self):
''' test the insertion of a single element at a time '''
cms = CountMinSketch(width=1000, depth=5)
self.assertEqual(cms.add('this is a test'), 1)
self.assertEqual(cms.add('this is a test'), 2)
self.assertEqual(cms.add('this is a test'), 3)
self.assertEqual(cms.add('this is a test'), 4)
self.assertEqual(cms.elements_added, 4)
def test_cms_add_mult(self):
    ''' test the insertion of multiple elements at a time '''
    sketch = CountMinSketch(width=1000, depth=5)
    # adding 4 at a time yields running totals of 4, 8, 12, 16
    for expected in (4, 8, 12, 16):
        self.assertEqual(sketch.add('this is a test', 4), expected)
    self.assertEqual(sketch.elements_added, 16)
def test_cms_remove_single(self):
    ''' test the removal of a single element at a time '''
    sketch = CountMinSketch(width=1000, depth=5)
    self.assertEqual(sketch.add('this is a test', 4), 4)
    self.assertEqual(sketch.elements_added, 4)
    # each single remove drops the reported count by one
    for remaining in (3, 2):
        self.assertEqual(sketch.remove('this is a test'), remaining)
    self.assertEqual(sketch.elements_added, 2)
def test_cms_remove_mult(self):
    ''' test the removal of multiple elements at a time '''
    sketch = CountMinSketch(width=1000, depth=5)
    self.assertEqual(sketch.add('this is a test', 16), 16)
    self.assertEqual(sketch.elements_added, 16)
    # removing 4 at once drops both the count and the running total
    self.assertEqual(sketch.remove('this is a test', 4), 12)
    self.assertEqual(sketch.elements_added, 12)
def test_cms_check_min(self):
    ''' test checking number elements using min algorithm '''
    sketch = CountMinSketch(width=1000, depth=5)
    counts = [('this is a test', 255),
              ('this is another test', 189),
              ('this is also a test', 16),
              ('this is something to test', 5)]
    for key, count in counts:
        self.assertEqual(sketch.add(key, count), count)
    # default 'min' query should return each exact count
    for key, count in counts:
        self.assertEqual(sketch.check(key), count)
    self.assertEqual(sketch.elements_added, 5 + 16 + 189 + 255)
def test_cms_check_min_called(self):
    ''' test checking number elements using min algorithm called out '''
    sketch = CountMinSketch(width=1000, depth=5)
    # None is not a valid query type, so the sketch falls back to 'min'
    sketch.query_type = None
    counts = [('this is a test', 255),
              ('this is another test', 189),
              ('this is also a test', 16),
              ('this is something to test', 5)]
    for key, count in counts:
        self.assertEqual(sketch.add(key, count), count)
    for key, count in counts:
        self.assertEqual(sketch.check(key), count)
    self.assertEqual(sketch.elements_added, 5 + 16 + 189 + 255)
def test_cms_check_mean_called(self):
    ''' test checking number elements using mean algorithm called out '''
    sketch = CountMinSketch(width=1000, depth=5)
    sketch.query_type = 'mean'
    counts = [('this is a test', 255),
              ('this is another test', 189),
              ('this is also a test', 16),
              ('this is something to test', 5)]
    for key, count in counts:
        self.assertEqual(sketch.add(key, count), count)
    # with no collisions the mean estimate equals the exact count
    for key, count in counts:
        self.assertEqual(sketch.check(key), count)
    self.assertEqual(sketch.elements_added, 5 + 16 + 189 + 255)
def test_cms_check_mean_min_called(self):
    ''' test checking number elements using mean-min algorithm called
        out '''
    sketch = CountMinSketch(width=1000, depth=5)
    sketch.query_type = 'mean-min'
    counts = [('this is a test', 255),
              ('this is another test', 189),
              ('this is also a test', 16),
              ('this is something to test', 5)]
    for key, count in counts:
        self.assertEqual(sketch.add(key, count), count)
    # with no collisions the mean-min estimate equals the exact count
    for key, count in counts:
        self.assertEqual(sketch.check(key), count)
    self.assertEqual(sketch.elements_added, 5 + 16 + 189 + 255)
def test_cms_check_mean_called_even(self):
    ''' test checking number elements using mean algorithm called out when
        the depth is an even number... '''
    # even depth exercises the median-of-even-count branch of mean-min
    sketch = CountMinSketch(width=1000, depth=6)
    sketch.query_type = 'mean-min'
    counts = [('this is a test', 255),
              ('this is another test', 189),
              ('this is also a test', 16),
              ('this is something to test', 5)]
    for key, count in counts:
        self.assertEqual(sketch.add(key, count), count)
    for key, count in counts:
        self.assertEqual(sketch.check(key), count)
    self.assertEqual(sketch.elements_added, 5 + 16 + 189 + 255)
def test_cms_export(self):
    ''' test exporting a count-min sketch '''
    expected_md5 = '61d2ea9d0cb09b7bb284e1cf1a860449'
    filename = 'test.cms'
    sketch = CountMinSketch(width=1000, depth=5)
    sketch.add('this is a test', 100)
    sketch.export(filename)
    # the on-disk format is deterministic, so the md5 is fixed
    actual_md5 = calc_file_md5(filename)
    os.remove(filename)
    self.assertEqual(actual_md5, expected_md5)
def test_cms_load(self):
    ''' test loading a count-min sketch from file '''
    expected_md5 = '61d2ea9d0cb09b7bb284e1cf1a860449'
    filename = 'test.cms'
    sketch = CountMinSketch(width=1000, depth=5)
    self.assertEqual(sketch.add('this is a test', 100), 100)
    sketch.export(filename)
    self.assertEqual(calc_file_md5(filename), expected_md5)
    # load the exported sketch straight from disk
    loaded = CountMinSketch(filepath=filename)
    self.assertEqual(loaded.elements_added, 100)
    self.assertEqual(loaded.check('this is a test'), 100)
    os.remove(filename)
def test_cms_load_diff_hash(self):
    ''' test loading a count-min sketch from file with a different
        hash function: counts load but lookups must not line up '''
    md5_val = '61d2ea9d0cb09b7bb284e1cf1a860449'
    filename = 'test.cms'
    cms = CountMinSketch(width=1000, depth=5)
    self.assertEqual(cms.add('this is a test', 100), 100)
    cms.export(filename)
    md5_out = calc_file_md5(filename)
    self.assertEqual(md5_out, md5_val)
    cms2 = CountMinSketch(filepath=filename, hash_function=different_hash)
    self.assertEqual(cms2.elements_added, 100)
    # BUGFIX: the original asserted on `cms` (not the reloaded `cms2`)
    # and compared against `True`, which could never fail.  The reloaded
    # sketch hashes the key to different bins, so it must not see 100.
    self.assertNotEqual(cms2.check('this is a test'), 100)
    self.assertNotEqual(cms.hashes('this is a test'),
                        cms2.hashes('this is a test'))
    os.remove(filename)
def test_cms_load_invalid_file(self):
    ''' test loading a count-min sketch from invalid file '''
    # the file does not exist, so initialization must fail
    with self.assertRaises(InitializationError):
        CountMinSketch(filepath='invalid.cms')
def test_cms_different_hash(self):
    ''' test using a different hash function '''
    default_sketch = CountMinSketch(width=1000, depth=5)
    custom_sketch = CountMinSketch(width=1000, depth=5,
                                   hash_function=different_hash)
    # the two hash functions must map the same key to different indexes
    self.assertNotEqual(default_sketch.hashes('this is a test'),
                        custom_sketch.hashes('this is a test'))
def test_cms_min_val(self):
    ''' test when we come to the bottom of the 32 bit int
        (stop overflow) '''
    overflow_amount = INT64_T_MAX + 5
    sketch = CountMinSketch(width=1000, depth=5)
    sketch.remove('this is a test', overflow_amount)
    # bins clamp at INT32_T_MIN; the running total clamps at INT64_T_MIN
    self.assertEqual(sketch.check('this is a test'), INT32_T_MIN)
    self.assertEqual(sketch.elements_added, INT64_T_MIN)
def test_cms_max_val(self):
    ''' test when we come to the top of the 32 bit int
        (stop overflow) '''
    overflow_amount = INT64_T_MAX + 5
    sketch = CountMinSketch(width=1000, depth=5)
    sketch.add('this is a test', overflow_amount)
    # bins clamp at INT32_T_MAX; the running total clamps at INT64_T_MAX
    self.assertEqual(sketch.check('this is a test'), INT32_T_MAX)
    self.assertEqual(sketch.elements_added, INT64_T_MAX)
def test_cms_clear(self):
    ''' test the clear functionality '''
    sketch = CountMinSketch(width=1000, depth=5)
    self.assertEqual(sketch.add('this is a test', 100), 100)
    self.assertEqual(sketch.elements_added, 100)
    sketch.clear()
    # clearing resets both the counters and the running total
    self.assertEqual(sketch.elements_added, 0)
    self.assertEqual(sketch.check('this is a test'), 0)
def test_cms_str(self):
    ''' test the string representation of the count-min sketch '''
    sketch = CountMinSketch(width=1000, depth=5)
    self.assertEqual(sketch.add('this is a test', 100), 100)
    expected = ('Count-Min Sketch:\n'
                '\tWidth: 1000\n'
                '\tDepth: 5\n'
                '\tConfidence: 0.96875\n'
                '\tError Rate: 0.002\n'
                '\tElements Added: 100')
    self.assertEqual(str(sketch), expected)
def test_cms_invalid_width(self):
    ''' test invalid width '''
    def runner():
        ''' runner '''
        CountMinSketch(width=0, depth=5)
    self.assertRaises(InitializationError, runner)
    msg = 'CountMinSketch: width and depth must be greater than 0'
    try:
        runner()
    except InitializationError as ex:
        self.assertEqual(str(ex), msg)
    else:
        # replaced `assertEqual(True, False)` sentinel with an explicit fail
        self.fail('InitializationError not raised')
def test_cms_invalid_depth(self):
    ''' test invalid depth '''
    def runner():
        ''' runner '''
        CountMinSketch(width=1000, depth=-5)
    self.assertRaises(InitializationError, runner)
    msg = 'CountMinSketch: width and depth must be greater than 0'
    try:
        runner()
    except InitializationError as ex:
        self.assertEqual(str(ex), msg)
    else:
        # replaced `assertEqual(True, False)` sentinel with an explicit fail
        self.fail('InitializationError not raised')
def test_cms_invalid_width_2(self):
    ''' test invalid width invalid type '''
    def runner():
        ''' runner '''
        CountMinSketch(width='0.0', depth=5)
    self.assertRaises(InitializationError, runner)
    msg = 'CountMinSketch: width and depth must be greater than 0'
    try:
        runner()
    except InitializationError as ex:
        self.assertEqual(str(ex), msg)
    else:
        # replaced `assertEqual(True, False)` sentinel with an explicit fail
        self.fail('InitializationError not raised')
def test_cms_invalid_depth_2(self):
    ''' test invalid depth type '''
    def runner():
        ''' runner '''
        CountMinSketch(width=1000, depth=[])
    self.assertRaises(InitializationError, runner)
    msg = 'CountMinSketch: width and depth must be greater than 0'
    try:
        runner()
    except InitializationError as ex:
        self.assertEqual(str(ex), msg)
    else:
        # replaced `assertEqual(True, False)` sentinel with an explicit fail
        self.fail('InitializationError not raised')
def test_cms_invalid_conf(self):
    ''' test invalid confidence '''
    def runner():
        ''' runner '''
        CountMinSketch(confidence=-3.0, error_rate=0.99)
    self.assertRaises(InitializationError, runner)
    # invalid confidence yields non-positive derived dimensions, so the
    # library reports the width/depth message
    msg = 'CountMinSketch: width and depth must be greater than 0'
    try:
        runner()
    except InitializationError as ex:
        self.assertEqual(str(ex), msg)
    else:
        # replaced `assertEqual(True, False)` sentinel with an explicit fail
        self.fail('InitializationError not raised')
def test_cms_invalid_err_rate(self):
    ''' test invalid error rate '''
    def runner():
        ''' runner '''
        CountMinSketch(confidence=3.0, error_rate=0)
    self.assertRaises(InitializationError, runner)
    msg = 'CountMinSketch: width and depth must be greater than 0'
    try:
        runner()
    except InitializationError as ex:
        self.assertEqual(str(ex), msg)
    else:
        # replaced `assertEqual(True, False)` sentinel with an explicit fail
        self.fail('InitializationError not raised')
def test_cms_invalid_conf_2(self):
    ''' test invalid error rate invalid type (via confidence path) '''
    def runner():
        ''' runner '''
        CountMinSketch(confidence=3.0, error_rate='0.99')
    self.assertRaises(InitializationError, runner)
    msg = 'CountMinSketch: width and depth must be greater than 0'
    try:
        runner()
    except InitializationError as ex:
        self.assertEqual(str(ex), msg)
    else:
        # replaced `assertEqual(True, False)` sentinel with an explicit fail
        self.fail('InitializationError not raised')
def test_cms_invalid_err_rate_2(self):
    ''' test invalid error rate invalid type '''
    # NOTE(review): this runner duplicates test_cms_invalid_depth_2
    # (likely a copy-paste slip); it presumably should construct with an
    # invalid `error_rate` type instead — confirm against the library API
    # before changing the constructor call.
    def runner():
        ''' runner '''
        CountMinSketch(width=1000, depth=[])
    self.assertRaises(InitializationError, runner)
    msg = 'CountMinSketch: width and depth must be greater than 0'
    try:
        runner()
    except InitializationError as ex:
        self.assertEqual(str(ex), msg)
    else:
        # replaced `assertEqual(True, False)` sentinel with an explicit fail
        self.fail('InitializationError not raised')
class TestHeavyHitters(unittest.TestCase):
    ''' Test the default heavy hitters implementation '''

    def test_heavyhitters_init_wd(self):
        ''' test initializing heavy hitters using width and depth '''
        hh1 = HeavyHitters(num_hitters=1000, width=1000, depth=5)
        self.assertEqual(hh1.width, 1000)
        self.assertEqual(hh1.depth, 5)
        self.assertEqual(hh1.confidence, 0.96875)
        self.assertEqual(hh1.error_rate, 0.002)
        self.assertEqual(hh1.elements_added, 0)
        self.assertEqual(hh1.heavy_hitters, dict())
        self.assertEqual(hh1.number_heavy_hitters, 1000)

    def test_heavyhitters_init_ce(self):
        ''' test initializing heavy hitters using confidence and
            error rate '''
        hh1 = HeavyHitters(num_hitters=1000, confidence=0.96875,
                           error_rate=0.002)
        self.assertEqual(hh1.width, 1000)
        self.assertEqual(hh1.depth, 5)
        self.assertEqual(hh1.confidence, 0.96875)
        self.assertEqual(hh1.error_rate, 0.002)
        self.assertEqual(hh1.elements_added, 0)
        self.assertEqual(hh1.heavy_hitters, dict())
        self.assertEqual(hh1.number_heavy_hitters, 1000)

    def test_heavyhitters_add(self):
        ''' test adding things (singular) to the heavy hitters '''
        hh1 = HeavyHitters(num_hitters=2, width=1000, depth=5)
        self.assertEqual(hh1.add('this is a test'), 1)
        self.assertEqual(hh1.add('this is a test'), 2)
        self.assertEqual(hh1.add('this is a test'), 3)
        self.assertEqual(hh1.add('this is also a test'), 1)
        self.assertEqual(hh1.add('this is not a test'), 1)
        self.assertEqual(hh1.add('this is not a test'), 2)
        # only the top num_hitters (2) elements are tracked
        self.assertEqual(hh1.heavy_hitters,
                         {'this is a test': 3, 'this is not a test': 2})
        self.assertEqual(hh1.add('this is also a test'), 2)
        self.assertEqual(hh1.add('this is also a test'), 3)
        self.assertEqual(hh1.add('this is also a test'), 4)
        # 'this is also a test' now displaces 'this is not a test'
        self.assertEqual(hh1.heavy_hitters,
                         {'this is a test': 3, 'this is also a test': 4})

    def test_heavyhitters_add_mult(self):
        ''' test adding things (multiple) to the heavy hitters '''
        hh1 = HeavyHitters(num_hitters=2, width=1000, depth=5)
        self.assertEqual(hh1.add('this is a test', 3), 3)
        self.assertEqual(hh1.add('this is also a test'), 1)
        self.assertEqual(hh1.add('this is not a test', 2), 2)
        self.assertEqual(hh1.heavy_hitters,
                         {'this is a test': 3, 'this is not a test': 2})
        self.assertEqual(hh1.add('this is also a test', 3), 4)
        self.assertEqual(hh1.heavy_hitters,
                         {'this is a test': 3, 'this is also a test': 4})
        self.assertEqual(hh1.add('this is not a test', 2), 4)
        self.assertEqual(hh1.add('this is not a test', 2), 6)
        self.assertEqual(hh1.add('this is not a test', 2), 8)
        self.assertEqual(hh1.add('this is not a test', 2), 10)
        self.assertEqual(hh1.heavy_hitters,
                         {'this is not a test': 10, 'this is also a test': 4})

    def test_hh_remove(self):
        ''' test remove from heavy hitters exception '''
        hh1 = HeavyHitters(num_hitters=2, width=1000, depth=5)
        self.assertEqual(hh1.add('this is a test', 3), 3)
        self.assertRaises(NotSupportedError,
                          lambda: hh1.remove('this is a test'))

    def test_hh_remove_msg(self):
        ''' test remove from heavy hitters exception message '''
        hh1 = HeavyHitters(num_hitters=2, width=1000, depth=5)
        self.assertEqual(hh1.add('this is a test', 3), 3)
        try:
            hh1.remove('this is a test')
        except NotSupportedError as ex:
            # message must match the library exactly, typos included
            msg = ('Unable to remove elements in the HeavyHitters '
                   'class as it is an un supported action (and does not'
                   'make sense)!')
            self.assertEqual(str(ex), msg)
        else:
            # replaced `assertEqual(True, False)` sentinel with an
            # explicit fail
            self.fail('NotSupportedError not raised')

    def test_hh_clear(self):
        ''' test clearing out the heavy hitters object '''
        hh1 = HeavyHitters(num_hitters=1000, width=1000, depth=5)
        self.assertEqual(hh1.width, 1000)
        self.assertEqual(hh1.depth, 5)
        self.assertEqual(hh1.confidence, 0.96875)
        self.assertEqual(hh1.error_rate, 0.002)
        self.assertEqual(hh1.elements_added, 0)
        self.assertEqual(hh1.heavy_hitters, dict())
        self.assertEqual(hh1.number_heavy_hitters, 1000)
        self.assertEqual(hh1.add('this is a test', 3), 3)
        self.assertEqual(hh1.elements_added, 3)
        self.assertEqual(hh1.heavy_hitters, {'this is a test': 3})
        hh1.clear()
        self.assertEqual(hh1.elements_added, 0)
        self.assertEqual(hh1.heavy_hitters, dict())

    def test_hh_export(self):
        ''' test exporting a heavy hitters sketch '''
        # identical on-disk layout to a plain count-min sketch export
        md5_val = '61d2ea9d0cb09b7bb284e1cf1a860449'
        filename = 'test.cms'
        hh1 = HeavyHitters(num_hitters=1000, width=1000, depth=5)
        hh1.add('this is a test', 100)
        hh1.export(filename)
        md5_out = calc_file_md5(filename)
        os.remove(filename)
        self.assertEqual(md5_out, md5_val)

    def test_hh_load(self):
        ''' test loading a heavy hitters from file '''
        md5_val = '61d2ea9d0cb09b7bb284e1cf1a860449'
        filename = 'test.cms'
        hh1 = HeavyHitters(num_hitters=1000, width=1000, depth=5)
        self.assertEqual(hh1.add('this is a test', 100), 100)
        self.assertEqual(hh1.elements_added, 100)
        self.assertEqual(hh1.heavy_hitters, {'this is a test': 100})
        hh1.export(filename)
        md5_out = calc_file_md5(filename)
        self.assertEqual(md5_out, md5_val)
        # try loading directly to file!
        hh2 = HeavyHitters(num_hitters=1000, filepath=filename)
        self.assertEqual(hh2.width, 1000)
        self.assertEqual(hh2.depth, 5)
        self.assertEqual(hh2.elements_added, 100)
        self.assertEqual(hh2.check('this is a test'), 100)
        # show on load that the tracking of heavy hitters is gone
        self.assertEqual(hh2.heavy_hitters, dict())
        self.assertEqual(hh2.add('this is a test', 1), 101)
        self.assertEqual(hh2.heavy_hitters, {'this is a test': 101})
        os.remove(filename)

    def test_hh_str(self):
        ''' test the string representation of the heavy hitters sketch '''
        hh1 = HeavyHitters(num_hitters=2, width=1000, depth=5)
        self.assertEqual(hh1.add('this is a test', 100), 100)
        msg = ('Heavy Hitters Count-Min Sketch:\n'
               '\tWidth: 1000\n'
               '\tDepth: 5\n'
               '\tConfidence: 0.96875\n'
               '\tError Rate: 0.002\n'
               '\tElements Added: 100\n'
               '\tNumber Hitters: 2\n'
               '\tNumber Recorded: 1')
        self.assertEqual(str(hh1), msg)
class TestCountMeanSketch(unittest.TestCase):
    ''' test the basic count-mean sketch '''

    def test_default_count_mean_query(self):
        ''' test the default query of the count-mean sketch '''
        # the subclass pins its query strategy to 'mean' at construction
        sketch = CountMeanSketch(width=1000, depth=5)
        self.assertEqual(sketch.query_type, 'mean')
class TestCountMeanMinSketch(unittest.TestCase):
    ''' test the basic count-mean-min sketch '''

    def test_def_count_mean_min_query(self):
        ''' test the default query of the count-mean-min sketch '''
        # the subclass pins its query strategy to 'mean-min' at construction
        sketch = CountMeanMinSketch(width=1000, depth=5)
        self.assertEqual(sketch.query_type, 'mean-min')
class TestStreamThreshold(unittest.TestCase):
    ''' Test the default stream threshold implementation '''

    def test_streamthreshold_init_wd(self):
        ''' test initializing the stream threshold using width and depth '''
        st1 = StreamThreshold(threshold=1000, width=1000, depth=5)
        self.assertEqual(st1.width, 1000)
        self.assertEqual(st1.depth, 5)
        self.assertEqual(st1.confidence, 0.96875)
        self.assertEqual(st1.error_rate, 0.002)
        self.assertEqual(st1.elements_added, 0)
        # nothing added yet, so the threshold-tracking dict starts empty
        self.assertEqual(st1.meets_threshold, dict())
        self.assertEqual(st1.threshold, 1000)

    def test_streamthreshold_init_ec(self):
        ''' test initializing the stream threshold using error rate and
            confidence '''
        st1 = StreamThreshold(threshold=1000, confidence=0.96875,
                              error_rate=0.002)
        self.assertEqual(st1.width, 1000)
        self.assertEqual(st1.depth, 5)
        self.assertEqual(st1.confidence, 0.96875)
        self.assertEqual(st1.error_rate, 0.002)
        self.assertEqual(st1.elements_added, 0)
        self.assertEqual(st1.meets_threshold, dict())
        self.assertEqual(st1.threshold, 1000)

    def test_streamthreshold_add(self):
        ''' test adding elements to the stream threshold in singular '''
        st1 = StreamThreshold(threshold=2, width=1000, depth=5)
        self.assertEqual(st1.add('this is a test'), 1)
        # below threshold (2): not tracked yet
        self.assertEqual(st1.meets_threshold, dict())
        self.assertEqual(st1.add('this is a test'), 2)
        # at the threshold the element appears in the tracking dict
        self.assertEqual(st1.meets_threshold, {'this is a test': 2})
        self.assertEqual(st1.add('this is not a test'), 1)
        self.assertEqual(st1.meets_threshold, {'this is a test': 2})
        self.assertEqual(st1.add('this is a test'), 3)
        self.assertEqual(st1.meets_threshold, {'this is a test': 3})
        self.assertEqual(st1.add('this is not a test'), 2)
        self.assertEqual(st1.add('this is still not a test'), 1)
        self.assertEqual(st1.meets_threshold,
                         {'this is a test': 3, 'this is not a test': 2})
        self.assertEqual(st1.elements_added, 6)

    def test_streamthreshold_add_mult(self):
        ''' test adding elements to the stream threshold in multiple '''
        st1 = StreamThreshold(threshold=10, width=1000, depth=5)
        self.assertEqual(st1.add('this is a test', 5), 5)
        self.assertEqual(st1.meets_threshold, dict())
        self.assertEqual(st1.add('this is a test', 5), 10)
        self.assertEqual(st1.meets_threshold, {'this is a test': 10})
        self.assertEqual(st1.add('this is not a test', 9), 9)
        self.assertEqual(st1.meets_threshold, {'this is a test': 10})
        self.assertEqual(st1.add('this is a test', 20), 30)
        self.assertEqual(st1.meets_threshold, {'this is a test': 30})
        self.assertEqual(st1.add('this is not a test', 2), 11)
        self.assertEqual(st1.meets_threshold,
                         {'this is a test': 30, 'this is not a test': 11})
        self.assertEqual(st1.elements_added, 41)

    def test_streamthreshold_clear(self):
        ''' test clearing the stream threshold '''
        st1 = StreamThreshold(threshold=10, width=1000, depth=5)
        self.assertEqual(st1.add('this is a test', 5), 5)
        self.assertEqual(st1.meets_threshold, dict())
        self.assertEqual(st1.add('this is a test', 5), 10)
        self.assertEqual(st1.meets_threshold, {'this is a test': 10})
        self.assertEqual(st1.add('this is not a test', 9), 9)
        self.assertEqual(st1.meets_threshold, {'this is a test': 10})
        self.assertEqual(st1.add('this is a test', 20), 30)
        self.assertEqual(st1.meets_threshold, {'this is a test': 30})
        self.assertEqual(st1.add('this is not a test', 2), 11)
        self.assertEqual(st1.meets_threshold,
                         {'this is a test': 30, 'this is not a test': 11})
        self.assertEqual(st1.elements_added, 41)
        st1.clear()
        # clear() resets both the sketch counters and the tracking dict
        self.assertEqual(st1.meets_threshold, dict())
        self.assertEqual(st1.elements_added, 0)

    def test_streamthreshold_remove(self):
        ''' test removing elements from the stream threshold singular '''
        st1 = StreamThreshold(threshold=10, width=1000, depth=5)
        self.assertEqual(st1.add('this is a test', 5), 5)
        self.assertEqual(st1.meets_threshold, dict())
        self.assertEqual(st1.add('this is a test', 5), 10)
        self.assertEqual(st1.meets_threshold, {'this is a test': 10})
        self.assertEqual(st1.add('this is not a test', 9), 9)
        self.assertEqual(st1.meets_threshold, {'this is a test': 10})
        self.assertEqual(st1.add('this is a test', 20), 30)
        self.assertEqual(st1.meets_threshold, {'this is a test': 30})
        self.assertEqual(st1.add('this is not a test', 2), 11)
        self.assertEqual(st1.meets_threshold,
                         {'this is a test': 30, 'this is not a test': 11})
        self.assertEqual(st1.remove('this is a test'), 29)
        self.assertEqual(st1.meets_threshold,
                         {'this is a test': 29, 'this is not a test': 11})
        self.assertEqual(st1.remove('this is not a test'), 10)
        self.assertEqual(st1.remove('this is not a test'), 9)
        # dropping below the threshold evicts the key from the dict
        self.assertEqual(st1.meets_threshold, {'this is a test': 29})
        self.assertEqual(st1.elements_added, 38)

    def test_streamthreshold_rem_mult(self):
        ''' test adding elements to the stream threshold in multiple '''
        st1 = StreamThreshold(threshold=10, width=1000, depth=5)
        self.assertEqual(st1.add('this is a test', 30), 30)
        self.assertEqual(st1.add('this is not a test', 11), 11)
        self.assertEqual(st1.meets_threshold,
                         {'this is a test': 30, 'this is not a test': 11})
        self.assertEqual(st1.elements_added, 41)
        self.assertEqual(st1.remove('this is not a test', 2), 9)
        self.assertEqual(st1.meets_threshold, {'this is a test': 30})
        self.assertEqual(st1.elements_added, 39)

    def test_streamthreshold_export(self):
        ''' test exporting a stream threshold sketch '''
        # identical on-disk layout to a plain count-min sketch export
        md5_val = '61d2ea9d0cb09b7bb284e1cf1a860449'
        filename = 'test.cms'
        st1 = StreamThreshold(threshold=10, width=1000, depth=5)
        st1.add('this is a test', 100)
        st1.export(filename)
        md5_out = calc_file_md5(filename)
        os.remove(filename)
        self.assertEqual(md5_out, md5_val)

    def test_streamthreshold_load(self):
        ''' test loading a stream threshold sketch from file '''
        md5_val = '61d2ea9d0cb09b7bb284e1cf1a860449'
        filename = 'test.cms'
        st1 = StreamThreshold(threshold=10, width=1000, depth=5)
        self.assertEqual(st1.add('this is a test', 100), 100)
        self.assertEqual(st1.elements_added, 100)
        self.assertEqual(st1.meets_threshold, {'this is a test': 100})
        st1.export(filename)
        md5_out = calc_file_md5(filename)
        self.assertEqual(md5_out, md5_val)
        # try loading directly to file!
        st2 = StreamThreshold(threshold=10, filepath=filename)
        self.assertEqual(st2.width, 1000)
        self.assertEqual(st2.depth, 5)
        self.assertEqual(st2.elements_added, 100)
        self.assertEqual(st2.check('this is a test'), 100)
        # show on load that the tracking of stream threshold is gone
        self.assertEqual(st2.meets_threshold, dict())
        self.assertEqual(st2.add('this is a test', 1), 101)
        self.assertEqual(st2.meets_threshold, {'this is a test': 101})
        os.remove(filename)

    def test_streamthreshold_str(self):
        ''' test the string representation of the stream threshold sketch '''
        st1 = StreamThreshold(threshold=10, width=1000, depth=5)
        self.assertEqual(st1.add('this is a test', 100), 100)
        msg = ('Stream Threshold Count-Min Sketch:\n'
               '\tWidth: 1000\n'
               '\tDepth: 5\n'
               '\tConfidence: 0.96875\n'
               '\tError Rate: 0.002\n'
               '\tElements Added: 100\n'
               '\tThreshold: 10\n'
               '\tNumber Meeting Threshold: 1')
        self.assertEqual(str(st1), msg)
| 43.546322 | 78 | 0.621375 | 4,149 | 31,963 | 4.698964 | 0.05664 | 0.206966 | 0.036264 | 0.056986 | 0.848225 | 0.803037 | 0.784674 | 0.744922 | 0.710351 | 0.661828 | 0 | 0.059929 | 0.262335 | 31,963 | 733 | 79 | 43.60573 | 0.766944 | 0.098176 | 0 | 0.640071 | 0 | 0 | 0.150508 | 0.007903 | 0 | 0 | 0 | 0 | 0.501773 | 1 | 0.108156 | false | 0 | 0.012411 | 0 | 0.129433 | 0.001773 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
ed3f3b2785c92c44c143e633a438535605b284fc | 11,695 | py | Python | nnunet/network_architecture/P_Net_Pancreas2.py | Karol-G/nnUNet | a30bdbd64254c94c515ee03617173eb217eea505 | [
"Apache-2.0"
] | 2 | 2022-03-18T12:49:28.000Z | 2022-03-24T14:39:20.000Z | nnunet/network_architecture/P_Net_Pancreas2.py | Karol-G/nnUNet | a30bdbd64254c94c515ee03617173eb217eea505 | [
"Apache-2.0"
] | null | null | null | nnunet/network_architecture/P_Net_Pancreas2.py | Karol-G/nnUNet | a30bdbd64254c94c515ee03617173eb217eea505 | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
from nnunet.network_architecture.neural_network import SegmentationNetwork
from nnunet.utilities.nd_softmax import softmax_helper
from nnunet.network_architecture.initialization import InitWeights_He
class P_Net_Pancreas2(SegmentationNetwork):
def __init__(self, patch_size, in_channels=2, out_channels=32, num_classes=2, weightInitializer=InitWeights_He(1e-2), deep_supervision=False, conv_op=None): # or out_channels = 16/64
super(P_Net_Pancreas2, self).__init__()
self.conv_op = conv_op
self.num_classes = num_classes
self.do_ds = False
self.patch_size = [128, 128, 128] # patch_size.tolist()
self.original_size = patch_size.tolist()
# self.patch_size = patch_size.tolist()
self.block1 = nn.Sequential(
nn.Conv3d(in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=1),
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=1), # or kernel_size=[3, 3, 3]
nn.ReLU(),
)
self.block2 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=2),
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=2), # or kernel_size=[3, 3, 3]
nn.ReLU(),
)
self.block3 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=3), # or kernel_size=[3, 3, 1]
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=3),
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=3),
nn.ReLU(),
)
self.block4 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=4), # or kernel_size=[3, 3, 1]
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=4),
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=4),
nn.ReLU(),
)
self.block5 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=5), # or kernel_size=[3, 3, 1]
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=5),
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=5),
nn.ReLU(),
)
self.block6 = nn.Sequential(
nn.Conv3d(in_channels=int(out_channels/4)*5, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=1), # or kernel_size=[3, 3, 1]
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=num_classes, kernel_size=3, stride=1, padding=0, dilation=1),
# nn.ReLU(),
)
self.compress1 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=int(out_channels/4), kernel_size=1, stride=1, padding=0, dilation=1),
nn.ReLU(),
)
self.compress2 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=int(out_channels/4), kernel_size=1, stride=1, padding=0, dilation=1),
nn.ReLU(),
)
self.compress3 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=int(out_channels/4), kernel_size=1, stride=1, padding=0, dilation=1),
nn.ReLU(),
)
self.compress4 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=int(out_channels/4), kernel_size=1, stride=1, padding=0, dilation=1),
nn.ReLU(),
)
self.compress5 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=int(out_channels/4), kernel_size=1, stride=1, padding=0, dilation=1),
nn.ReLU(),
)
self.upsample1 = nn.Upsample(size=self.patch_size, mode='trilinear', align_corners=False) # [96, 160, 160]
self.upsample2 = nn.Upsample(size=self.patch_size, mode='trilinear', align_corners=False)
self.upsample3 = nn.Upsample(size=self.patch_size, mode='trilinear', align_corners=False)
self.upsample4 = nn.Upsample(size=self.patch_size, mode='trilinear', align_corners=False)
self.upsample5 = nn.Upsample(size=self.patch_size, mode='trilinear', align_corners=False)
self.upsample6 = nn.Upsample(size=self.original_size, mode='trilinear', align_corners=False)
self.apply(weightInitializer)
def forward(self, x):
x = F.interpolate(x, self.patch_size, mode="trilinear", align_corners=False)
x = self.block1(x)
compress1 = self.compress1(x)
x = self.block2(x)
compress2 = self.compress2(x)
x = self.block3(x)
compress3 = self.compress3(x)
x = self.block4(x)
compress4 = self.compress4(x)
x = self.block5(x)
compress5 = self.compress5(x)
compress1 = self.upsample1(compress1)
compress2 = self.upsample2(compress2)
compress3 = self.upsample3(compress3)
compress4 = self.upsample4(compress4)
compress5 = self.upsample5(compress5)
x = torch.cat((compress1, compress2, compress3, compress4, compress5), dim=1)
x = self.block6(x)
x = self.upsample6(x)
# x = softmax_helper(x)
return x
def compute_approx_vram_consumption(self):
return 715000000
class P_Net2(SegmentationNetwork):
def __init__(self, patch_size, in_channels=2, out_channels=32, num_classes=2, weightInitializer=InitWeights_He(1e-2), deep_supervision=False, conv_op=None): # or out_channels = 16/64
super(P_Net2, self).__init__()
self.conv_op = conv_op
self.num_classes = num_classes
self.do_ds = False
self.patch_size = [128, 128, 128] # patch_size.tolist()
self.original_size = patch_size.tolist()
# self.patch_size = patch_size.tolist()
self.block1 = nn.Sequential(
nn.Conv3d(in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=1),
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=1), # or kernel_size=[3, 3, 3]
nn.ReLU(),
)
self.block2 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=2),
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=2), # or kernel_size=[3, 3, 3]
nn.ReLU(),
)
self.block3 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=3), # or kernel_size=[3, 3, 1]
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=3),
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=3),
nn.ReLU(),
)
self.block4 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=4), # or kernel_size=[3, 3, 1]
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=4),
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=4),
nn.ReLU(),
)
self.block5 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=5), # or kernel_size=[3, 3, 1]
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=5),
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=5),
nn.ReLU(),
)
self.block6 = nn.Sequential(
nn.Conv3d(in_channels=int(out_channels/4)*5, out_channels=out_channels, kernel_size=3, stride=1, padding=0, dilation=1), # or kernel_size=[3, 3, 1]
nn.ReLU(),
nn.Conv3d(in_channels=out_channels, out_channels=num_classes, kernel_size=3, stride=1, padding=0, dilation=1),
# nn.ReLU(),
)
self.compress1 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=int(out_channels/4), kernel_size=1, stride=1, padding=0, dilation=1),
nn.ReLU(),
)
self.compress2 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=int(out_channels/4), kernel_size=1, stride=1, padding=0, dilation=1),
nn.ReLU(),
)
self.compress3 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=int(out_channels/4), kernel_size=1, stride=1, padding=0, dilation=1),
nn.ReLU(),
)
self.compress4 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=int(out_channels/4), kernel_size=1, stride=1, padding=0, dilation=1),
nn.ReLU(),
)
self.compress5 = nn.Sequential(
nn.Conv3d(in_channels=out_channels, out_channels=int(out_channels/4), kernel_size=1, stride=1, padding=0, dilation=1),
nn.ReLU(),
)
self.upsample1 = nn.Upsample(size=self.patch_size, mode='trilinear', align_corners=False) # [96, 160, 160]
self.upsample2 = nn.Upsample(size=self.patch_size, mode='trilinear', align_corners=False)
self.upsample3 = nn.Upsample(size=self.patch_size, mode='trilinear', align_corners=False)
self.upsample4 = nn.Upsample(size=self.patch_size, mode='trilinear', align_corners=False)
self.upsample5 = nn.Upsample(size=self.patch_size, mode='trilinear', align_corners=False)
self.upsample6 = nn.Upsample(size=self.original_size, mode='trilinear', align_corners=False)
self.apply(weightInitializer)
def forward(self, x):
x = F.interpolate(x, self.patch_size, mode="trilinear", align_corners=False)
x = self.block1(x)
compress1 = self.compress1(x)
x = self.block2(x)
compress2 = self.compress2(x)
x = self.block3(x)
compress3 = self.compress3(x)
x = self.block4(x)
compress4 = self.compress4(x)
x = self.block5(x)
compress5 = self.compress5(x)
compress1 = self.upsample1(compress1)
compress2 = self.upsample2(compress2)
compress3 = self.upsample3(compress3)
compress4 = self.upsample4(compress4)
compress5 = self.upsample5(compress5)
x = torch.cat((compress1, compress2, compress3, compress4, compress5), dim=1)
x = self.block6(x)
x = self.upsample6(x)
# x = softmax_helper(x)
return x
def compute_approx_vram_consumption(self):
return 715000000
| 51.069869 | 187 | 0.656178 | 1,597 | 11,695 | 4.608641 | 0.06387 | 0.179348 | 0.263315 | 0.191304 | 0.962772 | 0.962772 | 0.962772 | 0.962772 | 0.962772 | 0.962772 | 0 | 0.047603 | 0.216845 | 11,695 | 228 | 188 | 51.29386 | 0.755978 | 0.047798 | 0 | 0.843137 | 0 | 0 | 0.011341 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.029412 | false | 0 | 0.029412 | 0.009804 | 0.088235 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
ed6f47fb1dad1a246ce9a3687abc7f21f2ecf09a | 14,898 | py | Python | tests/base/test_interactive_test_base.py | AnantTiwari-Naman/pyglet | 4774f2889057da95a78785a69372112931e6a620 | [
"BSD-3-Clause"
] | 1,160 | 2019-06-13T11:51:40.000Z | 2022-03-31T01:55:32.000Z | tests/base/test_interactive_test_base.py | AaronCWacker/pyglet | 63b1ece7043133d47eb898857876e4927d9759b2 | [
"BSD-3-Clause"
] | 491 | 2019-07-14T16:13:11.000Z | 2022-03-31T08:04:32.000Z | tests/base/test_interactive_test_base.py | AaronCWacker/pyglet | 63b1ece7043133d47eb898857876e4927d9759b2 | [
"BSD-3-Clause"
] | 316 | 2019-06-14T13:56:48.000Z | 2022-03-30T19:26:58.000Z | """
Test the base class for interactive test cases.
"""
import glob
from tests import mock
import os
import pytest
import shutil
from .interactive import InteractiveTestCase
import tempfile
import unittest
import pyglet
from pyglet import window
from pyglet.gl import *
@pytest.mark.requires_user_action
class InteractiveTestCaseTest(InteractiveTestCase):
    """
    Test the interactive test case base. Is an interactive test case itself, to be able to test it
    properly.
    """

    def setUp(self):
        # Track started patchers and created temp dirs so tearDown can undo both.
        self._patchers = []
        self._temporary_directories = []

    def tearDown(self):
        """Stop all patchers and remove every temporary directory."""
        for patcher in self._patchers:
            patcher.stop()
        for directory in self._temporary_directories:
            shutil.rmtree(directory)

    def _patch_directory(self, target):
        """Patch *target* to point at a fresh temp directory; return its path."""
        directory = tempfile.mkdtemp()
        self._temporary_directories.append(directory)
        patcher = mock.patch(target, directory)
        self._patchers.append(patcher)
        patcher.start()
        return directory

    def _patch_screenshot_paths(self):
        """Redirect session and committed screenshot paths to temp directories."""
        # NOTE(review): these patch 'tests.base.interactive.*' while the
        # decorators below patch 'tests.interactive.interactive_test_base.*'
        # — looks like a stale module path after a package move; verify.
        self._session_screenshot_path = self._patch_directory('tests.base.interactive.session_screenshot_path')
        self._committed_screenshot_path = self._patch_directory('tests.base.interactive.committed_screenshot_path')

    def test_single_method(self):
        """A test case with one method is discovered and executed."""
        class _Test(InteractiveTestCase):
            test1_ran = False

            def test_1(self):
                _Test.test1_ran = True

        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 1)
        result = unittest.TestResult()
        tests.run(result)
        self.assertTrue(_Test.test1_ran, 'Test should have run')

    def test_multiple_methods(self):
        """A test case with two methods runs both of them."""
        class _Test(InteractiveTestCase):
            test1_ran = False
            test2_ran = False

            def test_1(self):
                _Test.test1_ran = True

            def test_2(self):
                _Test.test2_ran = True

        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 2)
        result = unittest.TestResult()
        tests.run(result)
        self.assertTrue(_Test.test1_ran, 'Test 1 should have run')
        self.assertTrue(_Test.test2_ran, 'Test 2 should have run')

    def test_user_verify_passed(self):
        """user_verify with a positive user answer produces a passing test."""
        class _Test(InteractiveTestCase):
            test1_ran = False

            def test_1(self):
                _Test.test1_ran = True
                self.user_verify('Just press Enter', take_screenshot=False)

        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 1)
        result = unittest.TestResult()
        tests.run(result)
        self.assertTrue(_Test.test1_ran, 'Test should have run')
        self.assertEqual(len(result.failures), 0, 'Not expecting failures')
        self.assertEqual(len(result.errors), 0, 'Not expecting errors')
        self.assertEqual(result.testsRun, 1, 'Expected 1 test run')
        self.user_verify('Did I ask you to press Enter?', take_screenshot=False)

    def test_user_verify_failed(self):
        """A negative user answer turns into a test failure carrying the reason."""
        class _Test(InteractiveTestCase):
            test1_ran = False

            def test_1(self):
                _Test.test1_ran = True
                self.user_verify('Enter "n" and then enter reason "abcd"', take_screenshot=False)

        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 1)
        result = unittest.TestResult()
        tests.run(result)
        self.assertTrue(_Test.test1_ran, 'Test should have run')
        self.assertEqual(len(result.failures), 1, 'Expected 1 test failure')
        self.assertEqual(len(result.errors), 0, 'Not expecting errors')
        self.assertEqual(result.testsRun, 1, 'Expected 1 test run')
        self.assertIn('AssertionError: abcd', result.failures[0][1], 'Did not get failure message entered by user.')

    def test_verify_commits_screenshot_on_user_passed(self):
        """When the user passes the test, the screenshot is committed."""
        class _Test(InteractiveTestCase):
            def test_1(self):
                w = window.Window(200, 200)
                w.switch_to()
                glClearColor(1, 0, 1, 1)
                glClear(GL_COLOR_BUFFER_BIT)
                w.flip()
                self.user_verify('Please choose yes (or press Enter)')
                w.close()

        self._patch_screenshot_paths()
        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 1)
        result = unittest.TestResult()
        tests.run(result)
        self.assertEqual(len(result.failures), 0, 'Not expecting failures')
        self.assertEqual(len(result.errors), 0, 'Not expecting errors')
        self.assertEqual(result.testsRun, 1, 'Expected 1 test run')
        files = glob.glob(os.path.join(self._session_screenshot_path, '*.png'))
        self.assertEqual(len(files), 1, 'Screenshot not stored in session directory')
        self.assertIn('tests.interactive.test_interactive_test_base._Test.test_1.001.png', files[0])
        files = glob.glob(os.path.join(self._committed_screenshot_path, '*.png'))
        self.assertEqual(len(files), 1, 'Screenshot not committed')
        self.assertIn('tests.interactive.test_interactive_test_base._Test.test_1.001.png', files[0])

    @mock.patch('tests.interactive.interactive_test_base.interactive', False)
    def test_screenshot_taken_but_not_committed_on_noninteractive_failure(self):
        """In non-interactive mode a failing test keeps its session screenshot
        but must not commit it."""
        class _Test(InteractiveTestCase):
            def test_1(self):
                w = window.Window(200, 200)
                w.switch_to()
                glClearColor(1, 0, 1, 1)
                glClear(GL_COLOR_BUFFER_BIT)
                w.flip()
                self.user_verify('Empty window')
                w.close()
                self.fail('Test failed')

        self._patch_screenshot_paths()
        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 1)
        result = unittest.TestResult()
        tests.run(result)
        self.assertEqual(len(result.failures), 1, 'Expecting 1 failure')
        self.assertEqual(len(result.errors), 0, 'Not expecting errors')
        self.assertEqual(result.testsRun, 1, 'Expected 1 test run')
        files = glob.glob(os.path.join(self._session_screenshot_path, '*.png'))
        self.assertEqual(len(files), 1, 'Screenshot not stored in session directory')
        self.assertIn('tests.interactive.test_interactive_test_base._Test.test_1.001.png', files[0])
        files = glob.glob(os.path.join(self._committed_screenshot_path, '*.png'))
        self.assertEqual(len(files), 0, 'Screenshot should not have been comitted')

    @mock.patch('tests.interactive.interactive_test_base.interactive', False)
    @mock.patch('tests.interactive.interactive_test_base.allow_missing_screenshots', True)
    def test_screenshot_taken_but_not_committed_on_noninteractive_pass(self):
        """With missing screenshots allowed, a non-interactive pass still
        does not commit the new screenshot."""
        class _Test(InteractiveTestCase):
            def test_1(self):
                w = window.Window(200, 200)
                w.switch_to()
                glClearColor(1, 0, 1, 1)
                glClear(GL_COLOR_BUFFER_BIT)
                w.flip()
                self.user_verify('Empty window')
                w.close()

        self._patch_screenshot_paths()
        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 1)
        result = unittest.TestResult()
        tests.run(result)
        self.assertEqual(len(result.failures), 0, 'Not expecting failures')
        self.assertEqual(len(result.errors), 0, 'Not expecting errors')
        self.assertEqual(result.testsRun, 1, 'Expected 1 test run')
        files = glob.glob(os.path.join(self._session_screenshot_path, '*.png'))
        self.assertEqual(len(files), 1, 'Screenshot not stored in session directory')
        self.assertIn('tests.interactive.test_interactive_test_base._Test.test_1.001.png', files[0])
        files = glob.glob(os.path.join(self._committed_screenshot_path, '*.png'))
        self.assertEqual(len(files), 0, 'Screenshot should not have been comitted')

    @mock.patch('tests.interactive.interactive_test_base.interactive', False)
    def test_fails_on_missing_screenshot_on_noninteractive_pass(self):
        """Without a committed reference screenshot, a non-interactive run fails."""
        class _Test(InteractiveTestCase):
            def test_1(self):
                w = window.Window(200, 200)
                w.switch_to()
                glClearColor(1, 0, 1, 1)
                glClear(GL_COLOR_BUFFER_BIT)
                w.flip()
                self.user_verify('Empty window')
                w.close()

        self._patch_screenshot_paths()
        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 1)
        result = unittest.TestResult()
        tests.run(result)
        self.assertEqual(len(result.failures), 1, 'Expecting 1 failure')
        self.assertEqual(len(result.errors), 0, 'Not expecting errors')
        self.assertEqual(result.testsRun, 1, 'Expected 1 test run')
        files = glob.glob(os.path.join(self._session_screenshot_path, '*.png'))
        self.assertEqual(len(files), 1, 'Screenshot not stored in session directory')
        self.assertIn('tests.interactive.test_interactive_test_base._Test.test_1.001.png', files[0])
        files = glob.glob(os.path.join(self._committed_screenshot_path, '*.png'))
        self.assertEqual(len(files), 0, 'Screenshot should not have been comitted')

    def test_screenshot_taken_but_not_committed_on_user_failure(self):
        """A user-rejected verification keeps the session screenshot
        but must not commit it."""
        class _Test(InteractiveTestCase):
            def test_1(self):
                w = window.Window(200, 200)
                w.switch_to()
                glClearColor(1, 0, 1, 1)
                glClear(GL_COLOR_BUFFER_BIT)
                w.flip()
                # Close the window even if the user fails the verification.
                try:
                    self.user_verify('Please select "n" and enter any reason')
                finally:
                    w.close()

        self._patch_screenshot_paths()
        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 1)
        result = unittest.TestResult()
        tests.run(result)
        self.assertEqual(len(result.failures), 1, 'Expecting 1 failure')
        self.assertEqual(len(result.errors), 0, 'Not expecting errors')
        self.assertEqual(result.testsRun, 1, 'Expected 1 test run')
        files = glob.glob(os.path.join(self._session_screenshot_path, '*.png'))
        self.assertEqual(len(files), 1, 'Screenshot not stored in session directory')
        self.assertIn('tests.interactive.test_interactive_test_base._Test.test_1.001.png', files[0])
        files = glob.glob(os.path.join(self._committed_screenshot_path, '*.png'))
        self.assertEqual(len(files), 0, 'Screenshot should not have been committed')

    @mock.patch('tests.interactive.interactive_test_base.interactive', False)
    def test_screenshot_does_not_match(self):
        """A mismatching screenshot fails the test and leaves the committed
        reference image untouched."""
        class _Test(InteractiveTestCase):
            def test_1(self):
                w = window.Window(200, 200)
                w.switch_to()
                # Blue clear color: deliberately different from the reference.
                glClearColor(0, 0, 1, 1)
                glClear(GL_COLOR_BUFFER_BIT)
                w.flip()
                self.user_verify('Empty window')
                w.close()

        self._patch_screenshot_paths()
        # Copy non matching screenshot
        screenshot_name = 'tests.interactive.test_interactive_test_base._Test.test_1.001.png'
        original_screenshot = os.path.join(os.path.dirname(__file__), '..', 'data', 'images', screenshot_name)
        committed_screenshot = os.path.join(self._committed_screenshot_path, screenshot_name)
        shutil.copy(original_screenshot, committed_screenshot)
        # Start the test
        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 1)
        result = unittest.TestResult()
        tests.run(result)
        self.assertEqual(len(result.failures), 1, 'Expecting 1 failure')
        self.assertEqual(len(result.errors), 0, 'Not expecting errors')
        self.assertEqual(result.testsRun, 1, 'Expected 1 test run')
        files = glob.glob(os.path.join(self._session_screenshot_path, '*.png'))
        self.assertEqual(len(files), 1, 'Screenshot not stored in session directory')
        self.assertIn('tests.interactive.test_interactive_test_base._Test.test_1.001.png', files[0])
        # Verify committed image not changed
        original_image = pyglet.image.load(original_screenshot)
        committed_image = pyglet.image.load(committed_screenshot)
        self.assert_image_equal(original_image, committed_image, msg='Committed image should not be overwritten')

    @mock.patch('tests.interactive.interactive_test_base.interactive', False)
    def test_screenshot_matches(self):
        """A matching committed screenshot lets the non-interactive run pass."""
        class _Test(InteractiveTestCase):
            def test_1(self):
                w = window.Window(200, 200)
                w.switch_to()
                glClearColor(1, 0, 1, 1)
                glClear(GL_COLOR_BUFFER_BIT)
                w.flip()
                self.user_verify('Empty window')
                w.close()

        self._patch_screenshot_paths()
        # Copy matching screenshot
        screenshot_name = 'tests.interactive.test_interactive_test_base._Test.test_1.001.png'
        original_screenshot = os.path.join(os.path.dirname(__file__), '..', 'data', 'images', screenshot_name)
        committed_screenshot = os.path.join(self._committed_screenshot_path, screenshot_name)
        shutil.copy(original_screenshot, committed_screenshot)
        # Start the test
        tests = unittest.defaultTestLoader.loadTestsFromTestCase(_Test)
        self.assertIsNotNone(tests)
        self.assertEqual(tests.countTestCases(), 1)
        result = unittest.TestResult()
        tests.run(result)
        self.assertEqual(len(result.failures), 0, 'Not expecting failures')
        self.assertEqual(len(result.errors), 0, 'Not expecting errors')
        self.assertEqual(result.testsRun, 1, 'Expected 1 test run')
        files = glob.glob(os.path.join(self._session_screenshot_path, '*.png'))
        self.assertEqual(len(files), 1, 'Screenshot not stored in session directory')
        self.assertIn('tests.interactive.test_interactive_test_base._Test.test_1.001.png', files[0])
| 39 | 116 | 0.655256 | 1,704 | 14,898 | 5.529343 | 0.098005 | 0.079601 | 0.057313 | 0.04585 | 0.838251 | 0.829972 | 0.829866 | 0.82042 | 0.805243 | 0.797495 | 0 | 0.018828 | 0.240636 | 14,898 | 381 | 117 | 39.102362 | 0.814019 | 0.018257 | 0 | 0.762774 | 0 | 0 | 0.178168 | 0.072912 | 0 | 0 | 0 | 0 | 0.277372 | 1 | 0.09854 | false | 0.014599 | 0.040146 | 0 | 0.186131 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed7886b8353662c317cb9b6dfb7cef6f4666c6d3 | 33,307 | py | Python | raiden/tests/integration/api/rest/test_channel.py | ezdac/raiden | d7504996e6738b55d5a9dcf9a36ef66797f6f326 | [
"MIT"
] | 1 | 2020-10-19T15:00:42.000Z | 2020-10-19T15:00:42.000Z | raiden/tests/integration/api/rest/test_channel.py | ezdac/raiden | d7504996e6738b55d5a9dcf9a36ef66797f6f326 | [
"MIT"
] | null | null | null | raiden/tests/integration/api/rest/test_channel.py | ezdac/raiden | d7504996e6738b55d5a9dcf9a36ef66797f6f326 | [
"MIT"
] | null | null | null | from http import HTTPStatus
import gevent
import grequests
import pytest
from eth_utils import to_canonical_address, to_checksum_address
from raiden.api.rest import APIServer
from raiden.constants import BLOCK_ID_LATEST, NULL_ADDRESS_HEX
from raiden.raiden_service import RaidenService
from raiden.tests.integration.api.rest.test_rest import DEPOSIT_FOR_TEST_API_DEPOSIT_LIMIT
from raiden.tests.integration.api.rest.utils import (
api_url_for,
assert_proper_response,
assert_response_with_code,
assert_response_with_error,
get_json_response,
)
from raiden.tests.utils import factories
from raiden.tests.utils.client import burn_eth
from raiden.tests.utils.detect_failure import raise_on_failure
from raiden.tests.utils.events import check_dict_nested_attrs
from raiden.transfer import views
from raiden.transfer.state import ChannelState
from raiden.utils.typing import List, TokenAmount
from raiden.waiting import wait_for_participant_deposit
from raiden_contracts.constants import TEST_SETTLE_TIMEOUT_MAX, TEST_SETTLE_TIMEOUT_MIN
@raise_on_failure
@pytest.mark.parametrize("number_of_nodes", [1])
@pytest.mark.parametrize("channels_per_node", [0])
@pytest.mark.parametrize("enable_rest_api", [True])
def test_api_channel_status_channel_nonexistant(
    api_server_test_instance: APIServer, token_addresses
):
    """Querying a channel that was never opened must return 404 with a
    descriptive error message."""
    partner_address = "0x61C808D82A3Ac53231750daDc13c777b59310bD9"
    token_address = token_addresses[0]

    channel_url = api_url_for(
        api_server_test_instance,
        "channelsresourcebytokenandpartneraddress",
        token_address=token_address,
        partner_address=partner_address,
    )
    response = grequests.get(channel_url).send().response

    assert_proper_response(response, HTTPStatus.NOT_FOUND)
    expected_error = "Channel with partner '{}' for token '{}' could not be found.".format(
        to_checksum_address(partner_address), to_checksum_address(token_address)
    )
    assert get_json_response(response)["errors"] == expected_error
@raise_on_failure
@pytest.mark.parametrize("number_of_nodes", [1])
@pytest.mark.parametrize("channels_per_node", [0])
@pytest.mark.parametrize("enable_rest_api", [True])
def test_api_channel_open_and_deposit(
    api_server_test_instance: APIServer, token_addresses, reveal_timeout
):
    """End-to-end REST scenario for channel opening and depositing.

    Covers: rejecting the null partner address, opening a channel, the
    idempotent re-open, open-with-deposit, rejecting deposits below the
    current total and negative deposits, topping up, querying both
    channels, and failing with 402 once the account's ETH is burned.
    """
    first_partner_address = "0x61C808D82A3Ac53231750daDc13c777b59310bD9"
    token_address = token_addresses[0]
    token_address_hex = to_checksum_address(token_address)
    settle_timeout = 1650
    channel_data_obj = {
        "partner_address": first_partner_address,
        "token_address": token_address_hex,
        "settle_timeout": str(settle_timeout),
        "reveal_timeout": str(reveal_timeout),
    }
    # First let's try to create channel with the null address and see error is handled
    channel_data_obj["partner_address"] = NULL_ADDRESS_HEX
    request = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    )
    response = request.send().response
    assert_response_with_error(response, status_code=HTTPStatus.BAD_REQUEST)
    # now let's really create a new channel
    channel_data_obj["partner_address"] = first_partner_address
    request = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CREATED)
    first_channel_id = 1
    json_response = get_json_response(response)
    expected_response = channel_data_obj.copy()
    expected_response.update(
        {
            "balance": "0",
            "state": ChannelState.STATE_OPENED.value,
            "channel_identifier": "1",
            "total_deposit": "0",
        }
    )
    assert check_dict_nested_attrs(json_response, expected_response)
    token_network_address = json_response["token_network_address"]
    # Now let's try to open the same channel again, because it is possible for
    # the participants to race on the channel creation, this is not considered
    # an error.
    request = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.OK)
    json_response = get_json_response(response)
    assert check_dict_nested_attrs(json_response, expected_response)
    # now let's open a channel and make a deposit too
    second_partner_address = "0x29FA6cf0Cce24582a9B20DB94Be4B6E017896038"
    total_deposit = 100
    channel_data_obj = {
        "partner_address": second_partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
        "reveal_timeout": str(reveal_timeout),
        "total_deposit": str(total_deposit),
    }
    request = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CREATED)
    second_channel_id = 2
    json_response = get_json_response(response)
    expected_response = channel_data_obj.copy()
    expected_response.update(
        {
            "balance": str(total_deposit),
            "state": ChannelState.STATE_OPENED.value,
            "channel_identifier": str(second_channel_id),
            "token_network_address": token_network_address,
            "total_deposit": str(total_deposit),
        }
    )
    assert check_dict_nested_attrs(json_response, expected_response)
    # assert depositing again with less than the initial deposit returns 409
    request = grequests.patch(
        api_url_for(
            api_server_test_instance,
            "channelsresourcebytokenandpartneraddress",
            token_address=token_address,
            partner_address=second_partner_address,
        ),
        json={"total_deposit": "99"},
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CONFLICT)
    # assert depositing negative amount fails
    request = grequests.patch(
        api_url_for(
            api_server_test_instance,
            "channelsresourcebytokenandpartneraddress",
            token_address=token_address,
            partner_address=first_partner_address,
        ),
        json={"total_deposit": "-1000"},
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CONFLICT)
    # let's deposit on the first channel
    request = grequests.patch(
        api_url_for(
            api_server_test_instance,
            "channelsresourcebytokenandpartneraddress",
            token_address=token_address,
            partner_address=first_partner_address,
        ),
        json={"total_deposit": str(total_deposit)},
    )
    response = request.send().response
    assert_proper_response(response)
    json_response = get_json_response(response)
    expected_response = {
        "channel_identifier": str(first_channel_id),
        "partner_address": first_partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
        "reveal_timeout": str(reveal_timeout),
        "state": ChannelState.STATE_OPENED.value,
        "balance": str(total_deposit),
        "total_deposit": str(total_deposit),
        "token_network_address": token_network_address,
    }
    assert check_dict_nested_attrs(json_response, expected_response)
    # let's try querying for the second channel
    request = grequests.get(
        api_url_for(
            api_server_test_instance,
            "channelsresourcebytokenandpartneraddress",
            token_address=token_address,
            partner_address=second_partner_address,
        )
    )
    response = request.send().response
    assert_proper_response(response)
    json_response = get_json_response(response)
    expected_response = {
        "channel_identifier": str(second_channel_id),
        "partner_address": second_partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
        "reveal_timeout": str(reveal_timeout),
        "state": ChannelState.STATE_OPENED.value,
        "balance": str(total_deposit),
        "total_deposit": str(total_deposit),
        "token_network_address": token_network_address,
    }
    assert check_dict_nested_attrs(json_response, expected_response)
    # finally let's burn all eth and try to open another channel
    burn_eth(api_server_test_instance.rest_api.raiden_api.raiden.rpc_client)
    channel_data_obj = {
        "partner_address": "0xf3AF96F89b3d7CdcBE0C083690A28185Feb0b3CE",
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
        "reveal_timeout": str(reveal_timeout),
    }
    request = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.PAYMENT_REQUIRED)
    json_response = get_json_response(response)
    assert "The account balance is below the estimated amount" in json_response["errors"]
@raise_on_failure
@pytest.mark.parametrize("number_of_nodes", [1])
@pytest.mark.parametrize("channels_per_node", [0])
@pytest.mark.parametrize("enable_rest_api", [True])
def test_api_channel_open_and_deposit_race(
    api_server_test_instance: APIServer,
    raiden_network: List[RaidenService],
    token_addresses,
    reveal_timeout,
    token_network_registry_address,
    retry_timeout,
):
    """Tests that a race for the same deposit from the API is handled properly

    The proxy's approve_and_set_total_deposit is raising a
    RaidenRecoverableError in case of races. That needs to be properly handled
    and not allowed to bubble out of the greenlet.

    Regression test for https://github.com/raiden-network/raiden/issues/4937
    """
    app0 = raiden_network[0]
    # let's create a new channel
    first_partner_address = "0x61C808D82A3Ac53231750daDc13c777b59310bD9"
    token_address = token_addresses[0]
    settle_timeout = 1650
    channel_data_obj = {
        "partner_address": first_partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
        "reveal_timeout": str(reveal_timeout),
    }
    request = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CREATED)
    json_response = get_json_response(response)
    expected_response = channel_data_obj.copy()
    expected_response.update(
        {
            "balance": "0",
            "state": ChannelState.STATE_OPENED.value,
            "channel_identifier": "1",
            "total_deposit": "0",
        }
    )
    assert check_dict_nested_attrs(json_response, expected_response)
    # Prepare the deposit api call
    deposit_amount = TokenAmount(99)
    request = grequests.patch(
        api_url_for(
            api_server_test_instance,
            "channelsresourcebytokenandpartneraddress",
            token_address=token_address,
            partner_address=first_partner_address,
        ),
        json={"total_deposit": str(deposit_amount)},
    )
    # Spawn two greenlets doing the same deposit request
    greenlets = [gevent.spawn(request.send), gevent.spawn(request.send)]
    gevent.joinall(set(greenlets), raise_error=True)
    # Make sure that both responses are fine
    g1_response = greenlets[0].get().response
    assert_proper_response(g1_response, HTTPStatus.OK)
    json_response = get_json_response(g1_response)
    expected_response.update(
        {"total_deposit": str(deposit_amount), "balance": str(deposit_amount)}
    )
    assert check_dict_nested_attrs(json_response, expected_response)
    # Bug fix: the original read greenlets[0] a second time here, so the
    # second racing request's response was never actually verified.
    g2_response = greenlets[1].get().response
    assert_proper_response(g2_response, HTTPStatus.OK)
    json_response = get_json_response(g2_response)
    assert check_dict_nested_attrs(json_response, expected_response)
    # Wait for the deposit to be seen
    timeout_seconds = 20
    exception = Exception(f"Expected deposit not seen within {timeout_seconds}")
    with gevent.Timeout(seconds=timeout_seconds, exception=exception):
        wait_for_participant_deposit(
            raiden=app0,
            token_network_registry_address=token_network_registry_address,
            token_address=token_address,
            partner_address=to_canonical_address(first_partner_address),
            target_address=app0.address,
            target_balance=deposit_amount,
            retry_timeout=retry_timeout,
        )
    # Confirm via the API that the single deposit (not a doubled one) landed.
    request = grequests.get(api_url_for(api_server_test_instance, "channelsresource"))
    response = request.send().response
    assert_proper_response(response, HTTPStatus.OK)
    json_response = get_json_response(response)
    channel_info = json_response[0]
    assert channel_info["token_address"] == to_checksum_address(token_address)
    assert channel_info["total_deposit"] == str(deposit_amount)
@raise_on_failure
@pytest.mark.parametrize("number_of_nodes", [1])
@pytest.mark.parametrize("channels_per_node", [0])
@pytest.mark.parametrize("enable_rest_api", [True])
def test_api_channel_open_close_and_settle(
    api_server_test_instance: APIServer, token_addresses, reveal_timeout
):
    """Open a channel via the REST API, close it, then verify that a second
    close, a payment, and a re-open are all rejected while it settles."""
    partner_address = "0x61C808D82A3Ac53231750daDc13c777b59310bD9"
    token_address = token_addresses[0]
    settle_timeout = 1650
    balance = 0
    channel_identifier = 1

    channel_data_obj = {
        "partner_address": partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
    }

    # Open a brand-new channel with the partner.
    open_response = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    ).send().response
    assert_proper_response(open_response, status_code=HTTPStatus.CREATED)

    opened_json = get_json_response(open_response)
    expected = dict(channel_data_obj)
    expected.update(
        balance=str(balance),
        state=ChannelState.STATE_OPENED.value,
        reveal_timeout=str(reveal_timeout),
        channel_identifier=str(channel_identifier),
        total_deposit="0",
    )
    assert check_dict_nested_attrs(opened_json, expected)
    token_network_address = opened_json["token_network_address"]

    # Close the channel.
    channel_url = api_url_for(
        api_server_test_instance,
        "channelsresourcebytokenandpartneraddress",
        token_address=token_address,
        partner_address=partner_address,
    )
    close_response = grequests.patch(
        channel_url, json={"state": ChannelState.STATE_CLOSED.value}
    ).send().response
    assert_proper_response(close_response)
    expected_closed = {
        "token_network_address": token_network_address,
        "channel_identifier": str(channel_identifier),
        "partner_address": partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
        "reveal_timeout": str(reveal_timeout),
        "state": ChannelState.STATE_CLOSED.value,
        "balance": str(balance),
        "total_deposit": str(balance),
    }
    assert check_dict_nested_attrs(get_json_response(close_response), expected_closed)

    # Closing the already-closed channel again must be rejected.
    reclose_response = grequests.patch(
        channel_url, json={"state": ChannelState.STATE_CLOSED.value}
    ).send().response
    assert_proper_response(reclose_response, HTTPStatus.CONFLICT)

    # Payments through a closed channel must be rejected.
    payment_response = grequests.post(
        api_url_for(
            api_server_test_instance,
            "token_target_paymentresource",
            token_address=to_checksum_address(token_address),
            target_address=to_checksum_address(partner_address),
        ),
        json={"amount": "1"},
    ).send().response
    assert_proper_response(payment_response, HTTPStatus.CONFLICT)

    # Re-opening with the same partner before the previous channel settles
    # must also be rejected.
    reopen_response = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    ).send().response
    assert_proper_response(reopen_response, HTTPStatus.CONFLICT)
@raise_on_failure
@pytest.mark.parametrize("number_of_nodes", [2])
@pytest.mark.parametrize("channels_per_node", [0])
@pytest.mark.parametrize("enable_rest_api", [True])
def test_api_channel_close_insufficient_eth(
    api_server_test_instance: APIServer, token_addresses, reveal_timeout
):
    """Open a channel, drain the node's ETH, and verify that closing the
    channel fails with 402 PAYMENT_REQUIRED and an 'insufficient ETH' error."""
    partner_address = "0x61C808D82A3Ac53231750daDc13c777b59310bD9"
    token_address = token_addresses[0]
    settle_timeout = 1650
    balance = 0
    channel_identifier = 1

    channel_data_obj = {
        "partner_address": partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
    }

    # Open a brand-new channel with the partner.
    open_response = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    ).send().response
    assert_proper_response(open_response, status_code=HTTPStatus.CREATED)

    expected = dict(channel_data_obj)
    expected.update(
        balance=str(balance),
        state=ChannelState.STATE_OPENED.value,
        reveal_timeout=str(reveal_timeout),
        channel_identifier=str(channel_identifier),
        total_deposit="0",
    )
    assert check_dict_nested_attrs(get_json_response(open_response), expected)

    # Burn all of the node's ETH, then attempt an on-chain close.
    burn_eth(api_server_test_instance.rest_api.raiden_api.raiden.rpc_client)
    close_response = grequests.patch(
        api_url_for(
            api_server_test_instance,
            "channelsresourcebytokenandpartneraddress",
            token_address=token_address,
            partner_address=partner_address,
        ),
        json={"state": ChannelState.STATE_CLOSED.value},
    ).send().response
    assert_proper_response(close_response, HTTPStatus.PAYMENT_REQUIRED)
    assert "insufficient ETH" in get_json_response(close_response)["errors"]
@raise_on_failure
@pytest.mark.parametrize("number_of_nodes", [1])
@pytest.mark.parametrize("channels_per_node", [0])
@pytest.mark.parametrize("enable_rest_api", [True])
def test_api_channel_open_channel_invalid_input(
    api_server_test_instance: APIServer, token_addresses, reveal_timeout
):
    """Channel-open requests with out-of-range settle timeouts, or with an
    unregistered token address, must all be rejected with 409 CONFLICT."""
    partner_address = "0x61C808D82A3Ac53231750daDc13c777b59310bD9"
    token_address = token_addresses[0]
    channel_data_obj = {
        "partner_address": partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(TEST_SETTLE_TIMEOUT_MIN - 1),
        "reveal_timeout": str(reveal_timeout),
    }

    def attempt_open():
        # Issue the channel-open PUT with the current payload and expect
        # the API to answer with a CONFLICT error.
        response = grequests.put(
            api_url_for(api_server_test_instance, "channelsresource"),
            json=channel_data_obj,
        ).send().response
        assert_response_with_error(response, status_code=HTTPStatus.CONFLICT)

    # Settle timeout below the allowed minimum.
    attempt_open()

    # Settle timeout above the allowed maximum.
    channel_data_obj["settle_timeout"] = str(TEST_SETTLE_TIMEOUT_MAX + 1)
    attempt_open()

    # Valid settle timeout, but a token address that is not registered.
    channel_data_obj["settle_timeout"] = str(TEST_SETTLE_TIMEOUT_MAX - 1)
    channel_data_obj["token_address"] = to_checksum_address(factories.make_address())
    attempt_open()
@raise_on_failure
@pytest.mark.parametrize("number_of_nodes", [1])
@pytest.mark.parametrize("channels_per_node", [0])
@pytest.mark.parametrize("enable_rest_api", [True])
def test_api_channel_state_change_errors(
    api_server_test_instance: APIServer, token_addresses, reveal_timeout
):
    """Verify that the channel PATCH endpoint rejects invalid state changes.

    Covers: an unknown state value, every mutually-exclusive pairing of
    state / total_deposit / total_withdraw / reveal_timeout, a PATCH with no
    body at all, and a deposit attempt after the channel has been closed.

    The original version repeated the same seven-line PATCH stanza nine
    times; the request is factored into a local helper and the conflicting
    payloads are driven from a table.
    """
    partner_address = "0x61C808D82A3Ac53231750daDc13c777b59310bD9"
    token_address = token_addresses[0]
    settle_timeout = 1650
    channel_data_obj = {
        "partner_address": partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
        "reveal_timeout": str(reveal_timeout),
    }
    request = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CREATED)

    channel_url = api_url_for(
        api_server_test_instance,
        "channelsresourcebytokenandpartneraddress",
        token_address=token_address,
        partner_address=partner_address,
    )

    def patch_channel(body=None):
        # One PATCH against the channel resource; `body=None` sends no JSON
        # payload at all (distinct from `json=None`). Returns the response.
        if body is None:
            return grequests.patch(channel_url).send().response
        return grequests.patch(channel_url, json=body).send().response

    # A state value the API does not know is a BAD_REQUEST.
    assert_response_with_error(patch_channel(dict(state="inlimbo")), HTTPStatus.BAD_REQUEST)

    # state / total_deposit / total_withdraw / reveal_timeout are mutually
    # exclusive; any combination of two must be rejected with CONFLICT.
    for conflicting_body in (
        dict(state=ChannelState.STATE_CLOSED.value, total_deposit="200"),
        dict(state=ChannelState.STATE_CLOSED.value, total_withdraw="200"),
        dict(total_deposit="500", total_withdraw="200"),
        dict(state=ChannelState.STATE_CLOSED.value, reveal_timeout="50"),
        dict(total_deposit="500", reveal_timeout="50"),
        dict(total_withdraw="500", reveal_timeout="50"),
    ):
        assert_response_with_error(patch_channel(conflicting_body), HTTPStatus.CONFLICT)

    # A PATCH without any arguments is a BAD_REQUEST.
    assert_response_with_error(patch_channel(), HTTPStatus.BAD_REQUEST)

    # A plain close request is valid: close and settle for real.
    assert_proper_response(patch_channel(dict(state=ChannelState.STATE_CLOSED.value)))

    # Depositing into the now-closed channel must fail.
    assert_response_with_error(patch_channel(dict(total_deposit="500")), HTTPStatus.CONFLICT)
@raise_on_failure
@pytest.mark.parametrize("number_of_nodes", [2])
@pytest.mark.parametrize("deposit", [1000])
@pytest.mark.parametrize("enable_rest_api", [True])
def test_api_channel_withdraw(
    api_server_test_instance: APIServer, raiden_network: List[RaidenService], token_addresses
):
    """Exercise the total_withdraw PATCH endpoint: zero and over-balance
    withdraws fail, a valid withdraw succeeds, and repeating the same
    withdraw (which would exceed the balance) fails again."""
    _, app1 = raiden_network
    token_address = token_addresses[0]
    partner_address = app1.address
    channel_url = api_url_for(
        api_server_test_instance,
        "channelsresourcebytokenandpartneraddress",
        token_address=token_address,
        partner_address=partner_address,
    )

    scenarios = (
        # (total_withdraw, response checker, expected HTTP status)
        ("0", assert_response_with_error, HTTPStatus.CONFLICT),     # zero amount
        ("1500", assert_response_with_error, HTTPStatus.CONFLICT),  # larger than balance
        ("750", assert_response_with_code, HTTPStatus.OK),          # valid amount
        # Same amount again would sum up to more than the balance.
        ("750", assert_response_with_error, HTTPStatus.CONFLICT),
    )
    for amount, check_response, status in scenarios:
        response = grequests.patch(
            channel_url, json=dict(total_withdraw=amount)
        ).send().response
        check_response(response, status)
@raise_on_failure
@pytest.mark.parametrize("number_of_nodes", [2])
@pytest.mark.parametrize("deposit", [0])
@pytest.mark.parametrize("enable_rest_api", [True])
def test_api_channel_set_reveal_timeout(
    api_server_test_instance: APIServer,
    raiden_network: List[RaidenService],
    token_addresses,
    settle_timeout,
):
    """Setting reveal_timeout to an invalid value (0, or larger than the
    settle timeout) must fail; a valid value is accepted and must be
    reflected in the node's channel state."""
    app0, app1 = raiden_network
    token_address = token_addresses[0]
    partner_address = app1.address
    channel_url = api_url_for(
        api_server_test_instance,
        "channelsresourcebytokenandpartneraddress",
        token_address=token_address,
        partner_address=partner_address,
    )

    # Zero is rejected.
    response = grequests.patch(channel_url, json=dict(reveal_timeout=0)).send().response
    assert_response_with_error(response, HTTPStatus.CONFLICT)

    # A value beyond the settle timeout is rejected.
    response = grequests.patch(
        channel_url, json=dict(reveal_timeout=settle_timeout + 1)
    ).send().response
    assert_response_with_error(response, HTTPStatus.CONFLICT)

    # Half the settle timeout is a valid reveal timeout.
    reveal_timeout = int(settle_timeout / 2)
    response = grequests.patch(
        channel_url, json=dict(reveal_timeout=reveal_timeout)
    ).send().response
    assert_response_with_code(response, HTTPStatus.OK)

    # Confirm the new reveal timeout landed in the channel state.
    token_network_address = views.get_token_network_address_by_token_address(
        views.state_from_raiden(app0), app0.default_registry.address, token_address
    )
    assert token_network_address
    channel_state = views.get_channelstate_by_token_network_and_partner(
        chain_state=views.state_from_raiden(app0),
        token_network_address=token_network_address,
        partner_address=app1.address,
    )
    assert channel_state
    assert channel_state.reveal_timeout == reveal_timeout
@raise_on_failure
@pytest.mark.parametrize("number_of_nodes", [1])
@pytest.mark.parametrize("channels_per_node", [0])
@pytest.mark.parametrize("deposit", [DEPOSIT_FOR_TEST_API_DEPOSIT_LIMIT])
@pytest.mark.parametrize("enable_rest_api", [True])
def test_api_channel_deposit_limit(
    api_server_test_instance,
    proxy_manager,
    token_network_registry_address,
    token_addresses,
    reveal_timeout,
):
    """Opening a channel with a deposit exactly at the on-chain participant
    deposit limit succeeds; depositing one token unit above the limit is
    rejected with 409 CONFLICT and a descriptive error message."""
    token_address = token_addresses[0]
    # Look up the participant deposit limit directly from the on-chain
    # token network contract for this token.
    registry = proxy_manager.token_network_registry(
        token_network_registry_address, BLOCK_ID_LATEST
    )
    token_network_address = registry.get_token_network(token_address, BLOCK_ID_LATEST)
    token_network = proxy_manager.token_network(token_network_address, BLOCK_ID_LATEST)
    deposit_limit = token_network.channel_participant_deposit_limit(BLOCK_ID_LATEST)
    # let's create a new channel and deposit exactly the limit amount
    first_partner_address = "0x61C808D82A3Ac53231750daDc13c777b59310bD9"
    settle_timeout = 1650
    channel_data_obj = {
        "partner_address": first_partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
        "reveal_timeout": str(reveal_timeout),
        "total_deposit": str(deposit_limit),
    }
    request = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CREATED)
    first_channel_identifier = 1
    json_response = get_json_response(response)
    expected_response = channel_data_obj.copy()
    expected_response.update(
        {
            "balance": str(deposit_limit),
            "state": ChannelState.STATE_OPENED.value,
            "channel_identifier": str(first_channel_identifier),
            "total_deposit": str(deposit_limit),
        }
    )
    assert check_dict_nested_attrs(json_response, expected_response)
    # now let's open a channel and deposit a bit more than the limit
    second_partner_address = "0x29FA6cf0Cce24582a9B20DB94Be4B6E017896038"
    balance_failing = deposit_limit + 1  # token has two digits
    channel_data_obj = {
        "partner_address": second_partner_address,
        "token_address": to_checksum_address(token_address),
        "settle_timeout": str(settle_timeout),
        "reveal_timeout": str(reveal_timeout),
        "total_deposit": str(balance_failing),
    }
    request = grequests.put(
        api_url_for(api_server_test_instance, "channelsresource"), json=channel_data_obj
    )
    response = request.send().response
    assert_proper_response(response, HTTPStatus.CONFLICT)
    json_response = get_json_response(response)
    # The error message spells out the concrete failing amount (limit + 1);
    # 75000000000000001 matches DEPOSIT_FOR_TEST_API_DEPOSIT_LIMIT's limit + 1.
    assert (
        json_response["errors"]
        == "Deposit of 75000000000000001 is larger than the channel participant deposit limit"
    )
| 37.507883 | 94 | 0.711502 | 3,718 | 33,307 | 6.008338 | 0.073427 | 0.054255 | 0.049331 | 0.051703 | 0.818703 | 0.784279 | 0.759658 | 0.749541 | 0.728188 | 0.716191 | 0 | 0.017988 | 0.205512 | 33,307 | 887 | 95 | 37.550169 | 0.826204 | 0.069325 | 0 | 0.706965 | 0 | 0 | 0.131074 | 0.053542 | 0 | 0 | 0.014937 | 0 | 0.089356 | 1 | 0.013141 | false | 0 | 0.024967 | 0 | 0.038108 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed7ea789807fe49eb8b661c64356853601e612ed | 143,553 | py | Python | core/regrid.py | jmccreight/WrfHydroForcing | d122b17a8f58d1a6db58eb18f937f737fe1b27f8 | [
"BSD-3-Clause"
] | null | null | null | core/regrid.py | jmccreight/WrfHydroForcing | d122b17a8f58d1a6db58eb18f937f737fe1b27f8 | [
"BSD-3-Clause"
] | null | null | null | core/regrid.py | jmccreight/WrfHydroForcing | d122b17a8f58d1a6db58eb18f937f737fe1b27f8 | [
"BSD-3-Clause"
] | null | null | null | """
Regridding module file for regridding input forcing files.
"""
import os
import sys
import traceback
import time
import ESMF
import numpy as np
from core import err_handler
from core import ioMod
from core import timeInterpMod
# TODO: import these from forcingInputMod (not working currently ¯\_(ツ)_/¯)
NETCDF = "NETCDF"
GRIB2 = "GRIB2"

next_file_number = 0


def mkfilename():
    """Return the next unique file-number string for scratch-file names.

    Increments the module-level counter so every call within a run yields
    a distinct suffix ("1", "2", ...).
    """
    global next_file_number
    next_file_number += 1
    return str(next_file_number)
def static_vars(**kwargs):
    """Decorator factory attaching the given keyword arguments to the
    decorated function as attributes (a poor-man's C 'static' variable)."""
    def decorate(func):
        for name, value in kwargs.items():
            setattr(func, name, value)
        return func
    return decorate
def create_link(name, input_file, tmpFile, config_options, mpi_config):
    """On MPI rank 0, symlink *input_file* to *tmpFile* so downstream code
    reads the forcing file from the scratch location.

    :param name: Human-readable product name used in log messages (e.g. "HRRR").
    :param input_file: Path of the existing input forcing file.
    :param tmpFile: Path of the symlink to create.
    :param config_options: Global configuration/logging options object.
    :param mpi_config: MPI configuration; only rank 0 touches the filesystem.
    :return: None. On failure, a critical error is logged and
             check_program_status aborts across all ranks.
    """
    if mpi_config.rank == 0:
        try:
            config_options.statusMsg = name + " file being used: " + input_file
            err_handler.log_msg(config_options, mpi_config)
            os.symlink(input_file, tmpFile)
        except OSError:
            # Narrowed from a bare `except:` - os.symlink failures (missing
            # path, permissions, pre-existing target) all raise OSError
            # subclasses; anything else should surface, not be masked as
            # a link failure.
            config_options.errMsg = "Unable to create link: " + input_file + " to: " + tmpFile
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)
def regrid_conus_hrrr(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Function for handling regridding of HRRR data.

    Converts the GRIB2 input to a scratch NetCDF file (via wgrib2) unless it
    is already NetCDF, computes ESMF regridding weights when needed (also
    regridding the surface elevation used for downscaling), regrids each
    forcing variable onto the WRF-Hydro domain, and removes the scratch file.
    :param input_forcings: forcing-source state object (file paths, ESMF
                           fields, weights, regridded output arrays).
    :param config_options: global configuration/logging options.
    :param wrf_hydro_geo_meta: WRF-Hydro geospatial metadata for the target domain.
    :param mpi_config: MPI configuration; rank 0 performs all file I/O.
    :return: None - results are stored on input_forcings.regridded_forcings2.
    """
    # If the expected file is missing, this means we are allowing missing files, simply
    # exit out of this routine as the regridded fields have already been set to NDV.
    if not os.path.isfile(input_forcings.file_in2):
        if mpi_config.rank == 0:
            config_options.statusMsg = "No HRRR in_2 file found for this timestep."
            err_handler.log_msg(config_options, mpi_config)
        # NOTE(review): duplicate log_msg call (also executed on non-zero
        # ranks) - possibly unintended; confirm against err_handler semantics.
        err_handler.log_msg(config_options, mpi_config)
        return

    # Check to see if the regrid complete flag for this
    # output time step is true. This entails the necessary
    # inputs have already been regridded and we can move on.
    if input_forcings.regridComplete:
        if mpi_config.rank == 0:
            config_options.statusMsg = "No HRRR regridding required for this timestep."
            err_handler.log_msg(config_options, mpi_config)
        # NOTE(review): duplicate log_msg call as above - confirm.
        err_handler.log_msg(config_options, mpi_config)
        return

    # Create a path for a temporary NetCDF file
    input_forcings.tmpFile = config_options.scratch_dir + "/" + "HRRR_CONUS_TMP-{}.nc".format(mkfilename())

    if input_forcings.fileType != NETCDF:
        # This file shouldn't exist.... but if it does (previously failed
        # execution of the program), remove it.....
        if mpi_config.rank == 0:
            if os.path.isfile(input_forcings.tmpFile):
                config_options.statusMsg = "Found old temporary file: " + input_forcings.tmpFile + " - Removing....."
                err_handler.log_warning(config_options, mpi_config)
                try:
                    os.remove(input_forcings.tmpFile)
                except OSError:
                    config_options.errMsg = "Unable to remove temporary file: " + input_forcings.tmpFile
                    err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Build the wgrib2 -match expressions selecting each forcing variable
        # (APCP uses an accumulation window; others an instantaneous fcst hour).
        fields = []
        for force_count, grib_var in enumerate(input_forcings.grib_vars):
            if mpi_config.rank == 0:
                config_options.statusMsg = "Converting CONUS HRRR Variable: " + grib_var
                err_handler.log_msg(config_options, mpi_config)
            time_str = "{}-{} hour acc fcst".format(input_forcings.fcst_hour1, input_forcings.fcst_hour2) \
                if grib_var == 'APCP' else str(input_forcings.fcst_hour2) + " hour fcst"
            fields.append(':' + grib_var + ':' +
                          input_forcings.grib_levels[force_count] + ':'
                          + time_str + ":")
        # Also pull the surface elevation field, needed for downscaling.
        fields.append(":(HGT):(surface):")

        # Create a temporary NetCDF file from the GRIB2 file.
        cmd = '$WGRIB2 -match "(' + '|'.join(fields) + ')" ' + input_forcings.file_in2 + \
              " -netcdf " + input_forcings.tmpFile
        id_tmp = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFile, cmd,
                                  config_options, mpi_config, inputVar=None)
        err_handler.check_program_status(config_options, mpi_config)
    else:
        # Input is already NetCDF; just link it into the scratch location.
        create_link("HRRR", input_forcings.file_in2, input_forcings.tmpFile, config_options, mpi_config)
        id_tmp = ioMod.open_netcdf_forcing(input_forcings.tmpFile, config_options, mpi_config)

    for force_count, grib_var in enumerate(input_forcings.grib_vars):
        if mpi_config.rank == 0:
            config_options.statusMsg = "Processing Conus HRRR Variable: " + grib_var
            err_handler.log_msg(config_options, mpi_config)

        # Weights only need recalculating when the input grid changed
        # (or on the first pass).
        calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings,
                                               config_options, wrf_hydro_geo_meta, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if calc_regrid_flag:
            if mpi_config.rank == 0:
                config_options.statusMsg = "Calculating HRRR regridding weights."
                err_handler.log_msg(config_options, mpi_config)
            calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # # Read in the HRRR height field, which is used for downscaling purposes.
            # if mpi_config.rank == 0:
            #     config_options.statusMsg = "Reading in HRRR elevation data."
            #     err_handler.log_msg(config_options, mpi_config)
            # cmd = "$WGRIB2 " + input_forcings.file_in2 + " -match " + \
            #       "\":(HGT):(surface):\" " + \
            #       " -netcdf " + input_forcings.tmpFileHeight
            # id_tmp_height = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFileHeight,
            #                                  cmd, config_options, mpi_config, 'HGT_surface')
            # err_handler.check_program_status(config_options, mpi_config)

            # Regrid the height variable.
            var_tmp = None
            if mpi_config.rank == 0:
                try:
                    var_tmp = id_tmp.variables['HGT_surface'][0, :, :]
                except (ValueError, KeyError, AttributeError) as err:
                    config_options.errMsg = "Unable to extract HRRR elevation from " + \
                                            input_forcings.tmpFile + ": " + str(err)
            err_handler.check_program_status(config_options, mpi_config)

            # Distribute rank 0's full grid to the per-rank sub-domains.
            var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to place input NetCDF HRRR data into the ESMF field object: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            if mpi_config.rank == 0:
                config_options.statusMsg = "Regridding HRRR surface elevation data to the WRF-Hydro domain."
                err_handler.log_msg(config_options, mpi_config)
            try:
                input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                         input_forcings.esmf_field_out)
            except ValueError as ve:
                config_options.errMsg = "Unable to regrid HRRR surface elevation using ESMF: " + str(ve)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Set any pixel cells outside the input domain to the global missing value.
            try:
                input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                    config_options.globalNdv
            except (ValueError, ArithmeticError) as npe:
                config_options.errMsg = "Unable to perform HRRR mask search on elevation data: " + str(npe)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.height[:, :] = input_forcings.esmf_field_out.data
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract regridded HRRR elevation data from ESMF: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Close the temporary NetCDF file and remove it.
            # if mpi_config.rank == 0:
            #     try:
            #         id_tmp_height.close()
            #     except OSError:
            #         config_options.errMsg = "Unable to close temporary file: " + input_forcings.tmpFileHeight
            #         err_handler.log_critical(config_options, mpi_config)
            #
            #     try:
            #         os.remove(input_forcings.tmpFileHeight)
            #     except OSError:
            #         config_options.errMsg = "Unable to remove temporary file: " + input_forcings.tmpFileHeight
            #         err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

        # Regrid the input variables.
        var_tmp = None
        if mpi_config.rank == 0:
            config_options.statusMsg = "Processing input HRRR variable: " + \
                                       input_forcings.netcdf_var_names[force_count]
            err_handler.log_msg(config_options, mpi_config)
            try:
                var_tmp = id_tmp.variables[input_forcings.netcdf_var_names[force_count]][0, :, :]
                if grib_var == "APCP":
                    var_tmp /= 3600  # convert hourly accumulated precip to instantaneous rate
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract: " + input_forcings.netcdf_var_names[force_count] + \
                                        " from: " + input_forcings.tmpFile + " (" + str(err) + ")"
                err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to place input HRRR data into ESMF field: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if mpi_config.rank == 0:
            config_options.statusMsg = "Regridding Input HRRR Field: " + input_forcings.netcdf_var_names[force_count]
            err_handler.log_msg(config_options, mpi_config)
        try:
            input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                     input_forcings.esmf_field_out)
        except ValueError as ve:
            config_options.errMsg = "Unable to regrid input HRRR forcing data: " + str(ve)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Set any pixel cells outside the input domain to the global missing value.
        try:
            input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                config_options.globalNdv
        except (ValueError, ArithmeticError) as npe:
            config_options.errMsg = "Unable to perform mask test on regridded HRRR forcings: " + str(npe)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.esmf_field_out.data
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to extract regridded HRRR forcing data from the ESMF field: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # If we are on the first timestep, set the previous regridded field to be
        # the latest as there are no states for time 0.
        if config_options.current_output_step == 1:
            input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :]
        # mpi_config.comm.barrier()

    # Close the temporary NetCDF file and remove it.
    if mpi_config.rank == 0:
        try:
            id_tmp.close()
        except OSError:
            config_options.errMsg = "Unable to close NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)

        try:
            os.remove(input_forcings.tmpFile)
        except OSError:
            config_options.errMsg = "Unable to remove NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)
    # mpi_config.comm.barrier()
def regrid_conus_rap(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Function for handling regridding of RAP 13km conus data.

    Extracts each configured GRIB variable (via wgrib2 into a temporary NetCDF
    file when the input is GRIB2), regrids it onto the WRF-Hydro domain with the
    ESMF regrid object stored on input_forcings, masks cells outside the input
    domain with the global NDV, and stores the result in
    input_forcings.regridded_forcings2. The surface elevation field
    (HGT_surface) is also regridded for later downscaling when new regridding
    weights are calculated.

    :param input_forcings: input forcings object for the RAP product (file
                           paths, GRIB variable lists, ESMF fields, output arrays).
    :param config_options: configuration object carrying run options and log state.
    :param wrf_hydro_geo_meta: geospatial metadata for the WRF-Hydro domain.
    :param mpi_config: MPI configuration/communicator wrapper.
    :return: None - results are written onto input_forcings in place.
    """
    # If the expected file is missing, this means we are allowing missing files, simply
    # exit out of this routine as the regridded fields have already been set to NDV.
    if not os.path.isfile(input_forcings.file_in2):
        return

    # Check to see if the regrid complete flag for this
    # output time step is true. This entails the necessary
    # inputs have already been regridded and we can move on.
    if input_forcings.regridComplete:
        if mpi_config.rank == 0:
            config_options.statusMsg = "No RAP regridding required for this timestep."
            err_handler.log_msg(config_options, mpi_config)
        return

    # Create a path for a temporary NetCDF file
    input_forcings.tmpFile = config_options.scratch_dir + "/" + "RAP_CONUS_TMP-{}.nc".format(mkfilename())
    err_handler.check_program_status(config_options, mpi_config)

    if input_forcings.fileType != NETCDF:
        # This file shouldn't exist.... but if it does (previously failed
        # execution of the program), remove it.....
        if mpi_config.rank == 0:
            if os.path.isfile(input_forcings.tmpFile):
                config_options.statusMsg = "Found old temporary file: " + \
                                           input_forcings.tmpFile + " - Removing....."
                err_handler.log_warning(config_options, mpi_config)
                try:
                    os.remove(input_forcings.tmpFile)
                except OSError:
                    config_options.errMsg = "Unable to remove file: " + input_forcings.tmpFile
                    err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        fields = []
        for force_count, grib_var in enumerate(input_forcings.grib_vars):
            if mpi_config.rank == 0:
                config_options.statusMsg = "Converting CONUS RAP Variable: " + grib_var
                err_handler.log_msg(config_options, mpi_config)
            # APCP is an accumulated field, so its wgrib2 match string uses an
            # accumulation window rather than an instantaneous forecast hour.
            time_str = "{}-{} hour acc fcst".format(input_forcings.fcst_hour1, input_forcings.fcst_hour2) \
                if grib_var == 'APCP' else str(input_forcings.fcst_hour2) + " hour fcst"
            fields.append(':' + grib_var + ':' +
                          input_forcings.grib_levels[force_count] + ':'
                          + time_str + ":")
        # Always pull surface elevation along for downscaling.
        fields.append(":(HGT):(surface):")

        # Create a temporary NetCDF file from the GRIB2 file.
        cmd = '$WGRIB2 -match "(' + '|'.join(fields) + ')" ' + input_forcings.file_in2 + \
              " -netcdf " + input_forcings.tmpFile
        id_tmp = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFile, cmd,
                                  config_options, mpi_config, inputVar=None)
        err_handler.check_program_status(config_options, mpi_config)
    else:
        # Input is already NetCDF; just link it into place and open it.
        create_link("RAP", input_forcings.file_in2, input_forcings.tmpFile, config_options, mpi_config)
        id_tmp = ioMod.open_netcdf_forcing(input_forcings.tmpFile, config_options, mpi_config)

    for force_count, grib_var in enumerate(input_forcings.grib_vars):
        if mpi_config.rank == 0:
            config_options.statusMsg = "Processing Conus RAP Variable: " + grib_var
            err_handler.log_msg(config_options, mpi_config)
        calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings,
                                               config_options, wrf_hydro_geo_meta, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if calc_regrid_flag:
            if mpi_config.rank == 0:
                config_options.statusMsg = "Calculating RAP regridding weights."
                err_handler.log_msg(config_options, mpi_config)
            calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Regrid the RAP height field, which is used for downscaling purposes.
            var_tmp = None
            if mpi_config.rank == 0:
                try:
                    var_tmp = id_tmp.variables['HGT_surface'][0, :, :]
                except (ValueError, KeyError, AttributeError) as err:
                    # BUGFIX: the original concatenated the Dataset object
                    # (id_tmp) into the message, which would raise TypeError
                    # inside the error handler; use the file path instead.
                    config_options.errMsg = "Unable to extract HGT_surface from : " + input_forcings.tmpFile + \
                                            " (" + str(err) + ")"
                    err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to place temporary RAP elevation variable into ESMF field: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            if mpi_config.rank == 0:
                config_options.statusMsg = "Regridding RAP surface elevation data to the WRF-Hydro domain."
                err_handler.log_msg(config_options, mpi_config)
            try:
                input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                         input_forcings.esmf_field_out)
            except ValueError as ve:
                config_options.errMsg = "Unable to regrid RAP elevation data using ESMF: " + str(ve)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Set any pixel cells outside the input domain to the global missing value.
            try:
                input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                    config_options.globalNdv
            except (ValueError, ArithmeticError) as npe:
                config_options.errMsg = "Unable to perform mask search on RAP elevation data: " + str(npe)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.height[:, :] = input_forcings.esmf_field_out.data
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to place RAP ESMF elevation field into local array: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

        # Regrid the input variables.
        var_tmp = None
        if mpi_config.rank == 0:
            try:
                var_tmp = id_tmp.variables[input_forcings.netcdf_var_names[force_count]][0, :, :]
                if grib_var == "APCP":
                    var_tmp /= 3600  # convert hourly accumulated precip to instantaneous rate
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract: " + input_forcings.netcdf_var_names[force_count] + \
                                        " from: " + input_forcings.tmpFile + \
                                        " (" + str(err) + ")"
                err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to place local RAP array into ESMF field: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if mpi_config.rank == 0:
            config_options.statusMsg = "Regridding Input RAP Field: " + input_forcings.netcdf_var_names[force_count]
            err_handler.log_msg(config_options, mpi_config)
        try:
            input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                     input_forcings.esmf_field_out)
        except ValueError as ve:
            config_options.errMsg = "Unable to regrid RAP variable: " + input_forcings.netcdf_var_names[force_count] \
                                    + str(ve)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Set any pixel cells outside the input domain to the global missing value.
        try:
            input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                config_options.globalNdv
        except (ValueError, ArithmeticError) as npe:
            config_options.errMsg = "Unable to run mask calculation on RAP variable: " + \
                                    input_forcings.netcdf_var_names[force_count] + " (" + str(npe) + ")"
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.esmf_field_out.data
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to place RAP ESMF data into local array: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # If we are on the first timestep, set the previous regridded field to be
        # the latest as there are no states for time 0.
        # (The original code repeated this block twice back-to-back; the
        # redundant second copy was removed - the assignment is idempotent.)
        if config_options.current_output_step == 1:
            input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :]
        err_handler.check_program_status(config_options, mpi_config)

    # Close the temporary NetCDF file and remove it.
    if mpi_config.rank == 0:
        try:
            id_tmp.close()
        except OSError:
            config_options.errMsg = "Unable to close NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)
        try:
            os.remove(input_forcings.tmpFile)
        except OSError:
            config_options.errMsg = "Unable to remove NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)
def regrid_cfsv2(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Function for handling regridding of global CFSv2 forecast data.

    When NWM NLDAS bias correction is active, the coarse (un-regridded) CFSv2
    fields are kept on input_forcings.coarse_input_forcings1/2 and the
    regridded arrays are filled with the global NDV - regridding happens later
    in the bias-correction routine. Otherwise each variable is regridded to
    the WRF-Hydro domain here, with out-of-domain cells set to the global NDV.

    :param input_forcings: input forcings object for CFSv2 (file paths, GRIB
                           variable lists, ESMF fields, output arrays).
    :param config_options: configuration object carrying run options and log state.
    :param wrf_hydro_geo_meta: geospatial metadata for the WRF-Hydro domain.
    :param mpi_config: MPI configuration/communicator wrapper.
    :return: None - results are written onto input_forcings in place.
    """
    # If the expected file is missing, this means we are allowing missing files, simply
    # exit out of this routine as the regridded fields have already been set to NDV.
    if not os.path.isfile(input_forcings.file_in2):
        return

    # Check to see if the regrid complete flag for this
    # output time step is true. This entails the necessary
    # inputs have already been regridded and we can move on.
    if input_forcings.regridComplete:
        # Check to see if we are running NWM-custom interpolation/bias
        # correction on incoming CFSv2 data. Because of the nature, we
        # need to regrid bias-corrected data every hour.
        if mpi_config.rank == 0:
            config_options.statusMsg = "No need to read in new CFSv2 data at this time."
            err_handler.log_msg(config_options, mpi_config)
        return

    # Create a path for a temporary NetCDF file
    input_forcings.tmpFile = config_options.scratch_dir + "/" + "CFSv2_TMP-{}.nc".format(mkfilename())
    err_handler.check_program_status(config_options, mpi_config)

    if input_forcings.fileType != NETCDF:
        # This file shouldn't exist.... but if it does (previously failed
        # execution of the program), remove it.....
        if mpi_config.rank == 0:
            if os.path.isfile(input_forcings.tmpFile):
                config_options.statusMsg = "Found old temporary file: " + \
                                           input_forcings.tmpFile + " - Removing....."
                err_handler.log_warning(config_options, mpi_config)
                try:
                    os.remove(input_forcings.tmpFile)
                except OSError as err:
                    config_options.errMsg = "Unable to remove previous temporary file: " \
                                            + input_forcings.tmpFile + str(err)
                    err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        fields = []
        for force_count, grib_var in enumerate(input_forcings.grib_vars):
            if mpi_config.rank == 0:
                config_options.statusMsg = "Converting CFSv2 Variable: " + grib_var
                err_handler.log_msg(config_options, mpi_config)
            fields.append(':' + grib_var + ':' +
                          input_forcings.grib_levels[force_count] + ':'
                          + str(input_forcings.fcst_hour2) + " hour fcst:")
        # Always pull surface elevation along for downscaling.
        fields.append(":(HGT):(surface):")

        # Create a temporary NetCDF file from the GRIB2 file.
        cmd = '$WGRIB2 -match "(' + '|'.join(fields) + ')" ' + input_forcings.file_in2 + \
              " -netcdf " + input_forcings.tmpFile
        id_tmp = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFile, cmd,
                                  config_options, mpi_config, inputVar=None)
        err_handler.check_program_status(config_options, mpi_config)
    else:
        # Input is already NetCDF; just link it into place and open it.
        create_link("CFSv2", input_forcings.file_in2, input_forcings.tmpFile, config_options, mpi_config)
        id_tmp = ioMod.open_netcdf_forcing(input_forcings.tmpFile, config_options, mpi_config)

    for force_count, grib_var in enumerate(input_forcings.grib_vars):
        if mpi_config.rank == 0:
            config_options.statusMsg = "Processing CFSv2 Variable: " + grib_var
            err_handler.log_msg(config_options, mpi_config)
        calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings,
                                               config_options, wrf_hydro_geo_meta, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if calc_regrid_flag:
            if mpi_config.rank == 0:
                config_options.statusMsg = "Calculate CFSv2 regridding weights."
                err_handler.log_msg(config_options, mpi_config)
            calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Regrid the CFSv2 height field, which is used for downscaling purposes.
            var_tmp = None
            if mpi_config.rank == 0:
                try:
                    var_tmp = id_tmp.variables['HGT_surface'][0, :, :]
                except (ValueError, KeyError, AttributeError) as err:
                    config_options.errMsg = "Unable to extract HGT_surface from file: " \
                                            + input_forcings.file_in2 + " (" + str(err) + ")"
                    err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to place CFSv2 elevation data into the ESMF field object: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            if mpi_config.rank == 0:
                config_options.statusMsg = "Regridding CFSv2 elevation data to the WRF-Hydro domain."
                err_handler.log_msg(config_options, mpi_config)
            try:
                input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                         input_forcings.esmf_field_out)
            except ValueError as ve:
                config_options.errMsg = "Unable to regrid CFSv2 elevation data to the WRF-Hydro domain: " + str(ve)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Set any pixel cells outside the input domain to the global missing value.
            try:
                input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                    config_options.globalNdv
            except (ValueError, ArithmeticError) as npe:
                config_options.errMsg = "Unable to run mask calculation on CFSv2 elevation data: " + str(npe)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.height[:, :] = input_forcings.esmf_field_out.data
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract CFSv2 regridded elevation data from ESMF field: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

        # Regrid the input variables.
        var_tmp = None
        if mpi_config.rank == 0:
            if not config_options.runCfsNldasBiasCorrect:
                config_options.statusMsg = "Regridding CFSv2 variable: " + \
                                           input_forcings.netcdf_var_names[force_count]
                err_handler.log_msg(config_options, mpi_config)
            try:
                var_tmp = id_tmp.variables[input_forcings.netcdf_var_names[force_count]][0, :, :]
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract: " + input_forcings.netcdf_var_names[force_count] + \
                                        " from file: " + input_forcings.tmpFile + " (" + str(err) + ")"
                err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Scatter the global CFSv2 data to the local processors.
        var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
        err_handler.check_program_status(config_options, mpi_config)

        # Assign local CFSv2 data to the input forcing object.. IF..... we are running the
        # bias correction. These grids are interpolated in a separate routine, AFTER bias
        # correction has taken place.
        if config_options.runCfsNldasBiasCorrect:
            if input_forcings.coarse_input_forcings1 is None:
                # We need to create NumPy arrays to hold the CFSv2 global data.
                input_forcings.coarse_input_forcings1 = np.empty([8, var_sub_tmp.shape[0], var_sub_tmp.shape[1]],
                                                                 np.float64)
            if input_forcings.coarse_input_forcings2 is None:
                # We need to create NumPy arrays to hold the CFSv2 global data.
                input_forcings.coarse_input_forcings2 = np.empty([8, var_sub_tmp.shape[0], var_sub_tmp.shape[1]],
                                                                 np.float64)
            try:
                input_forcings.coarse_input_forcings2[input_forcings.input_map_output[force_count], :, :] = var_sub_tmp
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to place local CFSv2 input variable: " + \
                                        input_forcings.netcdf_var_names[force_count] + \
                                        " into local numpy array. (" + str(err) + ")"
                # BUGFIX: the original set errMsg but never logged it, so the
                # failure was silently dropped; log critical like every other
                # error path so check_program_status() can abort cleanly.
                err_handler.log_critical(config_options, mpi_config)

            if config_options.current_output_step == 1:
                input_forcings.coarse_input_forcings1[input_forcings.input_map_output[force_count], :, :] = \
                    input_forcings.coarse_input_forcings2[input_forcings.input_map_output[force_count], :, :]
        else:
            input_forcings.coarse_input_forcings2 = None
            input_forcings.coarse_input_forcings1 = None
        err_handler.check_program_status(config_options, mpi_config)

        # Only regrid the current files if we did not specify the NLDAS2 NWM bias correction, which needs to take place
        # first before any regridding can take place. That takes place in the bias-correction routine.
        if not config_options.runCfsNldasBiasCorrect:
            try:
                input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to place CFSv2 forcing data into temporary ESMF field: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                         input_forcings.esmf_field_out)
            except ValueError as ve:
                config_options.errMsg = "Unable to regrid CFSv2 variable: " + \
                                        input_forcings.netcdf_var_names[force_count] + " (" + str(ve) + ")"
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Set any pixel cells outside the input domain to the global missing value.
            try:
                input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                    config_options.globalNdv
            except (ValueError, ArithmeticError) as npe:
                config_options.errMsg = "Unable to run mask calculation on CFSv2 variable: " + \
                                        input_forcings.netcdf_var_names[force_count] + " (" + str(npe) + ")"
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \
                    input_forcings.esmf_field_out.data
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract ESMF field data for CFSv2: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # If we are on the first timestep, set the previous regridded field to be
            # the latest as there are no states for time 0.
            if config_options.current_output_step == 1:
                input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \
                    input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :]
            err_handler.check_program_status(config_options, mpi_config)
        else:
            # Set regridded arrays to dummy values as they are regridded later in the bias correction routine.
            input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \
                config_options.globalNdv
            input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \
                config_options.globalNdv

    # Close the temporary NetCDF file and remove it.
    if mpi_config.rank == 0:
        try:
            id_tmp.close()
        except OSError:
            config_options.errMsg = "Unable to close NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    if mpi_config.rank == 0:
        try:
            os.remove(input_forcings.tmpFile)
        except OSError:
            config_options.errMsg = "Unable to remove NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)
def regrid_custom_hourly_netcdf(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Function for handling regridding of custom input NetCDF hourly forcing files.

    Opens the input NetCDF file directly (no GRIB2 conversion step), regrids
    each configured variable to the WRF-Hydro domain, masks out-of-domain
    cells with the global NDV, and stores results on
    input_forcings.regridded_forcings2. HGT_surface is required in the input
    file and is regridded for downscaling whenever new weights are calculated.

    :param input_forcings: input forcings object for the custom NetCDF product.
    :param config_options: configuration object carrying run options and log state.
    :param wrf_hydro_geo_meta: geospatial metadata for the WRF-Hydro domain.
    :param mpi_config: MPI configuration/communicator wrapper.
    :return: None - results are written onto input_forcings in place.
    :raises Exception: if HGT_surface is missing from the input file.
    """
    # If the expected file is missing, this means we are allowing missing files, simply
    # exit out of this routine as the regridded fields have already been set to NDV.
    if not os.path.isfile(input_forcings.file_in2):
        return

    # Check to see if the regrid complete flag for this
    # output time step is true. This entails the necessary
    # inputs have already been regridded and we can move on.
    if input_forcings.regridComplete:
        # BUGFIX: the "no regridding required" status message was previously
        # logged AFTER the early return (i.e. unconditionally, on timesteps
        # where regridding WAS performed). It now lives inside this branch,
        # matching the sibling regrid_* routines.
        if mpi_config.rank == 0:
            config_options.statusMsg = "No Custom Hourly NetCDF regridding required for this timestep."
            err_handler.log_msg(config_options, mpi_config)
        return

    # Open the input NetCDF file containing necessary data.
    id_tmp = ioMod.open_netcdf_forcing(input_forcings.file_in2, config_options, mpi_config)

    for force_count, nc_var in enumerate(input_forcings.netcdf_var_names):
        if mpi_config.rank == 0:
            config_options.statusMsg = "Processing Custom NetCDF Forcing Variable: " + \
                                       nc_var
            err_handler.log_msg(config_options, mpi_config)
        calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings,
                                               config_options, wrf_hydro_geo_meta, mpi_config)

        if calc_regrid_flag:
            calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config)

            # The height field is required for downscaling purposes.
            if 'HGT_surface' not in id_tmp.variables.keys():
                config_options.errMsg = "Unable to locate HGT_surface in: " + input_forcings.file_in2
                raise Exception(config_options.errMsg)

            # Regrid the height variable.
            if mpi_config.rank == 0:
                var_tmp = id_tmp.variables['HGT_surface'][0, :, :]
            else:
                var_tmp = None
            var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
            input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
            input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                     input_forcings.esmf_field_out)
            # Set any pixel cells outside the input domain to the global missing value.
            input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                config_options.globalNdv
            input_forcings.height[:, :] = input_forcings.esmf_field_out.data

        # Regrid the input variables.
        if mpi_config.rank == 0:
            var_tmp = id_tmp.variables[input_forcings.netcdf_var_names[force_count]][0, :, :]
        else:
            var_tmp = None
        var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
        input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
        input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                 input_forcings.esmf_field_out)
        # Set any pixel cells outside the input domain to the global missing value.
        input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
            config_options.globalNdv
        input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \
            input_forcings.esmf_field_out.data

        # If we are on the first timestep, set the previous regridded field to be
        # the latest as there are no states for time 0.
        if config_options.current_output_step == 1:
            input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :]

    # Close the input NetCDF file and remove the temporary file.
    # NOTE(review): this routine opens file_in2 directly and never creates
    # tmpFile; removing input_forcings.tmpFile here may target a stale file
    # from a prior product (or fail if tmpFile was never set) - confirm intent.
    if mpi_config.rank == 0:
        try:
            id_tmp.close()
        except OSError:
            config_options.errMsg = "Unable to close NetCDF file: " + input_forcings.tmpFile
            err_handler.err_out(config_options)
        try:
            os.remove(input_forcings.tmpFile)
        except OSError:
            config_options.errMsg = "Unable to remove NetCDF file: " + input_forcings.tmpFile
            err_handler.err_out(config_options)
@static_vars(last_file=None)
def regrid_gfs(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Regrid one timestep of input 13km GFS data from GRIB2 files onto the
    WRF-Hydro domain.

    The @static_vars decorator gives this function a persistent ``last_file``
    attribute so the expensive wgrib2 -> NetCDF conversion can be skipped when
    the same input file is processed again on the next call.

    :param input_forcings: GFS input-forcings object; regridded output is stored
                           on its regridded_forcings1/2 and height arrays.
    :param config_options: configuration object; also carries statusMsg/errMsg.
    :param wrf_hydro_geo_meta: WRF-Hydro geospatial metadata (target domain).
    :param mpi_config: MPI configuration (rank checks, scatter, barrier).
    :return: None
    """
    # If the expected file is missing, this means we are allowing missing files, simply
    # exit out of this routine as the regridded fields have already been set to NDV.
    if not os.path.isfile(input_forcings.file_in2):
        return

    # Check to see if the regrid complete flag for this
    # output time step is true. This entails the necessary
    # inputs have already been regridded and we can move on.
    if input_forcings.regridComplete:
        if mpi_config.rank == 0:
            config_options.statusMsg = "No 13km GFS regridding required for this timestep."
            err_handler.log_msg(config_options, mpi_config)
        return

    # Create a path for a temporary NetCDF file.
    # NOTE: the name is intentionally NOT unique per timestep so the converted
    # file can be reused when file_in2 has not changed between calls.
    input_forcings.tmpFile = config_options.scratch_dir + "/" + "GFS_TMP.nc"
    err_handler.check_program_status(config_options, mpi_config)

    # Check / set the previous file to see if we're going to reuse the
    # already-converted NetCDF file from the last invocation.
    reuse_prev_file = (input_forcings.file_in2 == regrid_gfs.last_file)
    regrid_gfs.last_file = input_forcings.file_in2

    # This file may exist. If it does, and we don't need it again, remove it.....
    if not reuse_prev_file and mpi_config.rank == 0:
        if os.path.isfile(input_forcings.tmpFile):
            config_options.statusMsg = "Found old temporary file: " + \
                                       input_forcings.tmpFile + " - Removing....."
            err_handler.log_warning(config_options, mpi_config)
            try:
                os.remove(input_forcings.tmpFile)
            except OSError:
                config_options.errMsg = "Unable to remove file: " + input_forcings.tmpFile
                err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # We will process each variable at a time. Unfortunately, wgrib2 makes it a bit
    # difficult to handle forecast strings, otherwise this could be done in one command.
    # This makes a compelling case for the use of a GRIB Python API in the future....
    # Loop through all of the input forcings in GFS data. Convert the GRIB2 files
    # to NetCDF, read in the data, regrid it, then map it to the appropriate
    # array slice in the output arrays.
    if reuse_prev_file:
        if mpi_config.rank == 0:
            config_options.statusMsg = "Reusing previous input file: " + input_forcings.file_in2
            err_handler.log_msg(config_options, mpi_config)
        id_tmp = ioMod.open_netcdf_forcing(input_forcings.tmpFile, config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)
    else:
        if input_forcings.fileType != NETCDF:
            # Build one wgrib2 "-match" expression covering every forcing variable,
            # then convert the GRIB2 file to NetCDF in a single pass.
            fields = []
            for force_count, grib_var in enumerate(input_forcings.grib_vars):
                if mpi_config.rank == 0:
                    config_options.statusMsg = "Converting 13km GFS Variable: " + grib_var
                    err_handler.log_msg(config_options, mpi_config)
                # Create a temporary NetCDF file from the GRIB2 file.
                if grib_var == "PRATE":
                    # By far the most complicated of output variables. We need to calculate
                    # our 'average' PRATE based on our current hour. Through hour 240 the
                    # GFS average-rate accumulation window resets every 6 hours, so find
                    # the start hour of the window containing fcst_hour2.
                    if input_forcings.fcst_hour2 <= 240:
                        tmp_hr_current = input_forcings.fcst_hour2
                        diff_tmp = tmp_hr_current % 6 if tmp_hr_current % 6 > 0 else 6
                        tmp_hr_previous = tmp_hr_current - diff_tmp
                    else:
                        tmp_hr_previous = input_forcings.fcst_hour1
                    fields.append(':' + grib_var + ':' +
                                  input_forcings.grib_levels[force_count] + ':' +
                                  str(tmp_hr_previous) + '-' + str(input_forcings.fcst_hour2) + " hour ave fcst:")
                else:
                    fields.append(':' + grib_var + ':' +
                                  input_forcings.grib_levels[force_count] + ':'
                                  + str(input_forcings.fcst_hour2) + " hour fcst:")
            # Always extract the surface height field as well; it is used for
            # downscaling later on.
            fields.append(":(HGT):(surface):")
            cmd = '$WGRIB2 -match "(' + '|'.join(fields) + ')" ' + input_forcings.file_in2 + \
                  " -netcdf " + input_forcings.tmpFile
            id_tmp = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFile, cmd,
                                      config_options, mpi_config, inputVar=None)
            err_handler.check_program_status(config_options, mpi_config)
        else:
            # Input is already NetCDF: just link it into place and open it.
            create_link("GFS", input_forcings.file_in2, input_forcings.tmpFile, config_options, mpi_config)
            id_tmp = ioMod.open_netcdf_forcing(input_forcings.tmpFile, config_options, mpi_config)

    for force_count, grib_var in enumerate(input_forcings.grib_vars):
        if mpi_config.rank == 0:
            config_options.statusMsg = "Processing 13km GFS Variable: " + grib_var
            err_handler.log_msg(config_options, mpi_config)
        calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings,
                                               config_options, wrf_hydro_geo_meta, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if calc_regrid_flag:
            # Weights (and the elevation regrid below) only need to be computed
            # when the input grid or domain decomposition has changed.
            if mpi_config.rank == 0:
                config_options.statusMsg = "Calculating 13km GFS regridding weights."
                err_handler.log_msg(config_options, mpi_config)
            calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Regrid the height variable (read from the converted NetCDF file on
            # rank 0, then scattered to all ranks). Used for downscaling purposes.
            var_tmp = None
            if mpi_config.rank == 0:
                try:
                    var_tmp = id_tmp.variables['HGT_surface'][0, :, :]
                except (ValueError, KeyError, AttributeError) as err:
                    config_options.errMsg = "Unable to extract GFS elevation from: " + input_forcings.tmpFile + \
                                            " (" + str(err) + ")"
                    err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to place local GFS array into an ESMF field: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            if mpi_config.rank == 0:
                config_options.statusMsg = "Regridding 13km GFS surface elevation data to the WRF-Hydro domain."
                err_handler.log_msg(config_options, mpi_config)
            try:
                input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                         input_forcings.esmf_field_out)
            except ValueError as ve:
                config_options.errMsg = "Unable to regrid GFS elevation data: " + str(ve)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Set any pixel cells outside the input domain to the global missing value.
            try:
                input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                    config_options.globalNdv
            except (ValueError, ArithmeticError) as npe:
                config_options.errMsg = "Unable to perform mask search on GFS elevation data: " + str(npe)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.height[:, :] = input_forcings.esmf_field_out.data
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract GFS elevation array from ESMF field: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

        # Regrid the input variables.
        var_tmp = None
        if mpi_config.rank == 0:
            try:
                var_tmp = id_tmp.variables[input_forcings.netcdf_var_names[force_count]][0, :, :]
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract: " + input_forcings.netcdf_var_names[force_count] + \
                                        " from: " + input_forcings.tmpFile + " (" + str(err) + ")"
                err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # If we are regridding GFS data, and this is precipitation, we need to run calculations
        # on the global precipitation average rates to calculate instantaneous global rates.
        # This is due to GFS's weird nature of doing average rates over different periods.
        # This runs on rank 0 only; the result is distributed by the scatter below.
        if input_forcings.productName == "GFS_Production_GRIB2":
            if grib_var == "PRATE":
                if mpi_config.rank == 0:
                    input_forcings.globalPcpRate2 = var_tmp
                    var_tmp = timeInterpMod.gfs_pcp_time_interp(input_forcings, config_options, mpi_config)

        var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
        mpi_config.comm.barrier()
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to place GFS local array into ESMF field object: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if mpi_config.rank == 0:
            config_options.statusMsg = "Regridding Input 13km GFS Field: " + \
                                       input_forcings.netcdf_var_names[force_count]
            err_handler.log_msg(config_options, mpi_config)
        try:
            # Time the (potentially expensive) ESMF regrid call for diagnostics.
            begin = time.monotonic()
            input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                     input_forcings.esmf_field_out)
            end = time.monotonic()
            if mpi_config.rank == 0:
                config_options.statusMsg = "Regridding took {} seconds".format(end-begin)
                err_handler.log_msg(config_options, mpi_config)
        except ValueError as ve:
            config_options.errMsg = "Unable to regrid GFS variable: " + input_forcings.netcdf_var_names[force_count] \
                                    + " (" + str(ve) + ")"
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Set any pixel cells outside the input domain to the global missing value.
        try:
            input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                config_options.globalNdv
        except (ValueError, ArithmeticError) as npe:
            config_options.errMsg = "Unable to run mask search on GFS variable: " + \
                                    input_forcings.netcdf_var_names[force_count] + " (" + str(npe) + ")"
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.esmf_field_out.data
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to extract GFS ESMF field data to local array: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # If we are on the first timestep, set the previous regridded field to be
        # the latest as there are no states for time 0.
        if config_options.current_output_step == 1:
            input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :]
        err_handler.check_program_status(config_options, mpi_config)

    # Close the temporary NetCDF file.
    if mpi_config.rank == 0:
        try:
            id_tmp.close()
        except OSError:
            config_options.errMsg = "Unable to close NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)
        # DON'T REMOVE THE FILE, IT WILL EITHER BE REUSED or OVERWRITTEN
        # (see the reuse_prev_file logic at the top of this function).
    err_handler.check_program_status(config_options, mpi_config)
def regrid_nam_nest(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Regrid one timestep of input NAM nest data from GRIB2 files onto the
    WRF-Hydro domain.

    :param input_forcings: NAM nest input-forcings object; regridded output is
                           stored on its regridded_forcings1/2 and height arrays.
    :param config_options: configuration object; also carries statusMsg/errMsg.
    :param wrf_hydro_geo_meta: WRF-Hydro geospatial metadata (target domain).
    :param mpi_config: MPI configuration (rank checks, scatter).
    :return: None
    """
    # If the expected file is missing, this means we are allowing missing files, simply
    # exit out of this routine as the regridded fields have already been set to NDV.
    if not os.path.isfile(input_forcings.file_in2):
        return

    # Check to see if the regrid complete flag for this
    # output time step is true. This entails the necessary
    # inputs have already been regridded and we can move on.
    if input_forcings.regridComplete:
        config_options.statusMsg = "No regridding of NAM nest data necessary for this timestep - already completed."
        err_handler.log_msg(config_options, mpi_config)
        return

    # Create a path for a temporary NetCDF file (unique per call via mkfilename()).
    input_forcings.tmpFile = config_options.scratch_dir + "/" + "NAM_NEST_TMP-{}.nc".format(mkfilename())
    err_handler.check_program_status(config_options, mpi_config)

    if input_forcings.fileType != NETCDF:
        # This file shouldn't exist.... but if it does (previously failed
        # execution of the program), remove it.....
        if mpi_config.rank == 0:
            if os.path.isfile(input_forcings.tmpFile):
                config_options.statusMsg = "Found old temporary file: " + \
                                           input_forcings.tmpFile + " - Removing....."
                err_handler.log_warning(config_options, mpi_config)
                try:
                    os.remove(input_forcings.tmpFile)
                except OSError:
                    # BUGFIX: previously called err_handler.err_out() without ever
                    # setting errMsg (stale/empty message) and aborted immediately
                    # on rank 0 only. Use the log_critical + collective
                    # check_program_status pattern used everywhere else so all
                    # MPI ranks shut down together.
                    config_options.errMsg = "Unable to remove file: " + input_forcings.tmpFile
                    err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Build one wgrib2 "-match" expression covering every forcing variable,
        # then convert the GRIB2 file to NetCDF in a single pass.
        fields = []
        for force_count, grib_var in enumerate(input_forcings.grib_vars):
            if mpi_config.rank == 0:
                config_options.statusMsg = "Converting NAM-Nest Variable: " + grib_var
                err_handler.log_msg(config_options, mpi_config)
            fields.append(':' + grib_var + ':' +
                          input_forcings.grib_levels[force_count] + ':'
                          + str(input_forcings.fcst_hour2) + " hour fcst:")
        # Always extract the surface height field; it is used for downscaling.
        fields.append(":(HGT):(surface):")
        # Create a temporary NetCDF file from the GRIB2 file.
        cmd = '$WGRIB2 -match "(' + '|'.join(fields) + ')" ' + input_forcings.file_in2 + \
              " -netcdf " + input_forcings.tmpFile
        id_tmp = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFile, cmd,
                                  config_options, mpi_config, inputVar=None)
        err_handler.check_program_status(config_options, mpi_config)
    else:
        # Input is already NetCDF: just link it into place and open it.
        create_link("NAM-Nest", input_forcings.file_in2, input_forcings.tmpFile, config_options, mpi_config)
        id_tmp = ioMod.open_netcdf_forcing(input_forcings.tmpFile, config_options, mpi_config)

    # Loop through all of the input forcings in NAM nest data. Read in the data,
    # regrid it, then map it to the appropriate array slice in the output arrays.
    for force_count, grib_var in enumerate(input_forcings.grib_vars):
        if mpi_config.rank == 0:
            config_options.statusMsg = "Processing NAM Nest Variable: " + grib_var
            err_handler.log_msg(config_options, mpi_config)
        calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings,
                                               config_options, wrf_hydro_geo_meta, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if calc_regrid_flag:
            # Weights (and the elevation regrid below) only need to be computed
            # when the input grid or domain decomposition has changed.
            if mpi_config.rank == 0:
                config_options.statusMsg = "Calculating NAM nest regridding weights...."
                err_handler.log_msg(config_options, mpi_config)
            calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Regrid the height variable (read on rank 0, then scattered).
            # Used for downscaling purposes.
            var_tmp = None
            if mpi_config.rank == 0:
                # CONSISTENCY FIX: guard the extraction like the sibling regrid
                # functions do, instead of letting a missing variable crash rank 0
                # while the other ranks wait in a collective call.
                try:
                    var_tmp = id_tmp.variables['HGT_surface'][0, :, :]
                except (ValueError, KeyError, AttributeError) as err:
                    config_options.errMsg = "Unable to extract NAM nest elevation from: " + \
                                            input_forcings.tmpFile + " (" + str(err) + ")"
                    err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to place NetCDF NAM nest elevation data into the ESMF field object: " \
                                        + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            if mpi_config.rank == 0:
                config_options.statusMsg = "Regridding NAM nest elevation data to the WRF-Hydro domain."
                err_handler.log_msg(config_options, mpi_config)
            try:
                input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                         input_forcings.esmf_field_out)
            except ValueError as ve:
                config_options.errMsg = "Unable to regrid NAM nest elevation data to the WRF-Hydro domain " \
                                        "using ESMF: " + str(ve)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Set any pixel cells outside the input domain to the global missing value.
            try:
                input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                    config_options.globalNdv
            except (ValueError, ArithmeticError) as npe:
                config_options.errMsg = "Unable to compute mask on NAM nest elevation data: " + str(npe)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.height[:, :] = input_forcings.esmf_field_out.data
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract ESMF regridded NAM nest elevation data to a local " \
                                        "array: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

        err_handler.check_program_status(config_options, mpi_config)

        # Regrid the input variables.
        var_tmp = None
        if mpi_config.rank == 0:
            config_options.statusMsg = "Regridding NAM nest input variable: " + \
                                       input_forcings.netcdf_var_names[force_count]
            err_handler.log_msg(config_options, mpi_config)
            try:
                var_tmp = id_tmp.variables[input_forcings.netcdf_var_names[force_count]][0, :, :]
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract " + input_forcings.netcdf_var_names[force_count] + \
                                        " from: " + input_forcings.tmpFile + " (" + str(err) + ")"
                err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to place local array into local ESMF field: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                     input_forcings.esmf_field_out)
        except ValueError as ve:
            config_options.errMsg = "Unable to regrid input NAM nest forcing variables using ESMF: " + str(ve)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Set any pixel cells outside the input domain to the global missing value.
        try:
            input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                config_options.globalNdv
        except (ValueError, ArithmeticError) as npe:
            config_options.errMsg = "Unable to calculate mask from input NAM nest regridded forcings: " + str(npe)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.esmf_field_out.data
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to place local ESMF regridded data into local array: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # If we are on the first timestep, set the previous regridded field to be
        # the latest as there are no states for time 0.
        if config_options.current_output_step == 1:
            input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :]
        err_handler.check_program_status(config_options, mpi_config)

    # Close the temporary NetCDF file and remove it.
    if mpi_config.rank == 0:
        try:
            id_tmp.close()
        except OSError:
            config_options.errMsg = "Unable to close NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)
        try:
            os.remove(input_forcings.tmpFile)
        except OSError:
            config_options.errMsg = "Unable to remove NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)
def regrid_mrms_hourly(supplemental_precip, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Regrid hourly MRMS supplemental precipitation onto the WRF-Hydro domain.
    An RQI (Radar Quality Index) mask is (optionally) used to filter out poor
    precipitation estimates; the RQI source depends on config_options.rqiMethod:
    0/None = no filtering, 1 = realtime MRMS RQI grids, 2 = monthly climatology.

    :param supplemental_precip: supplemental-precip object; regridded output is
                                stored on its regridded_precip1/2 and rqi arrays.
    :param config_options: configuration object; also carries statusMsg/errMsg.
    :param wrf_hydro_geo_meta: WRF-Hydro geospatial metadata (target domain).
    :param mpi_config: MPI configuration (rank checks, scatter).
    :return: None
    """
    # If the expected file is missing, this means we are allowing missing files, simply
    # exit out of this routine as the regridded fields have already been set to NDV.
    if not os.path.isfile(supplemental_precip.file_in2):
        return

    # Check to see if the regrid complete flag for this
    # output time step is true. This entails the necessary
    # inputs have already been regridded and we can move on.
    if supplemental_precip.regridComplete:
        if mpi_config.rank == 0:
            config_options.statusMsg = "No MRMS regridding required for this timestep."
            err_handler.log_msg(config_options, mpi_config)
        return

    # MRMS data originally is stored as .gz files. We need to compose a series
    # of temporary paths:
    # 1.) The unzipped GRIB2 precipitation file.
    # 2.) The unzipped GRIB2 RQI file.
    # 3.) A temporary NetCDF file that stores the precipitation grid.
    # 4.) A temporary NetCDF file that stores the RQI grid.
    # These will be created / removed through the wgrib2 process.
    mrms_tmp_grib2 = config_options.scratch_dir + "/MRMS_PCP_TMP-{}.grib2".format(mkfilename())
    mrms_tmp_nc = config_options.scratch_dir + "/MRMS_PCP_TMP-{}.nc".format(mkfilename())
    mrms_tmp_rqi_grib2 = config_options.scratch_dir + "/MRMS_RQI_TMP-{}.grib2".format(mkfilename())
    mrms_tmp_rqi_nc = config_options.scratch_dir + "/MRMS_RQI_TMP-{}.nc".format(mkfilename())

    # If the input paths have been set to None, this means input is missing. We will
    # alert the user, and set the final output grids to be the global NDV and return.
    if not supplemental_precip.file_in1 or not supplemental_precip.file_in2:
        if mpi_config.rank == 0:
            config_options.statusMsg = "No MRMS Precipitation available. Supplemental precipitation will " \
                                       "not be used."
            err_handler.log_msg(config_options, mpi_config)
        supplemental_precip.regridded_precip2 = None
        supplemental_precip.regridded_precip1 = None
        return

    # These files shouldn't exist. If they do, remove them.
    if mpi_config.rank == 0:
        if os.path.isfile(mrms_tmp_grib2):
            config_options.statusMsg = "Found old temporary file: " + \
                                       mrms_tmp_grib2 + " - Removing....."
            err_handler.log_warning(config_options, mpi_config)
            try:
                os.remove(mrms_tmp_grib2)
            except OSError:
                config_options.errMsg = "Unable to remove file: " + mrms_tmp_grib2
                err_handler.log_critical(config_options, mpi_config)
        if os.path.isfile(mrms_tmp_nc):
            config_options.statusMsg = "Found old temporary file: " + \
                                       mrms_tmp_nc + " - Removing....."
            err_handler.log_warning(config_options, mpi_config)
            try:
                os.remove(mrms_tmp_nc)
            except OSError:
                config_options.errMsg = "Unable to remove file: " + mrms_tmp_nc
                err_handler.log_critical(config_options, mpi_config)
        if os.path.isfile(mrms_tmp_rqi_grib2):
            config_options.statusMsg = "Found old temporary file: " + \
                                       mrms_tmp_rqi_grib2 + " - Removing....."
            err_handler.log_warning(config_options, mpi_config)
            try:
                os.remove(mrms_tmp_rqi_grib2)
            except OSError:
                config_options.errMsg = "Unable to remove file: " + mrms_tmp_rqi_grib2
                err_handler.log_critical(config_options, mpi_config)
        if os.path.isfile(mrms_tmp_rqi_nc):
            config_options.statusMsg = "Found old temporary file: " + \
                                       mrms_tmp_rqi_nc + " - Removing....."
            err_handler.log_warning(config_options, mpi_config)
            try:
                os.remove(mrms_tmp_rqi_nc)
            except OSError:
                config_options.errMsg = "Unable to remove file: " + mrms_tmp_rqi_nc
                err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    if supplemental_precip.fileType != NETCDF:
        # Unzip MRMS files to temporary locations.
        ioMod.unzip_file(supplemental_precip.file_in2, mrms_tmp_grib2, config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if config_options.rqiMethod == 1:
            ioMod.unzip_file(supplemental_precip.rqi_file_in2, mrms_tmp_rqi_grib2, config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

        # Perform a GRIB dump to NetCDF for the MRMS precip and RQI data.
        cmd1 = "$WGRIB2 " + mrms_tmp_grib2 + " -netcdf " + mrms_tmp_nc
        id_mrms = ioMod.open_grib2(mrms_tmp_grib2, mrms_tmp_nc, cmd1, config_options,
                                   mpi_config, supplemental_precip.netcdf_var_names[0])
        err_handler.check_program_status(config_options, mpi_config)

        if config_options.rqiMethod == 1:
            cmd2 = "$WGRIB2 " + mrms_tmp_rqi_grib2 + " -netcdf " + mrms_tmp_rqi_nc
            id_mrms_rqi = ioMod.open_grib2(mrms_tmp_rqi_grib2, mrms_tmp_rqi_nc, cmd2, config_options,
                                           mpi_config, supplemental_precip.rqi_netcdf_var_names[0])
            err_handler.check_program_status(config_options, mpi_config)
        else:
            id_mrms_rqi = None

        # Remove temporary GRIB2 files
        if mpi_config.rank == 0:
            try:
                os.remove(mrms_tmp_grib2)
            except OSError:
                config_options.errMsg = "Unable to remove GRIB2 file: " + mrms_tmp_grib2
                err_handler.log_critical(config_options, mpi_config)
            if config_options.rqiMethod == 1:
                try:
                    os.remove(mrms_tmp_rqi_grib2)
                except OSError:
                    config_options.errMsg = "Unable to remove GRIB2 file: " + mrms_tmp_rqi_grib2
                    err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)
    else:
        # Inputs are already NetCDF: just link them into place and open them.
        create_link("MRMS", supplemental_precip.file_in2, mrms_tmp_nc, config_options, mpi_config)
        id_mrms = ioMod.open_netcdf_forcing(mrms_tmp_nc, config_options, mpi_config)
        if config_options.rqiMethod == 1:
            create_link("RQI", supplemental_precip.rqi_file_in2, mrms_tmp_rqi_nc, config_options, mpi_config)
            id_mrms_rqi = ioMod.open_netcdf_forcing(mrms_tmp_rqi_nc, config_options, mpi_config)
        else:
            id_mrms_rqi = None

    # Check to see if we need to calculate regridding weights.
    calc_regrid_flag = check_supp_pcp_regrid_status(id_mrms, supplemental_precip, config_options,
                                                    wrf_hydro_geo_meta, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    if calc_regrid_flag:
        if mpi_config.rank == 0:
            config_options.statusMsg = "Calculating MRMS regridding weights."
            err_handler.log_msg(config_options, mpi_config)
        calculate_supp_pcp_weights(supplemental_precip, id_mrms, mrms_tmp_nc, config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

    # Regrid the RQI grid (realtime RQI method only).
    if config_options.rqiMethod == 1:
        var_tmp = None
        if mpi_config.rank == 0:
            try:
                var_tmp = id_mrms_rqi.variables[supplemental_precip.rqi_netcdf_var_names[0]][0, :, :]
            except (ValueError, KeyError, AttributeError) as err:
                # BUGFIX: report the NetCDF file actually being read here; the
                # old message named the temporary GRIB2 file, which never even
                # exists on the NETCDF input path.
                config_options.errMsg = "Unable to extract: " + supplemental_precip.rqi_netcdf_var_names[0] + \
                                        " from: " + mrms_tmp_rqi_nc + " (" + str(err) + ")"
                err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        var_sub_tmp = mpi_config.scatter_array(supplemental_precip, var_tmp, config_options)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            supplemental_precip.esmf_field_in.data[:, :] = var_sub_tmp
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to place MRMS data into local ESMF field: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if mpi_config.rank == 0:
            config_options.statusMsg = "Regridding MRMS RQI Field."
            err_handler.log_msg(config_options, mpi_config)
        try:
            supplemental_precip.esmf_field_out = supplemental_precip.regridObj(supplemental_precip.esmf_field_in,
                                                                               supplemental_precip.esmf_field_out)
        except ValueError as ve:
            config_options.errMsg = "Unable to regrid MRMS RQI field: " + str(ve)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Set any pixel cells outside the input domain to the global missing value.
        try:
            supplemental_precip.esmf_field_out.data[np.where(supplemental_precip.regridded_mask == 0)] = \
                config_options.globalNdv
        except (ValueError, ArithmeticError) as npe:
            config_options.errMsg = "Unable to run mask calculation for MRMS RQI data: " + str(npe)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

    # Populate regridded_rqi2 according to the configured RQI method.
    if not config_options.rqiMethod:
        # We will set the RQI field to 1.0 here so no MRMS data gets masked out.
        supplemental_precip.regridded_rqi2[:, :] = 1.0
        if mpi_config.rank == 0:
            config_options.statusMsg = "MRMS Will not be filtered using RQI values."
            err_handler.log_msg(config_options, mpi_config)
    elif config_options.rqiMethod == 2:
        # Read in the RQI field from monthly climatological files.
        ioMod.read_rqi_monthly_climo(config_options, mpi_config, supplemental_precip, wrf_hydro_geo_meta)
    elif config_options.rqiMethod == 1:
        # We are using the MRMS RQI field in realtime
        supplemental_precip.regridded_rqi2[:, :] = supplemental_precip.esmf_field_out.data
    err_handler.check_program_status(config_options, mpi_config)

    if config_options.rqiMethod == 1:
        # Close the temporary RQI NetCDF file and remove it.
        if mpi_config.rank == 0:
            try:
                id_mrms_rqi.close()
            except OSError:
                config_options.errMsg = "Unable to close NetCDF file: " + mrms_tmp_rqi_nc
                err_handler.log_critical(config_options, mpi_config)
            try:
                os.remove(mrms_tmp_rqi_nc)
            except OSError:
                config_options.errMsg = "Unable to remove NetCDF file: " + mrms_tmp_rqi_nc
                err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

    # Regrid the input variables.
    var_tmp = None
    if mpi_config.rank == 0:
        config_options.statusMsg = "Regridding: " + supplemental_precip.netcdf_var_names[0]
        err_handler.log_msg(config_options, mpi_config)
        try:
            var_tmp = id_mrms.variables[supplemental_precip.netcdf_var_names[0]][0, :, :]
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to extract: " + supplemental_precip.netcdf_var_names[0] + \
                                    " from: " + mrms_tmp_nc + " (" + str(err) + ")"
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    var_sub_tmp = mpi_config.scatter_array(supplemental_precip, var_tmp, config_options)
    err_handler.check_program_status(config_options, mpi_config)

    # CONSISTENCY FIX: guard this assignment like every other ESMF field
    # assignment in this module, so a shape mismatch is logged and handled
    # collectively instead of crashing a single rank.
    try:
        supplemental_precip.esmf_field_in.data[:, :] = var_sub_tmp
    except (ValueError, KeyError, AttributeError) as err:
        config_options.errMsg = "Unable to place MRMS data into local ESMF field: " + str(err)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    try:
        supplemental_precip.esmf_field_out = supplemental_precip.regridObj(supplemental_precip.esmf_field_in,
                                                                           supplemental_precip.esmf_field_out)
    except ValueError as ve:
        config_options.errMsg = "Unable to regrid MRMS precipitation: " + str(ve)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Set any pixel cells outside the input domain to the global missing value.
    try:
        supplemental_precip.esmf_field_out.data[np.where(supplemental_precip.regridded_mask == 0)] = \
            config_options.globalNdv
    except (ValueError, ArithmeticError) as npe:
        config_options.errMsg = "Unable to run mask search on MRMS supplemental precip: " + str(npe)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    supplemental_precip.regridded_precip2[:, :] = \
        supplemental_precip.esmf_field_out.data
    err_handler.check_program_status(config_options, mpi_config)

    # Check for any RQI values below the threshold specified by the user.
    # Set these values to global NDV.
    try:
        ind_filter = np.where(supplemental_precip.regridded_rqi2 < config_options.rqiThresh)
        supplemental_precip.regridded_precip2[ind_filter] = config_options.globalNdv
        del ind_filter
    except (ValueError, AttributeError, KeyError, ArithmeticError) as npe:
        config_options.errMsg = "Unable to run MRMS RQI threshold search: " + str(npe)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Convert the hourly precipitation total to a rate of mm/s
    try:
        ind_valid = np.where(supplemental_precip.regridded_precip2 != config_options.globalNdv)
        supplemental_precip.regridded_precip2[ind_valid] = supplemental_precip.regridded_precip2[ind_valid] / 3600.0
        del ind_valid
    except (ValueError, AttributeError, ArithmeticError, KeyError) as npe:
        config_options.errMsg = "Unable to run global NDV search on MRMS regridded precip: " + str(npe)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # If we are on the first timestep, set the previous regridded field to be
    # the latest as there are no states for time 0.
    if config_options.current_output_step == 1:
        supplemental_precip.regridded_precip1[:, :] = \
            supplemental_precip.regridded_precip2[:, :]
        supplemental_precip.regridded_rqi1[:, :] = \
            supplemental_precip.regridded_rqi2[:, :]

    # Close the temporary NetCDF file and remove it.
    if mpi_config.rank == 0:
        try:
            id_mrms.close()
        except OSError:
            config_options.errMsg = "Unable to close NetCDF file: " + mrms_tmp_nc
            err_handler.log_critical(config_options, mpi_config)
        try:
            os.remove(mrms_tmp_nc)
        except OSError:
            config_options.errMsg = "Unable to remove NetCDF file: " + mrms_tmp_nc
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)
def regrid_hourly_wrf_arw(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Function for handling regridding of hourly WRF-ARW nest input data
    from GRIB2 files onto the WRF-Hydro domain.
    :param mpi_config: MPI configuration object (rank, scatter helpers).
    :param wrf_hydro_geo_meta: WRF-Hydro geospatial metadata (destination grid).
    :param input_forcings: input forcings object for the WRF-ARW product.
    :param config_options: configuration/state object (scratch dir, NDV, messages).
    :return: None (results stored on input_forcings.regridded_forcings1/2).
    """
    # If the expected file is missing, this means we are allowing missing files, simply
    # exit out of this routine as the regridded fields have already been set to NDV.
    if not os.path.isfile(input_forcings.file_in2):
        return

    # Check to see if the regrid complete flag for this
    # output time step is true. This entails the necessary
    # inputs have already been regridded and we can move on.
    if input_forcings.regridComplete:
        config_options.statusMsg = "No regridding of WRF-ARW nest data necessary for this timestep - already completed."
        err_handler.log_msg(config_options, mpi_config)
        return

    # Create a path for a temporary NetCDF file
    input_forcings.tmpFile = config_options.scratch_dir + "/" + "ARW_TMP-{}.nc".format(mkfilename())
    err_handler.check_program_status(config_options, mpi_config)

    if input_forcings.fileType != NETCDF:
        # This file shouldn't exist.... but if it does (previously failed
        # execution of the program), remove it.....
        if mpi_config.rank == 0:
            if os.path.isfile(input_forcings.tmpFile):
                config_options.statusMsg = "Found old temporary file: " + \
                                           input_forcings.tmpFile + " - Removing....."
                err_handler.log_warning(config_options, mpi_config)
                try:
                    os.remove(input_forcings.tmpFile)
                except OSError:
                    err_handler.err_out(config_options)
        err_handler.check_program_status(config_options, mpi_config)

        # Build one wgrib2 "-match" expression per requested GRIB variable. APCP is
        # an accumulation, so its time descriptor differs from instantaneous fields.
        fields = []
        for force_count, grib_var in enumerate(input_forcings.grib_vars):
            if mpi_config.rank == 0:
                config_options.statusMsg = "Converting WRF-ARW Variable: " + grib_var
                err_handler.log_msg(config_options, mpi_config)
            time_str = "{}-{} hour acc fcst".format(input_forcings.fcst_hour1, input_forcings.fcst_hour2) \
                if grib_var == 'APCP' else str(input_forcings.fcst_hour2) + " hour fcst"
            fields.append(':' + grib_var + ':' +
                          input_forcings.grib_levels[force_count] + ':'
                          + time_str + ":")
        # Terrain height is also extracted (used below for downscaling).
        fields.append(":(HGT):(surface):")

        # Create a temporary NetCDF file from the GRIB2 file.
        cmd = '$WGRIB2 -match "(' + '|'.join(fields) + ')" ' + input_forcings.file_in2 + \
              " -netcdf " + input_forcings.tmpFile
        id_tmp = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFile, cmd,
                                  config_options, mpi_config, inputVar=None)
        err_handler.check_program_status(config_options, mpi_config)
    else:
        # Input is already NetCDF - link it into scratch space and open it directly.
        create_link("WRF-ARW", input_forcings.file_in2, input_forcings.tmpFile, config_options, mpi_config)
        id_tmp = ioMod.open_netcdf_forcing(input_forcings.tmpFile, config_options, mpi_config)

    # Loop through all of the input forcings in the WRF-ARW nest data. Read in the
    # data, regrid it, then map it to the appropriate array slice in the output arrays.
    for force_count, grib_var in enumerate(input_forcings.grib_vars):
        if mpi_config.rank == 0:
            config_options.statusMsg = "Processing WRF-ARW Variable: " + grib_var
            err_handler.log_msg(config_options, mpi_config)
        calc_regrid_flag = check_regrid_status(id_tmp, force_count, input_forcings,
                                               config_options, wrf_hydro_geo_meta, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        if calc_regrid_flag:
            if mpi_config.rank == 0:
                config_options.statusMsg = "Calculating WRF-ARW regridding weights...."
                err_handler.log_msg(config_options, mpi_config)
            calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Read in the height field, which is used for downscaling purposes.
            # (Previous implementation extracted HGT via a separate wgrib2 call:)
            # cmd = "$WGRIB2 " + input_forcings.file_in2 + " -match " + \
            #       "\":(HGT):(surface):\" " + \
            #       " -netcdf " + input_forcings.tmpFileHeight
            # id_tmp_height = ioMod.open_grib2(input_forcings.file_in2, input_forcings.tmpFileHeight,
            #                                  cmd, config_options, mpi_config, 'HGT_surface')
            # err_handler.check_program_status(config_options, mpi_config)

            # Regrid the height variable (read on rank 0, scattered to all ranks).
            if mpi_config.rank == 0:
                var_tmp = id_tmp.variables['HGT_surface'][0, :, :]
            else:
                var_tmp = None
            err_handler.check_program_status(config_options, mpi_config)

            var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to place NetCDF WRF-ARW elevation data into the ESMF field object: " \
                                        + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            if mpi_config.rank == 0:
                config_options.statusMsg = "Regridding WRF-ARW elevation data to the WRF-Hydro domain."
                err_handler.log_msg(config_options, mpi_config)
            try:
                input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                         input_forcings.esmf_field_out)
            except ValueError as ve:
                config_options.errMsg = "Unable to regrid WRF-ARW elevation data to the WRF-Hydro domain " \
                                        "using ESMF: " + str(ve)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # Set any pixel cells outside the input domain to the global missing value.
            try:
                input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                    config_options.globalNdv
            except (ValueError, ArithmeticError) as npe:
                config_options.errMsg = "Unable to compute mask on WRF-ARW elevation data: " + str(npe)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            try:
                input_forcings.height[:, :] = input_forcings.esmf_field_out.data
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract ESMF regridded WRF-ARW elevation data to a local " \
                                        "array: " + str(err)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

            # (Previous implementation closed/removed a separate temporary height file here:)
            # if mpi_config.rank == 0:
            #     try:
            #         id_tmp_height.close()
            #     except OSError:
            #         config_options.errMsg = "Unable to close temporary file: " + input_forcings.tmpFileHeight
            #         err_handler.log_critical(config_options, mpi_config)
            #
            #     try:
            #         os.remove(input_forcings.tmpFileHeight)
            #     except OSError:
            #         config_options.errMsg = "Unable to remove temporary file: " + input_forcings.tmpFileHeight
            #         err_handler.log_critical(config_options, mpi_config)
            #     err_handler.check_program_status(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

        # Regrid the input variables.
        var_tmp = None
        if mpi_config.rank == 0:
            config_options.statusMsg = "Regridding WRF-ARW input variable: " + \
                                       input_forcings.netcdf_var_names[force_count]
            err_handler.log_msg(config_options, mpi_config)
            try:
                var_tmp = id_tmp.variables[input_forcings.netcdf_var_names[force_count]][0, :, :]
            except (ValueError, KeyError, AttributeError) as err:
                config_options.errMsg = "Unable to extract " + input_forcings.netcdf_var_names[force_count] + \
                                        " from: " + input_forcings.tmpFile + " (" + str(err) + ")"
                err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to place local array into local ESMF field: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                     input_forcings.esmf_field_out)
        except ValueError as ve:
            config_options.errMsg = "Unable to regrid input WRF-ARW forcing variables using ESMF: " + str(ve)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Set any pixel cells outside the input domain to the global missing value.
        try:
            input_forcings.esmf_field_out.data[np.where(input_forcings.regridded_mask == 0)] = \
                config_options.globalNdv
        except (ValueError, ArithmeticError) as npe:
            config_options.errMsg = "Unable to calculate mask from input WRF-ARW regridded forcings: " + str(npe)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Convert the hourly precipitation total to a rate of mm/s
        if grib_var == 'APCP':
            try:
                ind_valid = np.where(input_forcings.esmf_field_out.data != config_options.globalNdv)
                input_forcings.esmf_field_out.data[ind_valid] = input_forcings.esmf_field_out.data[ind_valid] / 3600.0
                del ind_valid
            except (ValueError, ArithmeticError, AttributeError, KeyError) as npe:
                config_options.errMsg = "Unable to run NDV search on WRF ARW precipitation: " + str(npe)
                err_handler.log_critical(config_options, mpi_config)
            err_handler.check_program_status(config_options, mpi_config)

        try:
            input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.esmf_field_out.data
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to place local ESMF regridded data into local array: " + str(err)
            err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # If we are on the first timestep, set the previous regridded field to be
        # the latest as there are no states for time 0.
        if config_options.current_output_step == 1:
            input_forcings.regridded_forcings1[input_forcings.input_map_output[force_count], :, :] = \
                input_forcings.regridded_forcings2[input_forcings.input_map_output[force_count], :, :]
        err_handler.check_program_status(config_options, mpi_config)

    # Close the temporary NetCDF file and remove it.
    if mpi_config.rank == 0:
        try:
            id_tmp.close()
        except OSError:
            config_options.errMsg = "Unable to close NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)

        try:
            os.remove(input_forcings.tmpFile)
        except OSError:
            config_options.errMsg = "Unable to remove NetCDF file: " + input_forcings.tmpFile
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)
def regrid_hourly_wrf_arw_hi_res_pcp(supplemental_precip, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Function for handling regridding hourly forecasted ARW precipitation for hi-res nests.
    :param supplemental_precip: supplemental precip object (file paths, regrid state, output arrays).
    :param config_options: configuration/state object (scratch dir, NDV, messages).
    :param wrf_hydro_geo_meta: WRF-Hydro geospatial metadata (destination grid).
    :param mpi_config: MPI configuration object (rank, scatter helpers).
    :return: None (results stored on supplemental_precip.regridded_precip1/2).
    """
    # If the expected file is missing, this means we are allowing missing files, simply
    # exit out of this routine as the regridded fields have already been set to NDV.
    if not os.path.exists(supplemental_precip.file_in1):
        return

    # Check to see if the regrid complete flag for this
    # output time step is true. This entails the necessary
    # inputs have already been regridded and we can move on.
    if supplemental_precip.regridComplete:
        if mpi_config.rank == 0:
            config_options.statusMsg = "No ARW regridding required for this timestep."
            err_handler.log_msg(config_options, mpi_config)
        return

    # Create a path for a temporary NetCDF files that will
    # be created through the wgrib2 process.
    arw_tmp_nc = config_options.scratch_dir + "/ARW_PCP_TMP-{}.nc".format(mkfilename())

    if supplemental_precip.fileType != NETCDF:
        # These files shouldn't exist. If they do (e.g. left over from a previously
        # failed execution), remove them before wgrib2 writes to the same path.
        if mpi_config.rank == 0:
            if os.path.isfile(arw_tmp_nc):
                config_options.statusMsg = "Found old temporary file: " + \
                                           arw_tmp_nc + " - Removing....."
                err_handler.log_warning(config_options, mpi_config)
                try:
                    os.remove(arw_tmp_nc)
                except IOError:
                    # BUGFIX: set an error message before logging; previously a stale
                    # (or empty) errMsg was logged here.
                    config_options.errMsg = "Unable to remove temporary file: " + arw_tmp_nc
                    err_handler.log_critical(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

        # Create a temporary NetCDF file from the GRIB2 file, matching only the
        # one-hour APCP accumulation ending at fcst_hour1.
        cmd = "$WGRIB2 " + supplemental_precip.file_in1 + " -match \":(" + \
              "APCP):(surface):(" + str(supplemental_precip.fcst_hour1 - 1) + \
              "-" + str(supplemental_precip.fcst_hour1) + " hour acc fcst):\"" + \
              " -netcdf " + arw_tmp_nc
        id_tmp = ioMod.open_grib2(supplemental_precip.file_in1, arw_tmp_nc, cmd,
                                  config_options, mpi_config, "APCP_surface")
        err_handler.check_program_status(config_options, mpi_config)
    else:
        # Input is already NetCDF - link it into scratch space and open it directly.
        create_link("ARW-PCP", supplemental_precip.file_in1, arw_tmp_nc, config_options, mpi_config)
        id_tmp = ioMod.open_netcdf_forcing(arw_tmp_nc, config_options, mpi_config)

    # Check to see if we need to calculate regridding weights.
    calc_regrid_flag = check_supp_pcp_regrid_status(id_tmp, supplemental_precip, config_options,
                                                    wrf_hydro_geo_meta, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    if calc_regrid_flag:
        if mpi_config.rank == 0:
            config_options.statusMsg = "Calculating WRF ARW regridding weights."
            err_handler.log_msg(config_options, mpi_config)
        calculate_supp_pcp_weights(supplemental_precip, id_tmp, arw_tmp_nc, config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)

    # Regrid the input variables. The global field is read on rank 0 and scattered.
    var_tmp = None
    if mpi_config.rank == 0:
        # BUGFIX: collapsed a redundant nested "if mpi_config.rank == 0" check.
        config_options.statusMsg = "Regridding WRF ARW APCP Precipitation."
        err_handler.log_msg(config_options, mpi_config)
        try:
            var_tmp = id_tmp.variables['APCP_surface'][0, :, :]
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to extract precipitation from WRF ARW file: " + \
                                    supplemental_precip.file_in1 + " (" + str(err) + ")"
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    var_sub_tmp = mpi_config.scatter_array(supplemental_precip, var_tmp, config_options)
    err_handler.check_program_status(config_options, mpi_config)

    try:
        supplemental_precip.esmf_field_in.data[:, :] = var_sub_tmp
    except (ValueError, KeyError, AttributeError) as err:
        config_options.errMsg = "Unable to place WRF ARW precipitation into local ESMF field: " + str(err)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    try:
        supplemental_precip.esmf_field_out = supplemental_precip.regridObj(supplemental_precip.esmf_field_in,
                                                                           supplemental_precip.esmf_field_out)
    except ValueError as ve:
        config_options.errMsg = "Unable to regrid WRF ARW supplemental precipitation: " + str(ve)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Set any pixel cells outside the input domain to the global missing value.
    try:
        supplemental_precip.esmf_field_out.data[np.where(supplemental_precip.regridded_mask == 0)] = \
            config_options.globalNdv
    except (ValueError, ArithmeticError) as npe:
        config_options.errMsg = "Unable to run mask search on WRF ARW supplemental precipitation: " + str(npe)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    supplemental_precip.regridded_precip2[:, :] = supplemental_precip.esmf_field_out.data
    err_handler.check_program_status(config_options, mpi_config)

    # Convert the hourly precipitation total to a rate of mm/s
    try:
        ind_valid = np.where(supplemental_precip.regridded_precip2 != config_options.globalNdv)
        supplemental_precip.regridded_precip2[ind_valid] = supplemental_precip.regridded_precip2[ind_valid] / 3600.0
        del ind_valid
    except (ValueError, ArithmeticError, AttributeError, KeyError) as npe:
        config_options.errMsg = "Unable to run NDV search on WRF ARW supplemental precipitation: " + str(npe)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # If we are on the first timestep, set the previous regridded field to be
    # the latest as there are no states for time 0.
    if config_options.current_output_step == 1:
        supplemental_precip.regridded_precip1[:, :] = \
            supplemental_precip.regridded_precip2[:, :]
    err_handler.check_program_status(config_options, mpi_config)

    # Close the temporary NetCDF file and remove it.
    if mpi_config.rank == 0:
        try:
            id_tmp.close()
        except OSError:
            config_options.errMsg = "Unable to close NetCDF file: " + arw_tmp_nc
            err_handler.log_critical(config_options, mpi_config)

        try:
            os.remove(arw_tmp_nc)
        except OSError:
            config_options.errMsg = "Unable to remove NetCDF file: " + arw_tmp_nc
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)
def check_regrid_status(id_tmp, force_count, input_forcings, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Check whether regridding weights need to be calculated (or recalculated)
    for the given input forcing variable.

    Weights are (re)calculated when:
      1.) This is the first output time step (no global grid dims cached yet).
      2.) The input forcing grid dimensions have changed.

    :param wrf_hydro_geo_meta: WRF-Hydro geospatial metadata (destination grid).
    :param force_count: index of the variable within input_forcings.netcdf_var_names.
    :param id_tmp: open NetCDF dataset containing the input variable.
    :param input_forcings: input forcings object (grid state, ESMF fields).
    :param config_options: configuration/state object.
    :param mpi_config: MPI configuration object.
    :return: True if regridding weights must be (re)calculated.
    """
    # If the destination ESMF field hasn't been created, create it here.
    if not input_forcings.esmf_field_out:
        try:
            input_forcings.esmf_field_out = ESMF.Field(wrf_hydro_geo_meta.esmf_grid,
                                                       name=input_forcings.productName + 'FORCING_REGRIDDED')
        except ESMF.ESMPyException as esmf_error:
            config_options.errMsg = "Unable to create " + input_forcings.productName + \
                                    " destination ESMF field object: " + str(esmf_error)
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    calc_regrid_flag = False

    if input_forcings.nx_global is None or input_forcings.ny_global is None:
        # This is the first timestep - allocate the regridded output arrays
        # (8 forcing variables x local domain) on every rank.
        input_forcings.regridded_forcings1 = np.empty([8, wrf_hydro_geo_meta.ny_local, wrf_hydro_geo_meta.nx_local],
                                                      np.float32)
        input_forcings.regridded_forcings2 = np.empty([8, wrf_hydro_geo_meta.ny_local, wrf_hydro_geo_meta.nx_local],
                                                      np.float32)
        if mpi_config.rank == 0:
            # First timestep: weights must be computed (redundant inner re-check of
            # the first-timestep condition removed).
            calc_regrid_flag = True
    else:
        if mpi_config.rank == 0:
            # BUGFIX: the grid has changed if EITHER dimension differs. The original
            # used "and", which missed grids where only one dimension changed.
            if id_tmp.variables[input_forcings.netcdf_var_names[force_count]].shape[1] \
                    != input_forcings.ny_global or \
                    id_tmp.variables[input_forcings.netcdf_var_names[force_count]].shape[2] \
                    != input_forcings.nx_global:
                calc_regrid_flag = True

    # Broadcast the flag to the other processors so all ranks agree.
    calc_regrid_flag = mpi_config.broadcast_parameter(calc_regrid_flag, config_options, param_type=bool)
    err_handler.check_program_status(config_options, mpi_config)

    return calc_regrid_flag
def check_supp_pcp_regrid_status(id_tmp, supplemental_precip, config_options, wrf_hydro_geo_meta, mpi_config):
    """
    Check whether regridding weights need to be calculated (or recalculated)
    for the supplemental precipitation product.

    Weights are (re)calculated when:
      1.) This is the first output time step (no global grid dims cached yet).
      2.) The input precipitation grid dimensions have changed.

    :param supplemental_precip: supplemental precip object (grid state, ESMF fields).
    :param id_tmp: open NetCDF dataset containing the input variable.
    :param config_options: configuration/state object.
    :param wrf_hydro_geo_meta: WRF-Hydro geospatial metadata (destination grid).
    :param mpi_config: MPI configuration object.
    :return: True if regridding weights must be (re)calculated.
    """
    # If the destination ESMF field hasn't been created, create it here.
    if not supplemental_precip.esmf_field_out:
        try:
            supplemental_precip.esmf_field_out = ESMF.Field(wrf_hydro_geo_meta.esmf_grid,
                                                            name=supplemental_precip.productName + 'SUPP_PCP_REGRIDDED')
        except ESMF.ESMPyException as esmf_error:
            config_options.errMsg = "Unable to create " + supplemental_precip.productName + \
                                    " destination ESMF field object: " + str(esmf_error)
            err_handler.err_out(config_options)

    calc_regrid_flag = False

    if supplemental_precip.nx_global is None or supplemental_precip.ny_global is None:
        # This is the first timestep - allocate the local regridded precip/RQI
        # arrays on every rank, initializing RQI to the global NDV.
        supplemental_precip.regridded_precip1 = np.empty([wrf_hydro_geo_meta.ny_local, wrf_hydro_geo_meta.nx_local],
                                                         np.float32)
        supplemental_precip.regridded_precip2 = np.empty([wrf_hydro_geo_meta.ny_local, wrf_hydro_geo_meta.nx_local],
                                                         np.float32)
        supplemental_precip.regridded_rqi1 = np.empty([wrf_hydro_geo_meta.ny_local, wrf_hydro_geo_meta.nx_local],
                                                      np.float32)
        supplemental_precip.regridded_rqi2 = np.empty([wrf_hydro_geo_meta.ny_local, wrf_hydro_geo_meta.nx_local],
                                                      np.float32)
        supplemental_precip.regridded_rqi1[:, :] = config_options.globalNdv
        supplemental_precip.regridded_rqi2[:, :] = config_options.globalNdv
        if mpi_config.rank == 0:
            # First timestep: weights must be computed (redundant inner re-check of
            # the first-timestep condition removed).
            calc_regrid_flag = True
    else:
        if mpi_config.rank == 0:
            # BUGFIX: the grid has changed if EITHER dimension differs. The original
            # used "and", which missed grids where only one dimension changed.
            if id_tmp.variables[supplemental_precip.netcdf_var_names[0]].shape[1] \
                    != supplemental_precip.ny_global or \
                    id_tmp.variables[supplemental_precip.netcdf_var_names[0]].shape[2] \
                    != supplemental_precip.nx_global:
                calc_regrid_flag = True

    # We will now check to see if the regridded arrays are still None. This means the fields were set to None
    # earlier for missing data. We need to reset them to nx_global/ny_global where the calc_regrid_flag is False.
    if supplemental_precip.regridded_precip2 is None:
        supplemental_precip.regridded_precip2 = np.empty([wrf_hydro_geo_meta.ny_local, wrf_hydro_geo_meta.nx_local],
                                                         np.float32)
    if supplemental_precip.regridded_precip1 is None:
        supplemental_precip.regridded_precip1 = np.empty([wrf_hydro_geo_meta.ny_local, wrf_hydro_geo_meta.nx_local],
                                                         np.float32)

    # Broadcast the flag to the other processors so all ranks agree.
    calc_regrid_flag = mpi_config.broadcast_parameter(calc_regrid_flag, config_options, param_type=bool)
    mpi_config.comm.barrier()

    return calc_regrid_flag
def calculate_weights(id_tmp, force_count, input_forcings, config_options, mpi_config):
    """
    Function to calculate ESMF weights based on the output ESMF
    field previously calculated, along with input lat/lon grids,
    and a sample dataset.
    :param input_forcings: input forcings object (receives source grid, bounds, and regrid object).
    :param id_tmp: open NetCDF dataset holding the input variable and lat/lon coordinates.
    :param mpi_config: MPI configuration object (rank, scatter/broadcast helpers).
    :param config_options: configuration/state object (weight-cache dir, messages).
    :param force_count: index of the variable within input_forcings.netcdf_var_names.
    :return: None (all results are stored on input_forcings).
    """
    # Extract the global input grid dimensions from the sample variable on rank 0;
    # broadcast to all ranks below.
    if mpi_config.rank == 0:
        try:
            input_forcings.ny_global = id_tmp.variables[input_forcings.netcdf_var_names[force_count]].shape[1]
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to extract Y shape size from: " + \
                                    input_forcings.netcdf_var_names[force_count] + " from: " + \
                                    input_forcings.tmpFile + " (" + str(err) + ")"
            err_handler.log_critical(config_options, mpi_config)
        try:
            input_forcings.nx_global = id_tmp.variables[input_forcings.netcdf_var_names[force_count]].shape[2]
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to extract X shape size from: " + \
                                    input_forcings.netcdf_var_names[force_count] + " from: " + \
                                    input_forcings.tmpFile + " (" + str(err) + ")"
            err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Broadcast the forcing nx/ny values
    input_forcings.ny_global = mpi_config.broadcast_parameter(input_forcings.ny_global,
                                                              config_options, param_type=int)
    err_handler.check_program_status(config_options, mpi_config)
    input_forcings.nx_global = mpi_config.broadcast_parameter(input_forcings.nx_global,
                                                              config_options, param_type=int)
    err_handler.check_program_status(config_options, mpi_config)

    # Create the source ESMF grid (cell-centered, spherical degrees).
    try:
        # noinspection PyTypeChecker
        input_forcings.esmf_grid_in = ESMF.Grid(np.array([input_forcings.ny_global, input_forcings.nx_global]),
                                                staggerloc=ESMF.StaggerLoc.CENTER,
                                                coord_sys=ESMF.CoordSys.SPH_DEG)
    except ESMF.ESMPyException as esmf_error:
        config_options.errMsg = "Unable to create source ESMF grid from temporary file: " + \
                                input_forcings.tmpFile + " (" + str(esmf_error) + ")"
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Record this rank's slice of the domain-decomposed source grid.
    try:
        input_forcings.x_lower_bound = input_forcings.esmf_grid_in.lower_bounds[ESMF.StaggerLoc.CENTER][1]
        input_forcings.x_upper_bound = input_forcings.esmf_grid_in.upper_bounds[ESMF.StaggerLoc.CENTER][1]
        input_forcings.y_lower_bound = input_forcings.esmf_grid_in.lower_bounds[ESMF.StaggerLoc.CENTER][0]
        input_forcings.y_upper_bound = input_forcings.esmf_grid_in.upper_bounds[ESMF.StaggerLoc.CENTER][0]
        input_forcings.nx_local = input_forcings.x_upper_bound - input_forcings.x_lower_bound
        input_forcings.ny_local = input_forcings.y_upper_bound - input_forcings.y_lower_bound
    except (ValueError, KeyError, AttributeError) as err:
        config_options.errMsg = "Unable to extract local X/Y boundaries from global grid from temporary " + \
                                "file: " + input_forcings.tmpFile + " (" + str(err) + ")"
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Check to make sure we have enough dimensionality to run regridding. ESMF requires both grids
    # to have a size of at least 2.
    if input_forcings.nx_local < 2 or input_forcings.ny_local < 2:
        config_options.errMsg = "You have either specified too many cores for: " + input_forcings.productName + \
                                ", or your input forcing grid is too small to process. Local grid must " \
                                "have x/y dimension size of 2."
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # check if we're doing border trimming and set up mask
    border = input_forcings.border  # // 5 # HRRR is a 3 km product
    if border > 0:
        try:
            # Zero out 'border' cells around every edge of the source grid; these
            # masked cells are excluded by src_mask_values=[0] when weights are built.
            mask = input_forcings.esmf_grid_in.add_item(ESMF.GridItem.MASK, ESMF.StaggerLoc.CENTER)
            if mpi_config.rank == 0:
                config_options.statusMsg = "Trimming input forcing `{}` by {} grid cells".format(
                        input_forcings.productName,
                        border)
                err_handler.log_msg(config_options, mpi_config)

            gmask = np.ones([input_forcings.ny_global, input_forcings.nx_global])
            gmask[:+border, :] = 0.     # top edge
            gmask[-border:, :] = 0.     # bottom edge
            gmask[:, :+border] = 0.     # left edge
            gmask[:, -border:] = 0.     # right edge

            mask[:, :] = mpi_config.scatter_array(input_forcings, gmask, config_options)
            err_handler.check_program_status(config_options, mpi_config)
        except Exception as e:
            # NOTE(review): failures here are printed and swallowed, so border trimming
            # silently degrades to "no trimming" - consider routing through err_handler.
            print(e, flush=True)

    lat_tmp = None
    lon_tmp = None
    if mpi_config.rank == 0:
        # Process lat/lon values from the input grid, normalizing 3D/2D/1D coordinate
        # variables into full 2D lat/lon arrays.
        if len(id_tmp.variables['latitude'].shape) == 3:
            # We have 2D grids already in place (with a leading time dimension).
            lat_tmp = id_tmp.variables['latitude'][0, :, :]
            lon_tmp = id_tmp.variables['longitude'][0, :, :]
        elif len(id_tmp.variables['longitude'].shape) == 2:
            # We have 2D grids already in place.
            lat_tmp = id_tmp.variables['latitude'][:, :]
            lon_tmp = id_tmp.variables['longitude'][:, :]
        elif len(id_tmp.variables['latitude'].shape) == 1:
            # We have 1D lat/lons we need to translate into
            # 2D grids.
            lat_tmp = np.repeat(id_tmp.variables['latitude'][:][:, np.newaxis], input_forcings.nx_global, axis=1)
            lon_tmp = np.tile(id_tmp.variables['longitude'][:], (input_forcings.ny_global, 1))
    err_handler.check_program_status(config_options, mpi_config)

    # Scatter global latitude grid to processors..
    if mpi_config.rank == 0:
        var_tmp = lat_tmp
    else:
        var_tmp = None
    var_sub_lat_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
    err_handler.check_program_status(config_options, mpi_config)

    # Scatter global longitude grid to processors..
    if mpi_config.rank == 0:
        var_tmp = lon_tmp
    else:
        var_tmp = None
    var_sub_lon_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
    err_handler.check_program_status(config_options, mpi_config)

    try:
        input_forcings.esmf_lats = input_forcings.esmf_grid_in.get_coords(1)
    except ESMF.GridException as ge:
        config_options.errMsg = "Unable to locate latitude coordinate object within input ESMF grid: " + str(ge)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    try:
        input_forcings.esmf_lons = input_forcings.esmf_grid_in.get_coords(0)
    except ESMF.GridException as ge:
        config_options.errMsg = "Unable to locate longitude coordinate object within input ESMF grid: " + str(ge)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Fill this rank's slice of the source grid coordinates, then drop the temporaries
    # to release the (potentially large) global lat/lon arrays.
    input_forcings.esmf_lats[:, :] = var_sub_lat_tmp
    input_forcings.esmf_lons[:, :] = var_sub_lon_tmp
    del var_sub_lat_tmp
    del var_sub_lon_tmp
    del lat_tmp
    del lon_tmp

    # Create a ESMF field to hold the incoming data.
    try:
        input_forcings.esmf_field_in = ESMF.Field(input_forcings.esmf_grid_in,
                                                  name=input_forcings.productName + "_NATIVE")
    except ESMF.ESMPyException as esmf_error:
        config_options.errMsg = "Unable to create ESMF field object: " + str(esmf_error)
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    # Scatter global grid to processors..
    if mpi_config.rank == 0:
        var_tmp = id_tmp[input_forcings.netcdf_var_names[force_count]][0, :, :]
        # Set all valid values to 1.0, and all missing values to 0.0. This will
        # be used to generate an output mask that is used later on in downscaling, layering,
        # etc.
        # NOTE(review): the assignment below sets EVERY cell to 1.0 (missing values
        # included); masking appears to rely on the grid mask / src_mask_values
        # instead - confirm against the weight-generation behavior.
        var_tmp[:, :] = 1.0
    else:
        var_tmp = None
    var_sub_tmp = mpi_config.scatter_array(input_forcings, var_tmp, config_options)
    err_handler.check_program_status(config_options, mpi_config)

    # Place temporary data into the field array for generating the regridding object.
    input_forcings.esmf_field_in.data[:, :] = var_sub_tmp
    # mpi_config.comm.barrier()

    # ## CALCULATE WEIGHT ## #
    # Try to find a pre-existing weight file, if available
    weight_file = None
    if config_options.weightsDir is not None:
        grid_key = input_forcings.productName
        weight_file = os.path.join(config_options.weightsDir, "ESMF_weight_{}_b{}.nc4".format(grid_key, border))
        # check if file exists:
        if os.path.exists(weight_file):
            # read the data
            try:
                if mpi_config.rank == 0:
                    config_options.statusMsg = "Loading cached ESMF weight object for " + input_forcings.productName + \
                                               " from " + weight_file
                    err_handler.log_msg(config_options, mpi_config)
                err_handler.check_program_status(config_options, mpi_config)

                begin = time.monotonic()
                input_forcings.regridObj = ESMF.RegridFromFile(input_forcings.esmf_field_in,
                                                               input_forcings.esmf_field_out,
                                                               weight_file)
                end = time.monotonic()
                if mpi_config.rank == 0:
                    config_options.statusMsg = "Finished loading weight object with ESMF, took {} seconds".format(
                            end - begin)
                    err_handler.log_msg(config_options, mpi_config)
            except (IOError, ValueError, ESMF.ESMPyException) as esmf_error:
                # A missing/corrupt cache is non-fatal: warn and fall through to
                # regenerating the weights below (regridObj stays None).
                config_options.errMsg = "Unable to load cached ESMF weight file: " + str(esmf_error)
                err_handler.log_warning(config_options, mpi_config)

    if input_forcings.regridObj is None:
        if mpi_config.rank == 0:
            config_options.statusMsg = "Creating weight object from ESMF"
            err_handler.log_msg(config_options, mpi_config)
        err_handler.check_program_status(config_options, mpi_config)
        try:
            begin = time.monotonic()
            # Generate bilinear weights; also writes them to weight_file (when set)
            # so subsequent runs can reload them via RegridFromFile.
            input_forcings.regridObj = ESMF.Regrid(input_forcings.esmf_field_in,
                                                   input_forcings.esmf_field_out,
                                                   src_mask_values=np.array([0]),
                                                   regrid_method=ESMF.RegridMethod.BILINEAR,
                                                   unmapped_action=ESMF.UnmappedAction.IGNORE,
                                                   filename=weight_file)
            end = time.monotonic()
            if mpi_config.rank == 0:
                config_options.statusMsg = "Finished generating weight object with ESMF, took {} seconds".format(
                        end - begin)
                err_handler.log_msg(config_options, mpi_config)
        except (RuntimeError, ImportError, ESMF.ESMPyException) as esmf_error:
            config_options.errMsg = "Unable to regrid input data from ESMF: " + str(esmf_error)
            err_handler.log_critical(config_options, mpi_config)
            # Dump full traceback and field diagnostics to aid debugging before aborting.
            etype, value, tb = sys.exc_info()
            traceback.print_exception(etype, value, tb)
            print(input_forcings.esmf_field_in)
            print(input_forcings.esmf_field_out)
            print(np.array([0]))
    err_handler.check_program_status(config_options, mpi_config)

    # Run the regridding object on this test dataset. Check the output grid for
    # any 0 values.
    try:
        input_forcings.esmf_field_out = input_forcings.regridObj(input_forcings.esmf_field_in,
                                                                 input_forcings.esmf_field_out)
    except ValueError as ve:
        config_options.errMsg = "Unable to extract regridded data from ESMF regridded field: " + str(ve)
        err_handler.log_critical(config_options, mpi_config)
        # delete bad cached file if it exists
        if weight_file is not None:
            if os.path.exists(weight_file):
                os.remove(weight_file)
    err_handler.check_program_status(config_options, mpi_config)

    # Cache the regridded all-ones test field as the output mask (destination cells
    # that received no source data remain 0 and are treated as outside the domain).
    input_forcings.regridded_mask[:, :] = input_forcings.esmf_field_out.data[:, :]
def calculate_supp_pcp_weights(supplemental_precip, id_tmp, tmp_file, config_options, mpi_config):
    """
    Function to calculate ESMF weights based on the output ESMF
    field previously calculated, along with input lat/lon grids,
    and a sample dataset.

    Collective over all MPI ranks: rank 0 reads grid metadata and lat/lon
    arrays from the open NetCDF handle, then the values are broadcast /
    scattered to every rank so each can build its local slab of the source
    ESMF grid and the bilinear regridding object.

    :param tmp_file: path of the temporary input file (used only in error messages)
    :param id_tmp: open NetCDF dataset handle for the sample supplemental precip file
    :param supplemental_precip: supplemental precip object; grid/field/regrid
                                attributes are populated in place
    :param mpi_config: MPI configuration/communicator wrapper
    :param config_options: global configuration options (receives errMsg on failure)
    :return: None - results are stored on supplemental_precip
    """
    # Rank 0 extracts the global grid dimensions from the first supplemental
    # precip variable. The variable is assumed to be (time, y, x) - shape[1]
    # is Y and shape[2] is X.
    if mpi_config.rank == 0:
        try:
            supplemental_precip.ny_global = id_tmp.variables[supplemental_precip.netcdf_var_names[0]].shape[1]
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to extract Y shape size from: " + \
                                    supplemental_precip.netcdf_var_names[0] + " from: " + \
                                    tmp_file + " (" + str(err) + ")"
            err_handler.err_out(config_options)
        try:
            supplemental_precip.nx_global = id_tmp.variables[supplemental_precip.netcdf_var_names[0]].shape[2]
        except (ValueError, KeyError, AttributeError) as err:
            config_options.errMsg = "Unable to extract X shape size from: " + \
                                    supplemental_precip.netcdf_var_names[0] + " from: " + \
                                    tmp_file + " (" + str(err) + ")"
            err_handler.err_out(config_options)

    # mpi_config.comm.barrier()

    # Broadcast the forcing nx/ny values
    supplemental_precip.ny_global = mpi_config.broadcast_parameter(supplemental_precip.ny_global,
                                                                   config_options, param_type=int)
    supplemental_precip.nx_global = mpi_config.broadcast_parameter(supplemental_precip.nx_global,
                                                                   config_options, param_type=int)
    # mpi_config.comm.barrier()

    # Build the (distributed) source ESMF grid from the global dimensions.
    # Spherical degrees, cell-center staggering.
    try:
        # noinspection PyTypeChecker
        supplemental_precip.esmf_grid_in = ESMF.Grid(np.array([supplemental_precip.ny_global,
                                                               supplemental_precip.nx_global]),
                                                     staggerloc=ESMF.StaggerLoc.CENTER,
                                                     coord_sys=ESMF.CoordSys.SPH_DEG)
    except ESMF.ESMPyException as esmf_error:
        config_options.errMsg = "Unable to create source ESMF grid from temporary file: " + \
                                tmp_file + " (" + str(esmf_error) + ")"
        err_handler.err_out(config_options)

    # mpi_config.comm.barrier()

    # ESMF decomposes the grid across ranks; record this rank's local slab
    # bounds (index 1 = X, index 0 = Y in the bounds arrays).
    try:
        supplemental_precip.x_lower_bound = supplemental_precip.esmf_grid_in.lower_bounds[ESMF.StaggerLoc.CENTER][1]
        supplemental_precip.x_upper_bound = supplemental_precip.esmf_grid_in.upper_bounds[ESMF.StaggerLoc.CENTER][1]
        supplemental_precip.y_lower_bound = supplemental_precip.esmf_grid_in.lower_bounds[ESMF.StaggerLoc.CENTER][0]
        supplemental_precip.y_upper_bound = supplemental_precip.esmf_grid_in.upper_bounds[ESMF.StaggerLoc.CENTER][0]
        supplemental_precip.nx_local = supplemental_precip.x_upper_bound - supplemental_precip.x_lower_bound
        supplemental_precip.ny_local = supplemental_precip.y_upper_bound - supplemental_precip.y_lower_bound
    except (ValueError, KeyError, AttributeError) as err:
        config_options.errMsg = "Unable to extract local X/Y boundaries from global grid from temporary " + \
                                "file: " + tmp_file + " (" + str(err) + ")"
        err_handler.err_out(config_options)

    # mpi_config.comm.barrier()

    # Check to make sure we have enough dimensionality to run regridding. ESMF requires both grids
    # to have a size of at least 2.
    if supplemental_precip.nx_local < 2 or supplemental_precip.ny_local < 2:
        config_options.errMsg = "You have either specified too many cores for: " + supplemental_precip.productName + \
                                ", or your input forcing grid is too small to process. Local grid " \
                                "must have x/y dimension size of 2."
        err_handler.log_critical(config_options, mpi_config)
    err_handler.check_program_status(config_options, mpi_config)

    lat_tmp = lon_tmp = None
    if mpi_config.rank == 0:
        # Process lat/lon values from the GFS grid.
        # Handle the three layouts seen in input files: (time, y, x) 3D,
        # (y, x) 2D, or separate 1D latitude/longitude axes.
        if len(id_tmp.variables['latitude'].shape) == 3:
            # We have 2D grids already in place.
            lat_tmp = id_tmp.variables['latitude'][0, :, :]
            lon_tmp = id_tmp.variables['longitude'][0, :, :]
        elif len(id_tmp.variables['longitude'].shape) == 2:
            # We have 2D grids already in place.
            lat_tmp = id_tmp.variables['latitude'][:, :]
            lon_tmp = id_tmp.variables['longitude'][:, :]
        elif len(id_tmp.variables['latitude'].shape) == 1:
            # We have 1D lat/lons we need to translate into
            # 2D grids.
            lat_tmp = np.repeat(id_tmp.variables['latitude'][:][:, np.newaxis], supplemental_precip.nx_global, axis=1)
            lon_tmp = np.tile(id_tmp.variables['longitude'][:], (supplemental_precip.ny_global, 1))
    # mpi_config.comm.barrier()

    # Scatter global GFS latitude grid to processors..
    if mpi_config.rank == 0:
        var_tmp = lat_tmp
    else:
        var_tmp = None
    var_sub_lat_tmp = mpi_config.scatter_array(supplemental_precip, var_tmp, config_options)
    # mpi_config.comm.barrier()

    if mpi_config.rank == 0:
        var_tmp = lon_tmp
    else:
        var_tmp = None
    var_sub_lon_tmp = mpi_config.scatter_array(supplemental_precip, var_tmp, config_options)
    # mpi_config.comm.barrier()

    # Coordinate dim 1 = latitude, dim 0 = longitude in this grid layout.
    try:
        supplemental_precip.esmf_lats = supplemental_precip.esmf_grid_in.get_coords(1)
    except ESMF.GridException as ge:
        config_options.errMsg = "Unable to locate latitude coordinate object within supplemental precip ESMF grid: " \
                                + str(ge)
        err_handler.err_out(config_options)
    # mpi_config.comm.barrier()

    try:
        supplemental_precip.esmf_lons = supplemental_precip.esmf_grid_in.get_coords(0)
    except ESMF.GridException as ge:
        config_options.errMsg = "Unable to locate longitude coordinate object within supplemental precip ESMF grid: " \
                                + str(ge)
        err_handler.err_out(config_options)
    # mpi_config.comm.barrier()

    # Fill this rank's coordinate slabs, then free the temporaries.
    supplemental_precip.esmf_lats[:, :] = var_sub_lat_tmp
    supplemental_precip.esmf_lons[:, :] = var_sub_lon_tmp
    del var_sub_lat_tmp
    del var_sub_lon_tmp
    del lat_tmp
    del lon_tmp

    # Create a ESMF field to hold the incoming data.
    supplemental_precip.esmf_field_in = ESMF.Field(supplemental_precip.esmf_grid_in,
                                                   name=supplemental_precip.productName + "_NATIVE")

    # mpi_config.comm.barrier()

    # Scatter global grid to processors..
    if mpi_config.rank == 0:
        var_tmp = id_tmp[supplemental_precip.netcdf_var_names[0]][0, :, :]
        # Set all valid values to 1.0, and all missing values to 0.0. This will
        # be used to generate an output mask that is used later on in downscaling, layering,
        # etc.
        # NOTE(review): the assignment below sets EVERY cell to 1.0 (masked
        # arrays may keep fill values for missing cells) - confirm this matches
        # the intent described above.
        var_tmp[:, :] = 1.0
    else:
        var_tmp = None
    var_sub_tmp = mpi_config.scatter_array(supplemental_precip, var_tmp, config_options)
    mpi_config.comm.barrier()

    # Place temporary data into the field array for generating the regridding object.
    supplemental_precip.esmf_field_in.data[:, :] = var_sub_tmp
    # mpi_config.comm.barrier()

    # Build the bilinear regridding object; cells with mask value 0 in the
    # source are ignored, as are destination cells with no source coverage.
    # esmf_field_out is assumed to have been created earlier - TODO confirm.
    supplemental_precip.regridObj = ESMF.Regrid(supplemental_precip.esmf_field_in,
                                                supplemental_precip.esmf_field_out,
                                                src_mask_values=np.array([0]),
                                                regrid_method=ESMF.RegridMethod.BILINEAR,
                                                unmapped_action=ESMF.UnmappedAction.IGNORE)

    # Run the regridding object on this test dataset. Check the output grid for
    # any 0 values.
    supplemental_precip.esmf_field_out = supplemental_precip.regridObj(supplemental_precip.esmf_field_in,
                                                                       supplemental_precip.esmf_field_out)
    supplemental_precip.regridded_mask[:, :] = supplemental_precip.esmf_field_out.data[:, :]
| 53.544573 | 120 | 0.641039 | 17,184 | 143,553 | 5.054702 | 0.033985 | 0.118835 | 0.083813 | 0.115243 | 0.917453 | 0.900979 | 0.886703 | 0.869491 | 0.859061 | 0.848895 | 0 | 0.006103 | 0.283206 | 143,553 | 2,680 | 121 | 53.564552 | 0.838014 | 0.18706 | 0 | 0.754266 | 0 | 0 | 0.087008 | 0.000571 | 0.003413 | 0 | 0 | 0.000373 | 0 | 1 | 0.00967 | false | 0 | 0.005688 | 0 | 0.02901 | 0.002844 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ed86ae356150ebff5c0ded68773f6d6c62354ec5 | 112,521 | py | Python | test/integration/component/test_vpc_vm_life_cycle.py | lafferty/cshv3 | ee0ff7ac240bd24e19db6bd3fb9869dd087442ba | [
"Apache-2.0"
] | 2 | 2015-05-19T05:04:30.000Z | 2016-09-07T00:33:17.000Z | test/integration/component/test_vpc_vm_life_cycle.py | lafferty/cshv3 | ee0ff7ac240bd24e19db6bd3fb9869dd087442ba | [
"Apache-2.0"
] | null | null | null | test/integration/component/test_vpc_vm_life_cycle.py | lafferty/cshv3 | ee0ff7ac240bd24e19db6bd3fb9869dd087442ba | [
"Apache-2.0"
] | 2 | 2017-07-07T14:49:03.000Z | 2018-07-31T06:38:42.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Component tests VM life cycle in VPC network functionality
"""
#Import Local Modules
import marvin
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.integration.lib.utils import *
from marvin.integration.lib.base import *
from marvin.integration.lib.common import *
from marvin.remoteSSHClient import remoteSSHClient
class Services:
    """Test VM life cycle in VPC network services
    """

    def __init__(self):
        # Single dictionary of test data consumed by the test classes below.
        # Keys map to marvin entity factories (Account.create, VPC.create, ...).
        self.services = {
            # Test account credentials (created per test class run).
            "account": {
                "email": "test@test.com",
                "firstname": "Test",
                "lastname": "User",
                "username": "test",
                # Random characters are appended for unique
                # username
                "password": "password",
            },
            # Minimal compute offering used for all deployed VMs.
            "service_offering": {
                "name": "Tiny Instance",
                "displaytext": "Tiny Instance",
                "cpunumber": 1,
                "cpuspeed": 100,
                "memory": 128,
            },
            # Host-tagged offerings; "HOST_TAGS_HERE" is a placeholder the
            # test environment is expected to substitute with real host tags.
            "service_offering_1": {
                "name": "Tiny Instance- tagged host 1",
                "displaytext": "Tiny off-tagged host2",
                "cpunumber": 1,
                "cpuspeed": 100,
                "memory": 128,
                "tags": "HOST_TAGS_HERE"
            },
            "service_offering_2": {
                "name": "Tiny Instance- tagged host 2",
                "displaytext": "Tiny off-tagged host2",
                "cpunumber": 1,
                "cpuspeed": 100,
                "memory": 128,
                "tags": "HOST_TAGS_HERE"
            },
            # VPC tier offering with the full service set (incl. LB),
            # all provided by the VPC virtual router.
            "network_offering": {
                "name": 'VPC Network offering',
                "displaytext": 'VPC Network off',
                "guestiptype": 'Isolated',
                "supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Lb,UserData,StaticNat,NetworkACL',
                "traffictype": 'GUEST',
                "availability": 'Optional',
                "useVpc": 'on',
                "serviceProviderList": {
                    "Dhcp": 'VpcVirtualRouter',
                    "Dns": 'VpcVirtualRouter',
                    "SourceNat": 'VpcVirtualRouter',
                    "PortForwarding": 'VpcVirtualRouter',
                    "Lb": 'VpcVirtualRouter',
                    "UserData": 'VpcVirtualRouter',
                    "StaticNat": 'VpcVirtualRouter',
                    "NetworkACL": 'VpcVirtualRouter'
                },
                "serviceCapabilityList": {
                    "SourceNat": {"SupportedSourceNatTypes": "peraccount"},
                    "Lb": {"lbSchemes": "public", "SupportedLbIsolation": "dedicated"}
                },
            },
            # Same as above minus the LB service.
            "network_offering_no_lb": {
                "name": 'VPC Network offering no LB',
                "displaytext": 'VPC Network off no LB',
                "guestiptype": 'Isolated',
                "supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,UserData,StaticNat,NetworkACL',
                "traffictype": 'GUEST',
                "availability": 'Optional',
                "useVpc": 'on',
                "serviceProviderList": {
                    "Dhcp": 'VpcVirtualRouter',
                    "Dns": 'VpcVirtualRouter',
                    "SourceNat": 'VpcVirtualRouter',
                    "PortForwarding": 'VpcVirtualRouter',
                    "UserData": 'VpcVirtualRouter',
                    "StaticNat": 'VpcVirtualRouter',
                    "NetworkACL": 'VpcVirtualRouter'
                },
            },
            # Shared (VLAN-backed) network offering used by the shared-network
            # VPC test class.
            "network_off_shared": {
                "name": 'Shared Network offering',
                "displaytext": 'Shared Network offering',
                "guestiptype": 'Shared',
                "traffictype": 'GUEST',
                "availability": 'Optional',
                "useVpc": 'on',
                "specifyIpRanges": True,
                "specifyVlan": True
            },
            "vpc_offering": {
                "name": 'VPC off',
                "displaytext": 'VPC off',
                "supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding,Lb,UserData,StaticNat',
            },
            # The cidr here is overridden to 10.1.1.1/16 in setUpClass.
            "vpc": {
                "name": "TestVPC",
                "displaytext": "TestVPC",
                "cidr": '10.0.0.1/24'
            },
            "network": {
                "name": "Test Network",
                "displaytext": "Test Network",
                "netmask": '255.255.255.0',
                "limit": 5,
                # Max networks allowed as per hypervisor
                # Xenserver -> 5, VMWare -> 9
            },
            # Load balancer rule on SSH port 22.
            "lbrule": {
                "name": "SSH",
                "alg": "leastconn",
                # Algorithm used for load balancing
                "privateport": 22,
                "publicport": 22,
                "openfirewall": False,
                "startport": 22,
                "endport": 22,
                "protocol": "TCP",
                "cidrlist": '0.0.0.0/0',
            },
            # Port forwarding (NAT) rule on SSH port 22.
            "natrule": {
                "privateport": 22,
                "publicport": 22,
                "startport": 22,
                "endport": 22,
                "protocol": "TCP",
                "cidrlist": '0.0.0.0/0',
            },
            "fw_rule": {
                "startport": 1,
                "endport": 6000,
                "cidr": '0.0.0.0/0',
                # Any network (For creating FW rule)
                "protocol": "TCP"
            },
            # ICMP ACL (-1/-1 = all types/codes); used as the egress rule so
            # guests can ping the outside world.
            "icmp_rule": {
                "icmptype": -1,
                "icmpcode": -1,
                "cidrlist": '0.0.0.0/0',
                "protocol": "ICMP"
            },
            "virtual_machine": {
                "displayname": "Test VM",
                "username": "root",
                "password": "password",
                "ssh_port": 22,
                "hypervisor": 'XenServer',
                # Hypervisor type should be same as
                # hypervisor type of cluster
                "privateport": 22,
                "publicport": 22,
                "protocol": 'TCP',
                "userdata": 'This is sample data',
            },
            "ostype": 'CentOS 5.3 (64-bit)',
            # Cent OS 5.3 (64 bit)
            "sleep": 60,       # seconds to sleep between retries
            "timeout": 10,     # retry count for polling operations
            "mode": 'advanced'
        }
class TestVMLifeCycleVPC(cloudstackTestCase):
    """VM life-cycle tests for instances running inside a VPC network.

    setUpClass builds one shared fixture: a VPC (10.1.1.1/16) with a single
    tier (gateway 10.1.1.1), two VMs in that tier, an LB rule on one public
    IP balancing both VMs, a NAT (port forwarding) rule on a second public
    IP to VM 1, and ingress/egress network ACLs opening the matching ports.
    The tests then stop/start/reboot/destroy/recover/migrate/expunge the VMs
    and verify that the configured rules keep working.
    """

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestVMLifeCycleVPC,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id

        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls.account = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        cls.vpc_off = VpcOffering.create(
            cls.api_client,
            cls.services["vpc_offering"]
        )
        # Offerings are created disabled; enable before use.
        cls.vpc_off.update(cls.api_client, state='Enabled')

        cls.services["vpc"]["cidr"] = '10.1.1.1/16'
        cls.vpc = VPC.create(
            cls.api_client,
            cls.services["vpc"],
            vpcofferingid=cls.vpc_off.id,
            zoneid=cls.zone.id,
            account=cls.account.name,
            domainid=cls.account.domainid
        )
        cls.nw_off = NetworkOffering.create(
            cls.api_client,
            cls.services["network_offering"],
            conservemode=False
        )
        # Enable Network offering
        cls.nw_off.update(cls.api_client, state='Enabled')

        # Creating network using the network offering created
        cls.network_1 = Network.create(
            cls.api_client,
            cls.services["network"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            networkofferingid=cls.nw_off.id,
            zoneid=cls.zone.id,
            gateway='10.1.1.1',
            vpcid=cls.vpc.id
        )
        cls.nw_off_no_lb = NetworkOffering.create(
            cls.api_client,
            cls.services["network_offering_no_lb"],
            conservemode=False
        )
        # Enable Network offering
        cls.nw_off_no_lb.update(cls.api_client, state='Enabled')

        # Spawn two instances in that network; both are LB'd, only vm_1 gets
        # the NAT rule below.
        cls.vm_1 = VirtualMachine.create(
            cls.api_client,
            cls.services["virtual_machine"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id,
            networkids=[str(cls.network_1.id)]
        )
        cls.vm_2 = VirtualMachine.create(
            cls.api_client,
            cls.services["virtual_machine"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id,
            networkids=[str(cls.network_1.id)]
        )

        # Public IP 1 carries the load balancer rule for both VMs.
        cls.public_ip_1 = PublicIPAddress.create(
            cls.api_client,
            accountid=cls.account.name,
            zoneid=cls.zone.id,
            domainid=cls.account.domainid,
            networkid=cls.network_1.id,
            vpcid=cls.vpc.id
        )
        cls.lb_rule = LoadBalancerRule.create(
            cls.api_client,
            cls.services["lbrule"],
            ipaddressid=cls.public_ip_1.ipaddress.id,
            accountid=cls.account.name,
            networkid=cls.network_1.id,
            vpcid=cls.vpc.id,
            domainid=cls.account.domainid
        )
        cls.lb_rule.assign(cls.api_client, [cls.vm_1, cls.vm_2])

        # Public IP 2 carries the port-forwarding (NAT) rule to vm_1.
        cls.public_ip_2 = PublicIPAddress.create(
            cls.api_client,
            accountid=cls.account.name,
            zoneid=cls.zone.id,
            domainid=cls.account.domainid,
            networkid=cls.network_1.id,
            vpcid=cls.vpc.id
        )
        cls.nat_rule = NATRule.create(
            cls.api_client,
            cls.vm_1,
            cls.services["natrule"],
            ipaddressid=cls.public_ip_2.ipaddress.id,
            openfirewall=False,
            networkid=cls.network_1.id,
            vpcid=cls.vpc.id
        )

        # Opening up the ports in VPC: ingress ACLs for the NAT and LB rules,
        # plus an egress ICMP ACL so guests can reach the outside world.
        cls.nwacl_nat = NetworkACL.create(
            cls.api_client,
            networkid=cls.network_1.id,
            services=cls.services["natrule"],
            traffictype='Ingress'
        )
        cls.nwacl_lb = NetworkACL.create(
            cls.api_client,
            networkid=cls.network_1.id,
            services=cls.services["lbrule"],
            traffictype='Ingress'
        )
        cls.nwacl_internet_1 = NetworkACL.create(
            cls.api_client,
            networkid=cls.network_1.id,
            services=cls.services["icmp_rule"],
            traffictype='Egress'
        )

        cls._cleanup = [
            cls.account,
            cls.service_offering,
            cls.nw_off,
            cls.nw_off_no_lb
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created network offerings
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def validate_vpc_offering(self, vpc_offering):
        """Validates the VPC offering is listed correctly by the API."""
        self.debug("Check if the VPC offering is created successfully?")
        vpc_offs = VpcOffering.list(
            self.apiclient,
            id=vpc_offering.id
        )
        self.assertEqual(
            isinstance(vpc_offs, list),
            True,
            "List VPC offerings should return a valid list"
        )
        self.assertEqual(
            vpc_offering.name,
            vpc_offs[0].name,
            "Name of the VPC offering should match with listVPCOff data"
        )
        self.debug(
            "VPC offering is created successfully - %s" %
            vpc_offering.name)
        return

    def validate_vpc_network(self, network, state=None):
        """Validates the VPC network and, optionally, its state."""
        self.debug("Check if the VPC network is created successfully?")
        vpc_networks = VPC.list(
            self.apiclient,
            id=network.id
        )
        self.assertEqual(
            isinstance(vpc_networks, list),
            True,
            "List VPC network should return a valid list"
        )
        self.assertEqual(
            network.name,
            vpc_networks[0].name,
            "Name of the VPC network should match with listVPC data"
        )
        if state:
            self.assertEqual(
                vpc_networks[0].state,
                state,
                "VPC state should be '%s'" % state
            )
        self.debug("VPC network validated - %s" % network.name)
        return

    def validate_network_rules(self):
        """Validates the LB and NAT rules by SSHing to vm_1 through both
        public IPs and pinging the outside world from the guest."""
        try:
            self.debug("Checking if we can SSH into VM_1 through %s?" %
                       (self.public_ip_1.ipaddress.ipaddress))
            ssh_1 = self.vm_1.get_ssh_client(
                ipaddress=self.public_ip_1.ipaddress.ipaddress)
            self.debug("SSH into VM is successfully")

            self.debug("Verifying if we can ping to outside world from VM?")
            # Ping to outsite world
            res = ssh_1.execute("ping -c 1 www.google.com")
            # res = 64 bytes from maa03s17-in-f20.1e100.net (74.125.236.212):
            # icmp_req=1 ttl=57 time=25.9 ms
            # --- www.l.google.com ping statistics ---
            # 1 packets transmitted, 1 received, 0% packet loss, time 0ms
            # rtt min/avg/max/mdev = 25.970/25.970/25.970/0.000 ms
        except Exception as e:
            self.fail("Failed to SSH into VM - %s, %s" %
                      (self.public_ip_1.ipaddress.ipaddress, e))

        result = str(res)
        self.assertEqual(
            result.count("1 received"),
            1,
            "Ping to outside world from VM should be successful"
        )

        self.debug("Checking if we can SSH into VM_1 through %s?" %
                   (self.public_ip_2.ipaddress.ipaddress))
        try:
            ssh_2 = self.vm_1.get_ssh_client(
                ipaddress=self.public_ip_2.ipaddress.ipaddress)
            self.debug("SSH into VM is successfully")

            self.debug("Verifying if we can ping to outside world from VM?")
            res = ssh_2.execute("ping -c 1 www.google.com")
        except Exception as e:
            self.fail("Failed to SSH into VM - %s, %s" %
                      (self.public_ip_2.ipaddress.ipaddress, e))

        result = str(res)
        self.assertEqual(
            result.count("1 received"),
            1,
            "Ping to outside world from VM should be successful"
        )
        return

    @attr(tags=["advanced", "intervlan"])
    def test_01_deploy_instance_in_network(self):
        """ Test deploy an instance in VPC networks
        """

        # Validate the following
        # 1. Create a VPC with cidr - 10.1.1.1/16
        # 2. Add network1(10.1.1.1/24) and network2(10.1.2.1/24) to this VPC.
        # Steps:
        # 1. Deploy vm1 and vm2 in network1 and vm3 and vm4 in network2 using
        #    the default CentOS 6.2 Template

        self.debug("Check if deployed VMs are in running state?")
        vms = VirtualMachine.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(vms, list),
            True,
            "List VMs should return a valid response"
        )
        for vm in vms:
            self.debug("VM name: %s, VM state: %s" % (vm.name, vm.state))
            self.assertEqual(
                vm.state,
                "Running",
                "Vm state should be running for each VM deployed"
            )
        return

    @attr(tags=["advanced", "intervlan"])
    def test_02_stop_instance_in_network(self):
        """ Test stop an instance in VPC networks
        """

        # Validate the following
        # 1. Stop the virtual machines.
        # 2. Rules should be still configured on virtual router.

        self.debug("Stopping the virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_1.stop(self.apiclient)
            self.vm_2.stop(self.apiclient)
        except Exception as e:
            self.fail("Failed to stop the virtual instances, %s" % e)

        # Check if the network rules still exists after Vm stop
        self.debug("Checking if NAT rules ")
        nat_rules = NATRule.list(
            self.apiclient,
            id=self.nat_rule.id,
            listall=True
        )
        self.assertEqual(
            isinstance(nat_rules, list),
            True,
            "List NAT rules shall return a valid list"
        )

        lb_rules = LoadBalancerRule.list(
            self.apiclient,
            id=self.lb_rule.id,
            listall=True
        )
        self.assertEqual(
            isinstance(lb_rules, list),
            True,
            "List LB rules shall return a valid list"
        )
        return

    @attr(tags=["advanced", "intervlan"])
    def test_03_start_instance_in_network(self):
        """ Test start an instance in VPC networks
        """

        # Validate the following
        # 1. Start the virtual machines.
        # 2. Vm should be started successfully.
        # 3. Make sure that all the PF,LB and Static NAT rules on this VM
        #    works as expected.
        # 3. Make sure that we are able to access google.com from this user Vm

        self.debug("Starting the virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_1.start(self.apiclient)
            self.vm_2.start(self.apiclient)
        except Exception as e:
            self.fail("Failed to start the virtual instances, %s" % e)

        self.debug("Validating if the network rules work properly or not?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_04_reboot_instance_in_network(self):
        """ Test reboot an instance in VPC networks
        """

        # Validate the following
        # 1. Reboot the virtual machines.
        # 2. Vm should be started successfully.
        # 3. Make sure that all the PF,LB and Static NAT rules on this VM
        #    works as expected.
        # 3. Make sure that we are able to access google.com from this user Vm

        self.debug("Validating if the network rules work properly or not?")
        self.validate_network_rules()

        self.debug("Starting the virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_1.reboot(self.apiclient)
            self.vm_2.reboot(self.apiclient)
        except Exception as e:
            self.fail("Failed to reboot the virtual instances, %s" % e)

        self.debug("Validating if the network rules work properly or not?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_05_destroy_instance_in_network(self):
        """ Test destroy an instance in VPC networks
        """

        # Validate the following
        # 1. Destory the virtual machines.
        # 2. Rules should be still configured on virtual router.

        self.debug("Validating if the network rules work properly or not?")
        self.validate_network_rules()

        self.debug("Destroying the virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_1.delete(self.apiclient)
            self.vm_2.delete(self.apiclient)
        except Exception as e:
            # Fixed message: this path destroys (deletes) the instances.
            self.fail("Failed to destroy the virtual instances, %s" % e)

        # Check if the network rules still exists after Vm stop
        self.debug("Checking if NAT rules ")
        nat_rules = NATRule.list(
            self.apiclient,
            id=self.nat_rule.id,
            listall=True
        )
        self.assertEqual(
            isinstance(nat_rules, list),
            True,
            "List NAT rules shall return a valid list"
        )

        lb_rules = LoadBalancerRule.list(
            self.apiclient,
            id=self.lb_rule.id,
            listall=True
        )
        self.assertEqual(
            isinstance(lb_rules, list),
            True,
            "List LB rules shall return a valid list"
        )
        return

    @attr(tags=["advanced", "intervlan"])
    def test_06_recover_instance_in_network(self):
        """ Test recover an instance in VPC networks
        """

        # Validate the following
        # 1. Recover the virtual machines.
        # 2. Vm should be in stopped state. State both the instances
        # 3. Make sure that all the PF,LB and Static NAT rules on this VM
        #    works as expected.
        # 3. Make sure that we are able to access google.com from this user Vm

        self.debug("Recovering the expunged virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_1.recover(self.apiclient)
            self.vm_2.recover(self.apiclient)
        except Exception as e:
            self.fail("Failed to recover the virtual instances, %s" % e)

        self.debug("Starting the two instances..")
        try:
            self.vm_1.start(self.apiclient)
            self.vm_2.start(self.apiclient)
        except Exception as e:
            self.fail("Failed to start the instances, %s" % e)

        self.debug("Validating if the network rules work properly or not?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_07_migrate_instance_in_network(self):
        """ Test migrate an instance in VPC networks
        """

        # Validate the following
        # 1. Migrate the virtual machines to other hosts
        # 2. Vm should be in stopped state. State both the instances
        # 3. Make sure that all the PF,LB and Static NAT rules on this VM
        #    works as expected.
        # 3. Make sure that we are able to access google.com from this user Vm

        self.debug("Checking if the host is available for migration?")
        hosts = Host.list(
            self.apiclient,
            zoneid=self.zone.id,
            type='Routing'
        )
        self.assertEqual(
            isinstance(hosts, list),
            True,
            "List hosts should return a valid list"
        )
        if len(hosts) < 2:
            raise unittest.SkipTest(
                "No host available for migration. Test requires atleast 2 hosts")

        # Remove the host of current VM from the hosts list
        hosts[:] = [host for host in hosts if host.id != self.vm_1.hostid]
        host = hosts[0]

        self.debug("Validating if the network rules work properly or not?")
        self.validate_network_rules()

        self.debug("Migrating VM-ID: %s to Host: %s" % (
            self.vm_1.id,
            host.id
        ))
        try:
            self.vm_1.migrate(self.apiclient, hostid=host.id)
        except Exception as e:
            self.fail("Failed to migrate instance, %s" % e)

        self.debug("Validating if the network rules work properly or not?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_08_user_data(self):
        """ Test user data in virtual machines
        """

        # Validate the following
        # 1. Create a VPC with cidr - 10.1.1.1/16
        # 2. Add network1(10.1.1.1/24) and network2(10.1.2.1/24) to this VPC.
        # 3. Deploy a vm in network1 and a vm in network2 using userdata
        # Steps
        # 1.Query for the user data for both the user vms from both networks
        #   User should be able to query the user data for the vms belonging to
        #   both the networks from the VR

        try:
            ssh = self.vm_1.get_ssh_client(
                ipaddress=self.public_ip_1.ipaddress.ipaddress)
            self.debug("SSH into VM is successfully")
        except Exception as e:
            # Include the exception so SSH failures are diagnosable.
            self.fail("Failed to SSH into instance: %s" % e)

        # Find router associated with user account
        routers = Router.list(
            self.apiclient,
            zoneid=self.zone.id,
            listall=True
        )
        self.assertEqual(
            isinstance(routers, list),
            True,
            "Check list response returns a valid list"
        )
        router = routers[0]

        self.debug("check the userdata with that of present in router")
        try:
            cmds = [
                "wget http://%s/latest/user-data" % router.guestipaddress,
                "cat user-data",
            ]
            for c in cmds:
                result = ssh.execute(c)
                self.debug("%s: %s" % (c, result))
        except Exception as e:
            self.fail("Failed to SSH in Virtual machine: %s" % e)

        res = str(result)
        self.assertEqual(
            res.count(
                self.services["virtual_machine"]["userdata"]),
            1,
            "Verify user data from router"
        )
        return

    @attr(tags=["advanced", "intervlan"])
    def test_09_meta_data(self):
        """ Test meta data in virtual machines
        """

        # Validate the following
        # 1. Create a VPC with cidr - 10.1.1.1/16
        # 2. Add network1(10.1.1.1/24) and network2(10.1.2.1/24) to this VPC.
        # 3. Deploy a vm in network1 and a vm in network2 using userdata
        # Steps
        # 1.Query for the meta data for both the user vms from both networks
        #   User should be able to query the user data for the vms belonging to
        #   both the networks from the VR

        try:
            ssh = self.vm_1.get_ssh_client(
                ipaddress=self.public_ip_1.ipaddress.ipaddress)
            self.debug("SSH into VM is successfully")
        except Exception as e:
            # Include the exception so SSH failures are diagnosable.
            self.fail("Failed to SSH into instance: %s" % e)

        # Find router associated with user account
        routers = Router.list(
            self.apiclient,
            zoneid=self.zone.id,
            listall=True
        )
        self.assertEqual(
            isinstance(routers, list),
            True,
            "Check list response returns a valid list"
        )
        router = routers[0]

        self.debug("check the metadata with that of present in router")
        try:
            # wget saves the response as a file named "meta-data"; cat that
            # file (the original code cat'ed "user-data", which inspected the
            # wrong - possibly stale - file from the user-data test).
            cmds = [
                "wget http://%s/latest/meta-data" % router.guestipaddress,
                "cat meta-data",
            ]
            for c in cmds:
                result = ssh.execute(c)
                self.debug("%s: %s" % (c, result))
        except Exception as e:
            self.fail("Failed to SSH in Virtual machine: %s" % e)

        res = str(result)
        self.assertNotEqual(
            res,
            None,
            "Meta data should be returned from router"
        )
        return

    @attr(tags=["advanced", "intervlan"])
    def test_10_expunge_instance_in_network(self):
        """ Test expunge an instance in VPC networks
        """

        # Validate the following
        # 1. Recover the virtual machines.
        # 2. Vm should be in stopped state. State both the instances
        # 3. Make sure that all the PF,LB and Static NAT rules on this VM
        #    works as expected.
        # 3. Make sure that we are able to access google.com from this user Vm

        self.debug("Validating if the network rules work properly or not?")
        self.validate_network_rules()

        self.debug("Delete virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_1.delete(self.apiclient)
            self.vm_2.delete(self.apiclient)
        except Exception as e:
            self.fail("Failed to destroy the virtual instances, %s" % e)

        self.debug(
            "Waiting for expunge interval to cleanup the network and VMs")
        wait_for_cleanup(
            self.apiclient,
            ["expunge.interval", "expunge.delay"]
        )

        # Check if the network rules still exists after Vm stop
        self.debug("Checking if NAT rules ")
        nat_rules = NATRule.list(
            self.apiclient,
            id=self.nat_rule.id,
            listall=True
        )
        self.assertEqual(
            nat_rules,
            None,
            "List NAT rules should not return anything"
        )

        lb_rules = LoadBalancerRule.list(
            self.apiclient,
            id=self.lb_rule.id,
            listall=True
        )
        self.assertEqual(
            lb_rules,
            None,
            "List LB rules should not return anything"
        )
        return
class TestVMLifeCycleSharedNwVPC(cloudstackTestCase):
    """VM life-cycle tests for VMs attached to both a VPC tier and a
    shared guest network.

    setUpClass provisions:
      * a VPC (CIDR 10.1.1.1/16) with an isolated tier ``network_1``
        (gateway 10.1.1.1) and a shared network ``network_2``
        (gateway 10.1.2.1 - created WITHOUT a vpcid, i.e. outside the VPC),
      * three VMs (vm_1, vm_2, vm_3), each attached to both networks,
      * a load-balancer rule on ``public_ip_1`` balancing all three VMs,
      * a port-forwarding (NAT) rule on ``public_ip_2`` pointing to vm_1,
      * ingress network ACLs opening the NAT/LB ports and an egress ICMP
        ACL on network_1.
    """

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestVMLifeCycleSharedNwVPC,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls.account = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        # Create and enable the VPC offering exactly once. (Previously it
        # was created twice; the first offering was orphaned and never
        # cleaned up.)
        cls.vpc_off = VpcOffering.create(
            cls.api_client,
            cls.services["vpc_offering"]
        )
        cls.vpc_off.update(cls.api_client, state='Enabled')
        cls.services["vpc"]["cidr"] = '10.1.1.1/16'
        cls.vpc = VPC.create(
            cls.api_client,
            cls.services["vpc"],
            vpcofferingid=cls.vpc_off.id,
            zoneid=cls.zone.id,
            account=cls.account.name,
            domainid=cls.account.domainid
        )
        cls.nw_off = NetworkOffering.create(
            cls.api_client,
            cls.services["network_offering"],
            conservemode=False
        )
        # Enable Network offering
        cls.nw_off.update(cls.api_client, state='Enabled')
        # Create the VPC tier using the network offering created above
        cls.network_1 = Network.create(
            cls.api_client,
            cls.services["network"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            networkofferingid=cls.nw_off.id,
            zoneid=cls.zone.id,
            gateway='10.1.1.1',
            vpcid=cls.vpc.id
        )
        cls.nw_off_no_lb = NetworkOffering.create(
            cls.api_client,
            cls.services["network_offering_no_lb"],
            conservemode=False
        )
        cls.shared_nw_off = NetworkOffering.create(
            cls.api_client,
            cls.services["network_off_shared"],
            conservemode=False
        )
        # Enable Network offering
        cls.shared_nw_off.update(cls.api_client, state='Enabled')
        # Create the shared network - note no vpcid: this network lives
        # outside the VPC
        cls.network_2 = Network.create(
            cls.api_client,
            cls.services["network"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            networkofferingid=cls.shared_nw_off.id,
            zoneid=cls.zone.id,
            gateway='10.1.2.1',
        )
        # Spawn three instances, each attached to both networks
        cls.vm_1 = VirtualMachine.create(
            cls.api_client,
            cls.services["virtual_machine"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id,
            networkids=[str(cls.network_1.id),
                        str(cls.network_2.id)]
        )
        cls.vm_2 = VirtualMachine.create(
            cls.api_client,
            cls.services["virtual_machine"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id,
            networkids=[str(cls.network_1.id),
                        str(cls.network_2.id)]
        )
        cls.vm_3 = VirtualMachine.create(
            cls.api_client,
            cls.services["virtual_machine"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id,
            networkids=[str(cls.network_1.id),
                        str(cls.network_2.id)]
        )
        # Acquire a public IP on the VPC tier and load balance all VMs
        cls.public_ip_1 = PublicIPAddress.create(
            cls.api_client,
            accountid=cls.account.name,
            zoneid=cls.zone.id,
            domainid=cls.account.domainid,
            networkid=cls.network_1.id,
            vpcid=cls.vpc.id
        )
        cls.lb_rule = LoadBalancerRule.create(
            cls.api_client,
            cls.services["lbrule"],
            ipaddressid=cls.public_ip_1.ipaddress.id,
            accountid=cls.account.name,
            networkid=cls.network_1.id,
            vpcid=cls.vpc.id,
            domainid=cls.account.domainid
        )
        cls.lb_rule.assign(cls.api_client, [cls.vm_1, cls.vm_2, cls.vm_3])
        # A second public IP carries the port-forwarding (NAT) rule to vm_1
        cls.public_ip_2 = PublicIPAddress.create(
            cls.api_client,
            accountid=cls.account.name,
            zoneid=cls.zone.id,
            domainid=cls.account.domainid,
            networkid=cls.network_1.id,
            vpcid=cls.vpc.id
        )
        cls.nat_rule = NATRule.create(
            cls.api_client,
            cls.vm_1,
            cls.services["natrule"],
            ipaddressid=cls.public_ip_2.ipaddress.id,
            openfirewall=False,
            networkid=cls.network_1.id,
            vpcid=cls.vpc.id
        )
        # Opening up the ports in VPC
        cls.nwacl_nat = NetworkACL.create(
            cls.api_client,
            networkid=cls.network_1.id,
            services=cls.services["natrule"],
            traffictype='Ingress'
        )
        cls.nwacl_lb = NetworkACL.create(
            cls.api_client,
            networkid=cls.network_1.id,
            services=cls.services["lbrule"],
            traffictype='Ingress'
        )
        cls.nwacl_internet_1 = NetworkACL.create(
            cls.api_client,
            networkid=cls.network_1.id,
            services=cls.services["icmp_rule"],
            traffictype='Egress'
        )
        # nw_off_no_lb is included so the offering created above does not
        # leak (it was previously created but never cleaned up)
        cls._cleanup = [
            cls.account,
            cls.service_offering,
            cls.nw_off,
            cls.nw_off_no_lb,
            cls.shared_nw_off,
            cls.vpc_off
        ]
        return

    @classmethod
    def tearDownClass(cls):
        """Delete every resource registered in cls._cleanup."""
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        # Clean up any per-test resources registered in self.cleanup
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def validate_vpc_offering(self, vpc_offering):
        """Assert that the given VPC offering is listable and its name
        matches the listVPCOfferings response."""
        self.debug("Check if the VPC offering is created successfully?")
        vpc_offs = VpcOffering.list(
            self.apiclient,
            id=vpc_offering.id
        )
        self.assertEqual(
            isinstance(vpc_offs, list),
            True,
            "List VPC offerings should return a valid list"
        )
        self.assertEqual(
            vpc_offering.name,
            vpc_offs[0].name,
            "Name of the VPC offering should match with listVPCOff data"
        )
        self.debug(
            "VPC offering is created successfully - %s" %
            vpc_offering.name)
        return

    def validate_vpc_network(self, network, state=None):
        """Assert that the given VPC is listable, its name matches, and
        (optionally) that it is in the expected state."""
        self.debug("Check if the VPC network is created successfully?")
        vpc_networks = VPC.list(
            self.apiclient,
            id=network.id
        )
        self.assertEqual(
            isinstance(vpc_networks, list),
            True,
            "List VPC network should return a valid list"
        )
        self.assertEqual(
            network.name,
            vpc_networks[0].name,
            "Name of the VPC network should match with listVPC data"
        )
        if state:
            self.assertEqual(
                vpc_networks[0].state,
                state,
                "VPC state should be '%s'" % state
            )
        self.debug("VPC network validated - %s" % network.name)
        return

    def validate_network_rules(self):
        """Validate the PF/LB rules by SSHing into vm_1 through
        public_ip_1 and pinging the outside world and the VM gateway."""
        try:
            ssh_1 = self.vm_1.get_ssh_client(
                ipaddress=self.public_ip_1.ipaddress.ipaddress)
            self.debug("SSH into VM is successfully")
            self.debug("Verifying if we can ping to outside world from VM?")
            # Ping to outside world
            res = ssh_1.execute("ping -c 1 www.google.com")
            # res = 64 bytes from maa03s17-in-f20.1e100.net (74.125.236.212):
            # icmp_req=1 ttl=57 time=25.9 ms
            # --- www.l.google.com ping statistics ---
            # 1 packets transmitted, 1 received, 0% packet loss, time 0ms
            # rtt min/avg/max/mdev = 25.970/25.970/25.970/0.000 ms
            result = str(res)
            self.assertEqual(
                result.count("1 received"),
                1,
                "Ping to outside world from VM should be successful"
            )
            self.debug("We should be allowed to ping virtual gateway")
            self.debug("VM gateway: %s" % self.vm_1.nic[0].gateway)
            res = ssh_1.execute("ping -c 1 %s" % self.vm_1.nic[0].gateway)
            self.debug("ping -c 1 %s: %s" % (self.vm_1.nic[0].gateway, res))
            result = str(res)
            self.assertEqual(
                result.count("1 received"),
                1,
                "Ping to VM gateway should be successful"
            )
        except Exception as e:
            self.fail("Failed to SSH into VM - %s, %s" %
                      (self.public_ip_1.ipaddress.ipaddress, e))
        return

    @attr(tags=["advanced", "intervlan"])
    def test_01_deploy_instance_in_network(self):
        """ Test deploy an instance in VPC networks
        """
        # Validate the following
        # 1. Successful deployment of the User VM.
        # 2. Ping any host in the public Internet successfully.
        # 3. Ping the gateways of the VPC's guest network and the
        #    Shared Guest Network successfully.
        self.debug("Check if deployed VMs are in running state?")
        vms = VirtualMachine.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(vms, list),
            True,
            "List VMs should return a valid response"
        )
        for vm in vms:
            self.debug("VM name: %s, VM state: %s" % (vm.name, vm.state))
            self.assertEqual(
                vm.state,
                "Running",
                "Vm state should be running for each VM deployed"
            )
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_02_stop_instance_in_network(self):
        """ Test stop an instance in VPC networks
        """
        # Validate the following
        # 1. Stop one of the virtual machines.
        # 2. Rules should be still configured on virtual router.
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        self.debug("Stopping one of the virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_2.stop(self.apiclient)
        except Exception as e:
            self.fail("Failed to stop the virtual instances, %s" % e)
        self.debug("Check if the instance is in stopped state?")
        vms = VirtualMachine.list(
            self.apiclient,
            id=self.vm_2.id,
            listall=True
        )
        self.assertEqual(
            isinstance(vms, list),
            True,
            "List virtual machines should return a valid list"
        )
        vm = vms[0]
        self.assertEqual(
            vm.state,
            "Stopped",
            "Virtual machine should be in stopped state"
        )
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_03_start_instance_in_network(self):
        """ Test start an instance in VPC networks
        """
        # Validate the following
        # 1. Start the virtual machine stopped in test_02.
        # 2. Rules should be still configured on virtual router.
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        self.debug("Starting one of the virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_2.start(self.apiclient)
        except Exception as e:
            self.fail("Failed to start the virtual instances, %s" % e)
        self.debug("Check if the instance is in running state?")
        vms = VirtualMachine.list(
            self.apiclient,
            id=self.vm_2.id,
            listall=True
        )
        self.assertEqual(
            isinstance(vms, list),
            True,
            "List virtual machines should return a valid list"
        )
        vm = vms[0]
        self.assertEqual(
            vm.state,
            "Running",
            "Virtual machine should be in running state"
        )
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_04_reboot_instance_in_network(self):
        """ Test reboot an instance in VPC networks
        """
        # Validate the following
        # 1. Reboot the virtual machines.
        # 2. Rules should be still configured on virtual router.
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        self.debug("Restarting the virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_1.reboot(self.apiclient)
            self.vm_2.reboot(self.apiclient)
        except Exception as e:
            self.fail("Failed to reboot the virtual instances, %s" % e)
        self.debug("Check if the instances are in running state?")
        vms = VirtualMachine.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(vms, list),
            True,
            "List virtual machines should return a valid list"
        )
        for vm in vms:
            self.assertEqual(
                vm.state,
                "Running",
                "Virtual machine should be in running state"
            )
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_05_destroy_instance_in_network(self):
        """ Test destroy an instance in VPC networks
        """
        # Validate the following
        # 1. Destroy one of the virtual machines.
        # 2. Rules should be still configured on virtual router.
        self.debug("Destroying one of the virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_2.delete(self.apiclient)
        except Exception as e:
            self.fail("Failed to destroy the virtual instances, %s" % e)
        self.debug("Check if the instance is in expunging state?")
        vms = VirtualMachine.list(
            self.apiclient,
            id=self.vm_2.id,
            listall=True
        )
        self.assertEqual(
            isinstance(vms, list),
            True,
            "List virtual machines should return a valid list"
        )
        vm = vms[0]
        self.assertEqual(
            vm.state,
            "Expunging",
            "Virtual machine should be in expunging state"
        )
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_06_recover_instance_in_network(self):
        """ Test recover an instance in VPC networks
        """
        # Validate the following
        # 1. Recover the destroyed virtual machine.
        # 2. Vm should be in stopped state after recovery; start it again.
        # 3. Make sure that all the PF,LB and Static NAT rules on this VM
        #    works as expected.
        # 4. Make sure that we are able to access google.com from this user Vm
        self.debug("Recovering the expunged virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_2.recover(self.apiclient)
        except Exception as e:
            self.fail("Failed to recover the virtual instances, %s" % e)
        self.debug("Check if the instance is in stopped state?")
        vms = VirtualMachine.list(
            self.apiclient,
            id=self.vm_2.id,
            listall=True
        )
        self.assertEqual(
            isinstance(vms, list),
            True,
            "List virtual machines should return a valid list"
        )
        vm = vms[0]
        self.assertEqual(
            vm.state,
            "Stopped",
            "Virtual machine should be in stopped state"
        )
        self.debug("Starting the instance: %s" % self.vm_2.name)
        try:
            self.vm_2.start(self.apiclient)
        except Exception as e:
            self.fail("Failed to start the instances, %s" % e)
        vms = VirtualMachine.list(
            self.apiclient,
            id=self.vm_2.id,
            listall=True
        )
        self.assertEqual(
            isinstance(vms, list),
            True,
            "List virtual machines should return a valid list"
        )
        vm = vms[0]
        self.assertEqual(
            vm.state,
            "Running",
            "Virtual machine should be in running state"
        )
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_07_migrate_instance_in_network(self):
        """ Test migrate an instance in VPC networks
        """
        # Validate the following
        # 1. Migrate the virtual machine to another host.
        # 2. Make sure that all the PF,LB and Static NAT rules on this VM
        #    works as expected.
        # 3. Make sure that we are able to access google.com from this user Vm
        self.debug("Checking if the host is available for migration?")
        hosts = Host.list(
            self.apiclient,
            zoneid=self.zone.id,
            type='Routing'
        )
        self.assertEqual(
            isinstance(hosts, list),
            True,
            "List hosts should return a valid list"
        )
        if len(hosts) < 2:
            raise unittest.SkipTest(
                "No host available for migration. Test requires atleast 2 hosts")
        # Remove the host of current VM from the hosts list
        hosts[:] = [host for host in hosts if host.id != self.vm_1.hostid]
        host = hosts[0]
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        self.debug("Migrating VM-ID: %s to Host: %s" % (
            self.vm_1.id,
            host.id
        ))
        try:
            self.vm_1.migrate(self.apiclient, hostid=host.id)
        except Exception as e:
            self.fail("Failed to migrate instance, %s" % e)
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        return

    @attr(tags=["advanced", "intervlan"])
    def test_08_user_data(self):
        """ Test user data in virtual machines
        """
        # Validate the following
        # 1. Create a VPC with cidr - 10.1.1.1/16
        # 2. Add network1(10.1.1.1/24) and network2(10.1.2.1/24) to this VPC.
        # 3. Deploy a vm in network1 and a vm in network2 using userdata
        # Steps
        # 1.Query for the user data for both the user vms from both networks
        #   User should be able to query the user data for the vms belonging to
        #   both the networks from the VR
        try:
            ssh = self.vm_1.get_ssh_client(
                ipaddress=self.public_ip_1.ipaddress.ipaddress)
            self.debug("SSH into VM is successfully")
        except Exception as e:
            self.fail("Failed to SSH into instance: %s" % e)
        # Find router associated with user account
        routers = Router.list(
            self.apiclient,
            zoneid=self.zone.id,
            listall=True
        )
        self.assertEqual(
            isinstance(routers, list),
            True,
            "Check list response returns a valid list"
        )
        router = routers[0]
        self.debug("check the userdata with that of present in router")
        try:
            # Fetch user-data from the VR, then print the downloaded file
            cmds = [
                "wget http://%s/latest/user-data" % router.guestipaddress,
                "cat user-data",
            ]
            for c in cmds:
                result = ssh.execute(c)
                self.debug("%s: %s" % (c, result))
        except Exception as e:
            self.fail("Failed to SSH in Virtual machine: %s" % e)
        res = str(result)
        self.assertEqual(
            res.count(
                self.services["virtual_machine"]["userdata"]),
            1,
            "Verify user data from router"
        )
        return

    @attr(tags=["advanced", "intervlan"])
    def test_09_meta_data(self):
        """ Test meta data in virtual machines
        """
        # Validate the following
        # 1. Create a VPC with cidr - 10.1.1.1/16
        # 2. Add network1(10.1.1.1/24) and network2(10.1.2.1/24) to this VPC.
        # 3. Deploy a vm in network1 and a vm in network2 using userdata
        # Steps
        # 1.Query for the meta data for both the user vms from both networks
        #   User should be able to query the meta data for the vms belonging to
        #   both the networks from the VR
        try:
            ssh = self.vm_1.get_ssh_client(
                ipaddress=self.public_ip_1.ipaddress.ipaddress)
            self.debug("SSH into VM is successfully")
        except Exception as e:
            self.fail("Failed to SSH into instance: %s" % e)
        # Find router associated with user account
        routers = Router.list(
            self.apiclient,
            zoneid=self.zone.id,
            listall=True
        )
        self.assertEqual(
            isinstance(routers, list),
            True,
            "Check list response returns a valid list"
        )
        router = routers[0]
        self.debug("check the metadata with that of present in router")
        try:
            # Fetch meta-data from the VR, then print the downloaded file.
            # (Fixed: the second command previously cat'ed "user-data",
            # which wget never downloaded here.)
            cmds = [
                "wget http://%s/latest/meta-data" % router.guestipaddress,
                "cat meta-data",
            ]
            for c in cmds:
                result = ssh.execute(c)
                self.debug("%s: %s" % (c, result))
        except Exception as e:
            self.fail("Failed to SSH in Virtual machine: %s" % e)
        res = str(result)
        self.assertNotEqual(
            res,
            None,
            "Meta data should be returned from router"
        )
        return

    @attr(tags=["advanced", "intervlan"])
    def test_10_expunge_instance_in_network(self):
        """ Test expunge an instance in VPC networks
        """
        # Validate the following
        # 1. Delete one VM, wait for the expunge interval and verify the
        #    remaining network rules still work.
        # 2. Delete the remaining VMs, wait again, and verify the PF (NAT)
        #    and LB rules are cleaned up with them.
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        self.debug("Delete virtual machines in account: %s" %
                   self.account.name)
        try:
            self.vm_2.delete(self.apiclient)
        except Exception as e:
            self.fail("Failed to destroy the virtual instances, %s" % e)
        self.debug(
            "Waiting for expunge interval to cleanup the network and VMs")
        wait_for_cleanup(
            self.apiclient,
            ["expunge.interval", "expunge.delay"]
        )
        self.debug("Validating if network rules are configured properly?")
        self.validate_network_rules()
        self.debug(
            "Deleting the rest of the virtual machines in account: %s" %
            self.account.name)
        try:
            self.vm_1.delete(self.apiclient)
            self.vm_3.delete(self.apiclient)
        except Exception as e:
            self.fail("Failed to destroy the virtual instances, %s" % e)
        self.debug(
            "Waiting for expunge interval to cleanup the network and VMs")
        wait_for_cleanup(
            self.apiclient,
            ["expunge.interval", "expunge.delay"]
        )
        # Check if the network rules still exists after Vm expunge
        self.debug("Checking if NAT rules ")
        nat_rules = NATRule.list(
            self.apiclient,
            id=self.nat_rule.id,
            listall=True
        )
        self.assertEqual(
            nat_rules,
            None,
            "List NAT rules should not return anything"
        )
        lb_rules = LoadBalancerRule.list(
            self.apiclient,
            id=self.lb_rule.id,
            listall=True
        )
        self.assertEqual(
            lb_rules,
            None,
            "List LB rules should not return anything"
        )
        return
class TestVMLifeCycleBothIsolated(cloudstackTestCase):
    """Edge-case VM deployment tests with two isolated VPC tiers.

    setUpClass provisions a VPC (CIDR 10.1.1.1/16) with two isolated
    tiers: ``network_1`` (gateway 10.1.1.1, LB-capable offering) and
    ``network_2`` (gateway 10.1.2.1, no-LB offering). No VMs are created
    here; the tests deploy their own.
    """

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestVMLifeCycleBothIsolated,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls.account = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        # Create and enable the VPC offering exactly once. (Previously it
        # was created twice; the first offering was orphaned and never
        # cleaned up.)
        cls.vpc_off = VpcOffering.create(
            cls.api_client,
            cls.services["vpc_offering"]
        )
        cls.vpc_off.update(cls.api_client, state='Enabled')
        cls.services["vpc"]["cidr"] = '10.1.1.1/16'
        cls.vpc = VPC.create(
            cls.api_client,
            cls.services["vpc"],
            vpcofferingid=cls.vpc_off.id,
            zoneid=cls.zone.id,
            account=cls.account.name,
            domainid=cls.account.domainid
        )
        cls.nw_off = NetworkOffering.create(
            cls.api_client,
            cls.services["network_offering"],
            conservemode=False
        )
        # Enable Network offering
        cls.nw_off.update(cls.api_client, state='Enabled')
        # Creating the first tier using the network offering created
        cls.network_1 = Network.create(
            cls.api_client,
            cls.services["network"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            networkofferingid=cls.nw_off.id,
            zoneid=cls.zone.id,
            gateway='10.1.1.1',
            vpcid=cls.vpc.id
        )
        cls.nw_off_no_lb = NetworkOffering.create(
            cls.api_client,
            cls.services["network_offering_no_lb"],
            conservemode=False
        )
        # Enable Network offering
        cls.nw_off_no_lb.update(cls.api_client, state='Enabled')
        # Creating the second tier using the no-LB network offering
        cls.network_2 = Network.create(
            cls.api_client,
            cls.services["network"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            networkofferingid=cls.nw_off_no_lb.id,
            zoneid=cls.zone.id,
            gateway='10.1.2.1',
            vpcid=cls.vpc.id
        )
        cls._cleanup = [
            cls.account,
            cls.service_offering,
            cls.nw_off,
            cls.nw_off_no_lb,
            cls.vpc_off
        ]
        return

    @classmethod
    def tearDownClass(cls):
        """Delete every resource registered in cls._cleanup."""
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        # Clean up, terminate the created network offerings
        try:
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def validate_vpc_offering(self, vpc_offering):
        """Assert that the given VPC offering is listable and its name
        matches the listVPCOfferings response."""
        self.debug("Check if the VPC offering is created successfully?")
        vpc_offs = VpcOffering.list(
            self.apiclient,
            id=vpc_offering.id
        )
        self.assertEqual(
            isinstance(vpc_offs, list),
            True,
            "List VPC offerings should return a valid list"
        )
        self.assertEqual(
            vpc_offering.name,
            vpc_offs[0].name,
            "Name of the VPC offering should match with listVPCOff data"
        )
        self.debug(
            "VPC offering is created successfully - %s" %
            vpc_offering.name)
        return

    def validate_vpc_network(self, network, state=None):
        """Assert that the given VPC is listable, its name matches, and
        (optionally) that it is in the expected state."""
        self.debug("Check if the VPC network is created successfully?")
        vpc_networks = VPC.list(
            self.apiclient,
            id=network.id
        )
        self.assertEqual(
            isinstance(vpc_networks, list),
            True,
            "List VPC network should return a valid list"
        )
        self.assertEqual(
            network.name,
            vpc_networks[0].name,
            "Name of the VPC network should match with listVPC data"
        )
        if state:
            self.assertEqual(
                vpc_networks[0].state,
                state,
                "VPC state should be '%s'" % state
            )
        self.debug("VPC network validated - %s" % network.name)
        return

    def validate_network_rules(self):
        """Validate the PF/LB rules by SSHing into vm_1 and pinging the
        outside world and the VM gateway.

        NOTE(review): this class never creates ``self.vm_1`` or
        ``self.public_ip_1`` - the helper appears to be copied from the
        sibling suites and is not used by the tests here; confirm before
        relying on it.
        """
        try:
            ssh_1 = self.vm_1.get_ssh_client(
                ipaddress=self.public_ip_1.ipaddress.ipaddress)
            self.debug("SSH into VM is successfully")
            self.debug("Verifying if we can ping to outside world from VM?")
            # Ping to outside world
            res = ssh_1.execute("ping -c 1 www.google.com")
            # res = 64 bytes from maa03s17-in-f20.1e100.net (74.125.236.212):
            # icmp_req=1 ttl=57 time=25.9 ms
            # --- www.l.google.com ping statistics ---
            # 1 packets transmitted, 1 received, 0% packet loss, time 0ms
            # rtt min/avg/max/mdev = 25.970/25.970/25.970/0.000 ms
            result = str(res)
            self.assertEqual(
                result.count("1 received"),
                1,
                "Ping to outside world from VM should be successful"
            )
            self.debug("We should be allowed to ping virtual gateway")
            self.debug("VM gateway: %s" % self.vm_1.nic[0].gateway)
            res = ssh_1.execute("ping -c 1 %s" % self.vm_1.nic[0].gateway)
            self.debug("ping -c 1 %s: %s" % (self.vm_1.nic[0].gateway, res))
            result = str(res)
            self.assertEqual(
                result.count("1 received"),
                1,
                "Ping to VM gateway should be successful"
            )
        except Exception as e:
            self.fail("Failed to SSH into VM - %s, %s" %
                      (self.public_ip_1.ipaddress.ipaddress, e))
        return

    @attr(tags=["advanced", "intervlan"])
    def test_01_deploy_vm_two_isolated_nw(self):
        """ Test deploy virtual machine in two isolated networks"""
        # Validate the following
        # 1. Create a VPC with cidr - 10.1.1.1/16
        # 2. Add network1(10.1.1.1/24) and network2(10.1.2.1/24) to this VPC.
        # Steps:
        # 1. Deploy a VM such that the VM is part of both networks-network1
        #    and network2. The deployment must fail.
        self.debug("Validating the VPC offering created")
        self.validate_vpc_offering(self.vpc_off)
        self.debug("Validating VPC created in setup class")
        self.validate_vpc_network(self.vpc)
        self.debug("Deploying virtual machine in two isolated networks")
        # A VM may not span two isolated tiers of the same VPC, so the
        # create call is expected to raise
        with self.assertRaises(Exception):
            VirtualMachine.create(
                self.apiclient,
                self.services["virtual_machine"],
                accountid=self.account.name,
                domainid=self.account.domainid,
                serviceofferingid=self.service_offering.id,
                networkids=[str(self.network_1.id),
                            str(self.network_2.id)]
            )
        self.debug("Deploy VM in 2 isolated networks failed")
        return

    @attr(tags=["advanced", "intervlan"])
    def test_02_deploy_vm_vpcvr_stopped(self):
        """ Test deploy virtual machine when VPC VR in stopped state"""
        # Validate the following
        # Pre-Req:
        # 1. Create a VPC with cidr - 10.1.1.1/16
        # 2. Add network1(10.1.1.1/24) to this VPC.
        # 3. Stop the VPC Virtual Router
        # Steps:
        # 1. Deploy a VM using the default CentOS 6.2 Template; deployment
        #    must succeed and the VM must come up Running
        self.debug("Finding the virtual router for vpc: %s" % self.vpc.id)
        routers = Router.list(
            self.apiclient,
            zoneid=self.zone.id,
            listall=True
        )
        self.assertEqual(
            isinstance(routers, list),
            True,
            "List routers should return router for vpc: %s" %
            self.vpc.id
        )
        router = routers[0]
        self.debug("Check state of VPC virtual router, state: %s" %
                   router.state)
        # Stop the VR first if it is up, then re-read its state
        if router.state == "Running":
            self.debug("Router state is running, stop it!")
            Router.stop(self.apiclient, id=router.id)
            self.debug("Check the router state again")
            routers = Router.list(
                self.apiclient,
                id=router.id,
                listall=True
            )
            self.assertEqual(
                isinstance(routers, list),
                True,
                "List routers should return router for vpc: %s" %
                self.vpc.id
            )
            router = routers[0]
            self.debug("router.state %s" %
                       router.state)
            self.assertEqual(
                router.state,
                "Stopped",
                "Router state should be stopped"
            )
        self.debug("Deploy an instance in network: %s with stopped VPCVR" %
                   self.network_1.name)
        try:
            vm = VirtualMachine.create(
                self.apiclient,
                self.services["virtual_machine"],
                accountid=self.account.name,
                domainid=self.account.domainid,
                serviceofferingid=self.service_offering.id,
                networkids=[str(self.network_1.id)]
            )
        except Exception as e:
            self.fail("Failed to deploy the virtual instance: %s" % e)
        self.debug("Verify the deployment of virtual instance")
        vms = VirtualMachine.list(
            self.apiclient,
            id=vm.id,
            listall=True
        )
        self.assertEqual(
            isinstance(vms, list),
            True,
            "List vms shall return a valid response"
        )
        vm_response = vms[0]
        self.assertEqual(
            vm_response.state,
            "Running",
            "VM state should be running after deployment"
        )
        return
class TestVMLifeCycleStoppedVPCVR(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.api_client = super(
TestVMLifeCycleStoppedVPCVR,
cls
).getClsTestClient().getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client, cls.services)
cls.zone = get_zone(cls.api_client, cls.services)
cls.template = get_template(
cls.api_client,
cls.zone.id,
cls.services["ostype"]
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.service_offering = ServiceOffering.create(
cls.api_client,
cls.services["service_offering"]
)
cls.account = Account.create(
cls.api_client,
cls.services["account"],
admin=True,
domainid=cls.domain.id
)
cls.vpc_off = VpcOffering.create(
cls.api_client,
cls.services["vpc_offering"]
)
cls.vpc_off.update(cls.api_client, state='Enabled')
cls.services["vpc"]["cidr"] = '10.1.1.1/16'
cls.vpc = VPC.create(
cls.api_client,
cls.services["vpc"],
vpcofferingid=cls.vpc_off.id,
zoneid=cls.zone.id,
account=cls.account.name,
domainid=cls.account.domainid
)
cls.nw_off = NetworkOffering.create(
cls.api_client,
cls.services["network_offering"],
conservemode=False
)
# Enable Network offering
cls.nw_off.update(cls.api_client, state='Enabled')
# Creating network using the network offering created
cls.network_1 = Network.create(
cls.api_client,
cls.services["network"],
accountid=cls.account.name,
domainid=cls.account.domainid,
networkofferingid=cls.nw_off.id,
zoneid=cls.zone.id,
gateway='10.1.1.1',
vpcid=cls.vpc.id
)
cls.nw_off_no_lb = NetworkOffering.create(
cls.api_client,
cls.services["network_offering_no_lb"],
conservemode=False
)
# Enable Network offering
cls.nw_off_no_lb.update(cls.api_client, state='Enabled')
# Creating network using the network offering created
cls.network_2 = Network.create(
cls.api_client,
cls.services["network"],
accountid=cls.account.name,
domainid=cls.account.domainid,
networkofferingid=cls.nw_off_no_lb.id,
zoneid=cls.zone.id,
gateway='10.1.2.1',
vpcid=cls.vpc.id
)
# Spawn an instance in that network
cls.vm_1 = VirtualMachine.create(
cls.api_client,
cls.services["virtual_machine"],
accountid=cls.account.name,
domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id,
networkids=[str(cls.network_1.id)]
)
# Spawn an instance in that network
cls.vm_2 = VirtualMachine.create(
cls.api_client,
cls.services["virtual_machine"],
accountid=cls.account.name,
domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id,
networkids=[str(cls.network_1.id)]
)
cls.vm_3 = VirtualMachine.create(
cls.api_client,
cls.services["virtual_machine"],
accountid=cls.account.name,
domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id,
networkids=[str(cls.network_2.id)]
)
cls.public_ip_1 = PublicIPAddress.create(
cls.api_client,
accountid=cls.account.name,
zoneid=cls.zone.id,
domainid=cls.account.domainid,
networkid=cls.network_1.id,
vpcid=cls.vpc.id
)
cls.lb_rule = LoadBalancerRule.create(
cls.api_client,
cls.services["lbrule"],
ipaddressid=cls.public_ip_1.ipaddress.id,
accountid=cls.account.name,
networkid=cls.network_1.id,
vpcid=cls.vpc.id,
domainid=cls.account.domainid
)
cls.lb_rule.assign(cls.api_client, [cls.vm_1, cls.vm_2])
cls.public_ip_2 = PublicIPAddress.create(
cls.api_client,
accountid=cls.account.name,
zoneid=cls.zone.id,
domainid=cls.account.domainid,
networkid=cls.network_1.id,
vpcid=cls.vpc.id
)
cls.nat_rule = NATRule.create(
cls.api_client,
cls.vm_1,
cls.services["natrule"],
ipaddressid=cls.public_ip_2.ipaddress.id,
openfirewall=False,
networkid=cls.network_1.id,
vpcid=cls.vpc.id
)
# Opening up the ports in VPC
cls.nwacl_nat = NetworkACL.create(
cls.api_client,
networkid=cls.network_1.id,
services=cls.services["natrule"],
traffictype='Ingress'
)
cls.nwacl_lb = NetworkACL.create(
cls.api_client,
networkid=cls.network_1.id,
services=cls.services["lbrule"],
traffictype='Ingress'
)
cls.nwacl_internet = NetworkACL.create(
cls.api_client,
networkid=cls.network_1.id,
services=cls.services["icmp_rule"],
traffictype='Egress'
)
cls._cleanup = [
cls.account,
cls.service_offering,
cls.nw_off,
cls.nw_off_no_lb
]
return
@classmethod
def tearDownClass(cls):
try:
cleanup_resources(cls.api_client, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.debug("Check the status of VPC virtual router")
routers = Router.list(
self.apiclient,
zoneid=self.zone.id,
listall=True
)
if not isinstance(routers, list):
raise Exception("No response from list routers API")
self.router = routers[0]
if self.router.state == "Running":
Router.stop(self.apiclient, id=self.router.id)
self.cleanup = []
return
def tearDown(self):
try:
#Clean up, terminate the created network offerings
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def validate_vpc_offering(self, vpc_offering):
"""Validates the VPC offering"""
self.debug("Check if the VPC offering is created successfully?")
vpc_offs = VpcOffering.list(
self.apiclient,
id=vpc_offering.id
)
self.assertEqual(
isinstance(vpc_offs, list),
True,
"List VPC offerings should return a valid list"
)
self.assertEqual(
vpc_offering.name,
vpc_offs[0].name,
"Name of the VPC offering should match with listVPCOff data"
)
self.debug(
"VPC offering is created successfully - %s" %
vpc_offering.name)
return
def validate_vpc_network(self, network, state=None):
"""Validates the VPC network"""
self.debug("Check if the VPC network is created successfully?")
vpc_networks = VPC.list(
self.apiclient,
id=network.id
)
self.assertEqual(
isinstance(vpc_networks, list),
True,
"List VPC network should return a valid list"
)
self.assertEqual(
network.name,
vpc_networks[0].name,
"Name of the VPC network should match with listVPC data"
)
if state:
self.assertEqual(
vpc_networks[0].state,
state,
"VPC state should be '%s'" % state
)
self.debug("VPC network validated - %s" % network.name)
return
def validate_network_rules(self):
"""Validates if the network rules work properly or not?"""
try:
ssh_1 = self.vm_1.get_ssh_client(
ipaddress=self.public_ip_1.ipaddress.ipaddress)
self.debug("SSH into VM is successfully")
self.debug("Verifying if we can ping to outside world from VM?")
# Ping to outsite world
res = ssh_1.execute("ping -c 1 www.google.com")
# res = 64 bytes from maa03s17-in-f20.1e100.net (74.125.236.212):
# icmp_req=1 ttl=57 time=25.9 ms
# --- www.l.google.com ping statistics ---
# 1 packets transmitted, 1 received, 0% packet loss, time 0ms
# rtt min/avg/max/mdev = 25.970/25.970/25.970/0.000 ms
except Exception as e:
self.fail("Failed to SSH into VM - %s, %s" %
(self.public_ip_1.ipaddress.ipaddress, e))
result = str(res)
self.assertEqual(
result.count("1 received"),
1,
"Ping to outside world from VM should be successful"
)
self.debug("Checking if we can SSH into VM_1?")
try:
ssh_2 = self.vm_1.get_ssh_client(
ipaddress=self.public_ip_2.ipaddress.ipaddress)
self.debug("SSH into VM is successfully")
self.debug("Verifying if we can ping to outside world from VM?")
res = ssh_2.execute("ping -c 1 www.google.com")
except Exception as e:
self.fail("Failed to SSH into VM - %s, %s" %
(self.public_ip_2.ipaddress.ipaddress, e))
result = str(res)
self.assertEqual(
result.count("1 received"),
1,
"Ping to outside world from VM should be successful"
)
return
@attr(tags=["advanced", "intervlan"])
def test_01_deploy_instance_in_network(self):
""" Test deploy an instance in VPC networks
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Add network1(10.1.1.1/24) and network2(10.1.2.1/24) to this VPC.
# Steps:
# 1. Deploy vm1 and vm2 in network1 and vm3 and vm4 in network2 using
# the default CentOS 6.2 Template
self.debug("Check if deployed VMs are in running state?")
vms = VirtualMachine.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
isinstance(vms, list),
True,
"List VMs should return a valid response"
)
for vm in vms:
self.debug("VM name: %s, VM state: %s" % (vm.name, vm.state))
self.assertEqual(
vm.state,
"Running",
"Vm state should be running for each VM deployed"
)
return
@attr(tags=["advanced", "intervlan"])
def test_02_stop_instance_in_network(self):
""" Test stop an instance in VPC networks
"""
# Validate the following
# 1. Stop the virtual machines.
# 2. Rules should be still configured on virtual router.
self.debug("Stopping the virtual machines in account: %s" %
self.account.name)
try:
self.vm_1.stop(self.apiclient)
self.vm_2.stop(self.apiclient)
except Exception as e:
self.fail("Failed to stop the virtual instances, %s" % e)
# Check if the network rules still exists after Vm stop
self.debug("Checking if NAT rules ")
nat_rules = NATRule.list(
self.apiclient,
id=self.nat_rule.id,
listall=True
)
self.assertEqual(
isinstance(nat_rules, list),
True,
"List NAT rules shall return a valid list"
)
lb_rules = LoadBalancerRule.list(
self.apiclient,
id=self.lb_rule.id,
listall=True
)
self.assertEqual(
isinstance(lb_rules, list),
True,
"List LB rules shall return a valid list"
)
return
@attr(tags=["advanced", "intervlan"])
def test_03_start_instance_in_network(self):
""" Test start an instance in VPC networks
"""
# Validate the following
# 1. Start the virtual machines.
# 2. Vm should be started successfully.
# 3. Make sure that all the PF,LB and Static NAT rules on this VM
# works as expected.
# 3. Make sure that we are able to access google.com from this user Vm
self.debug("Starting the virtual machines in account: %s" %
self.account.name)
try:
self.vm_1.start(self.apiclient)
self.vm_2.start(self.apiclient)
except Exception as e:
self.fail("Failed to start the virtual instances, %s" % e)
self.debug("Validating if the network rules work properly or not?")
self.validate_network_rules()
return
@attr(tags=["advanced", "intervlan"])
def test_04_reboot_instance_in_network(self):
""" Test reboot an instance in VPC networks
"""
# Validate the following
# 1. Reboot the virtual machines.
# 2. Vm should be started successfully.
# 3. Make sure that all the PF,LB and Static NAT rules on this VM
# works as expected.
# 3. Make sure that we are able to access google.com from this user Vm
self.debug("Validating if the network rules work properly or not?")
self.validate_network_rules()
self.debug("Starting the virtual machines in account: %s" %
self.account.name)
try:
self.vm_1.reboot(self.apiclient)
self.vm_2.reboot(self.apiclient)
except Exception as e:
self.fail("Failed to reboot the virtual instances, %s" % e)
self.debug("Validating if the network rules work properly or not?")
self.validate_network_rules()
return
@attr(tags=["advanced", "intervlan"])
def test_05_destroy_instance_in_network(self):
""" Test destroy an instance in VPC networks
"""
# Validate the following
# 1. Destory the virtual machines.
# 2. Rules should be still configured on virtual router.
self.debug("Validating if the network rules work properly or not?")
self.validate_network_rules()
self.debug("Destroying the virtual machines in account: %s" %
self.account.name)
try:
self.vm_1.delete(self.apiclient)
self.vm_2.delete(self.apiclient)
except Exception as e:
self.fail("Failed to stop the virtual instances, %s" % e)
# Check if the network rules still exists after Vm stop
self.debug("Checking if NAT rules ")
nat_rules = NATRule.list(
self.apiclient,
id=self.nat_rule.id,
listall=True
)
self.assertEqual(
isinstance(nat_rules, list),
True,
"List NAT rules shall return a valid list"
)
lb_rules = LoadBalancerRule.list(
self.apiclient,
id=self.lb_rule.id,
listall=True
)
self.assertEqual(
isinstance(lb_rules, list),
True,
"List LB rules shall return a valid list"
)
return
@attr(tags=["advanced", "intervlan"])
def test_06_recover_instance_in_network(self):
""" Test recover an instance in VPC networks
"""
# Validate the following
# 1. Recover the virtual machines.
# 2. Vm should be in stopped state. State both the instances
# 3. Make sure that all the PF,LB and Static NAT rules on this VM
# works as expected.
# 3. Make sure that we are able to access google.com from this user Vm
self.debug("Recovering the expunged virtual machines in account: %s" %
self.account.name)
try:
self.vm_1.recover(self.apiclient)
self.vm_2.recover(self.apiclient)
except Exception as e:
self.fail("Failed to recover the virtual instances, %s" % e)
self.debug("Starting the two instances..")
try:
self.vm_1.start(self.apiclient)
self.vm_2.start(self.apiclient)
except Exception as e:
self.fail("Failed to start the instances, %s" % e)
self.debug("Validating if the network rules work properly or not?")
self.validate_network_rules()
return
@attr(tags=["advanced", "intervlan"])
def test_07_migrate_instance_in_network(self):
""" Test migrate an instance in VPC networks
"""
# Validate the following
# 1. Migrate the virtual machines to other hosts
# 2. Vm should be in stopped state. State both the instances
# 3. Make sure that all the PF,LB and Static NAT rules on this VM
# works as expected.
# 3. Make sure that we are able to access google.com from this user Vm
self.debug("Checking if the host is available for migration?")
hosts = Host.list(
self.apiclient,
zoneid=self.zone.id,
type='Routing'
)
self.assertEqual(
isinstance(hosts, list),
True,
"List hosts should return a valid list"
)
if len(hosts) < 2:
raise unittest.SkipTest(
"No host available for migration. Test requires atleast 2 hosts")
# Remove the host of current VM from the hosts list
hosts[:] = [host for host in hosts if host.id != self.vm_1.hostid]
host = hosts[0]
self.debug("Validating if the network rules work properly or not?")
self.validate_network_rules()
self.debug("Migrating VM-ID: %s to Host: %s" % (
self.vm_1.id,
host.id
))
try:
self.vm_1.migrate(self.apiclient, hostid=host.id)
except Exception as e:
self.fail("Failed to migrate instance, %s" % e)
self.debug("Validating if the network rules work properly or not?")
self.validate_network_rules()
return
@attr(tags=["advanced", "intervlan"])
def test_08_user_data(self):
""" Test user data in virtual machines
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Add network1(10.1.1.1/24) and network2(10.1.2.1/24) to this VPC.
# 3. Deploy a vm in network1 and a vm in network2 using userdata
# Steps
# 1.Query for the user data for both the user vms from both networks
# User should be able to query the user data for the vms belonging to
# both the networks from the VR
try:
ssh = self.vm_1.get_ssh_client(
ipaddress=self.public_ip_1.ipaddress.ipaddress)
self.debug("SSH into VM is successfully")
except Exception as e:
self.fail("Failed to SSH into instance")
# Find router associated with user account
routers = Router.list(
self.apiclient,
zoneid=self.zone.id,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"Check list response returns a valid list"
)
router = routers[0]
self.debug("check the userdata with that of present in router")
try:
cmds = [
"wget http://%s/latest/user-data" % router.guestipaddress,
"cat user-data",
]
for c in cmds:
result = ssh.execute(c)
self.debug("%s: %s" % (c, result))
except Exception as e:
self.fail("Failed to SSH in Virtual machine: %s" % e)
res = str(result)
self.assertEqual(
res.count(
self.services["virtual_machine"]["userdata"]),
1,
"Verify user data from router"
)
return
@attr(tags=["advanced", "intervlan"])
def test_09_meta_data(self):
""" Test meta data in virtual machines
"""
# Validate the following
# 1. Create a VPC with cidr - 10.1.1.1/16
# 2. Add network1(10.1.1.1/24) and network2(10.1.2.1/24) to this VPC.
# 3. Deploy a vm in network1 and a vm in network2 using userdata
# Steps
# 1.Query for the meta data for both the user vms from both networks
# User should be able to query the user data for the vms belonging to
# both the networks from the VR
try:
ssh = self.vm_1.get_ssh_client(
ipaddress=self.public_ip_1.ipaddress.ipaddress)
self.debug("SSH into VM is successfully")
except Exception as e:
self.fail("Failed to SSH into instance")
# Find router associated with user account
routers = Router.list(
self.apiclient,
zoneid=self.zone.id,
listall=True
)
self.assertEqual(
isinstance(routers, list),
True,
"Check list response returns a valid list"
)
router = routers[0]
self.debug("check the metadata with that of present in router")
try:
cmds = [
"wget http://%s/latest/meta-data" % router.guestipaddress,
"cat user-data",
]
for c in cmds:
result = ssh.execute(c)
self.debug("%s: %s" % (c, result))
except Exception as e:
self.fail("Failed to SSH in Virtual machine: %s" % e)
res = str(result)
self.assertNotEqual(
res,
None,
"Meta data should be returned from router"
)
return
@attr(tags=["advanced", "intervlan"])
def test_10_expunge_instance_in_network(self):
""" Test expunge an instance in VPC networks
"""
# Validate the following
# 1. Recover the virtual machines.
# 2. Vm should be in stopped state. State both the instances
# 3. Make sure that all the PF,LB and Static NAT rules on this VM
# works as expected.
# 3. Make sure that we are able to access google.com from this user Vm
self.debug("Validating if the network rules work properly or not?")
self.validate_network_rules()
self.debug("Delete virtual machines in account: %s" %
self.account.name)
try:
self.vm_1.delete(self.apiclient)
self.vm_2.delete(self.apiclient)
except Exception as e:
self.fail("Failed to destroy the virtual instances, %s" % e)
self.debug(
"Waiting for expunge interval to cleanup the network and VMs")
wait_for_cleanup(
self.apiclient,
["expunge.interval", "expunge.delay"]
)
# Check if the network rules still exists after Vm stop
self.debug("Checking if NAT rules ")
nat_rules = NATRule.list(
self.apiclient,
id=self.nat_rule.id,
listall=True
)
self.assertEqual(
nat_rules,
None,
"List NAT rules should not return anything"
)
lb_rules = LoadBalancerRule.list(
self.apiclient,
id=self.lb_rule.id,
listall=True
)
self.assertEqual(
lb_rules,
None,
"List LB rules should not return anything"
)
return
| 41.006195 | 106 | 0.456901 | 10,644 | 112,521 | 4.746148 | 0.046787 | 0.026011 | 0.023754 | 0.019894 | 0.92955 | 0.920563 | 0.914882 | 0.907894 | 0.904826 | 0.898452 | 0 | 0.017731 | 0.466704 | 112,521 | 2,743 | 107 | 41.021145 | 0.824137 | 0.122706 | 0 | 0.820845 | 0 | 0 | 0.163854 | 0.003443 | 0 | 0 | 0 | 0 | 0.038905 | 1 | 0.029299 | false | 0.000961 | 0.003842 | 0 | 0.064361 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9c13bb275bbeb106631535c4838410d97d14fe4c | 7,840 | py | Python | tests/aws_batch_db/test_aws_batch_db.py | dazza-codes/aio-aws | 5bce9e0adbb6d4613748c3f9b6e265aa35144372 | [
"Apache-2.0"
] | 2 | 2021-09-14T10:10:23.000Z | 2021-12-07T02:42:00.000Z | tests/aws_batch_db/test_aws_batch_db.py | dazza-codes/aio-aws | 5bce9e0adbb6d4613748c3f9b6e265aa35144372 | [
"Apache-2.0"
] | 19 | 2020-10-13T02:41:25.000Z | 2022-03-29T06:09:38.000Z | tests/aws_batch_db/test_aws_batch_db.py | dazza-codes/aio-aws | 5bce9e0adbb6d4613748c3f9b6e265aa35144372 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019-2021 Darren Weber
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test AWS Batch TinyDB
"""
import tinydb
from aio_aws.aws_batch_models import AWSBatchJob
from aio_aws.uuid_utils import valid_uuid4
def test_batch_job(aws_batch_job):
job = aws_batch_job
assert job.status == "SUCCEEDED"
assert isinstance(job.job_name, str)
assert isinstance(job.job_description, dict)
assert isinstance(job.job_tries, list)
assert job.job_tries == [job.job_id]
assert valid_uuid4(job.job_id)
assert job.created == 1584977374
assert job.started == 1584977376
assert job.stopped == 1584977386
assert job.elapsed == 1584977386 - 1584977374
assert job.runtime == 1584977386 - 1584977376
assert job.spinup == 1584977376 - 1584977374
def test_batch_job_db_save(test_jobs_db, aws_batch_job):
job = aws_batch_job
job_docs = test_jobs_db.save_job(job)
assert job_docs == [1]
def test_batch_job_db_find_by_job_id(test_jobs_db, aws_batch_job):
job = aws_batch_job
job_docs = test_jobs_db.save_job(job)
assert job_docs == [1]
job_doc = test_jobs_db.find_by_job_id(job.job_id)
assert isinstance(job_doc, tinydb.database.Document)
assert isinstance(job_doc, dict) # it's also a dict
assert job_doc["job_id"] == job.job_id
assert job_doc["job_name"] == job.job_name
assert job_doc["status"] == job.status
# test all the fields, since job_doc behaves like a dict
assert job.db_data == job_doc
def test_batch_job_db_remove_by_job_id(test_jobs_db, aws_batch_job):
job = aws_batch_job
job_docs = test_jobs_db.save_job(job)
assert job_docs == [1]
job_doc = test_jobs_db.remove_by_job_id(job.job_id)
assert isinstance(job_doc, tinydb.database.Document)
assert job_doc["job_id"] == job.job_id
def test_batch_job_db_find_by_job_name(test_jobs_db, aws_batch_job):
job = aws_batch_job
job_docs = test_jobs_db.save_job(job)
assert job_docs == [1]
job_docs = test_jobs_db.find_by_job_name(job.job_name)
assert isinstance(job_docs, list)
assert len(job_docs) == 1
job_doc = job_docs[0]
assert isinstance(job_doc, tinydb.database.Document)
assert isinstance(job_doc, dict) # it's also a dict
assert job_doc["job_id"] == job.job_id
assert job_doc["job_name"] == job.job_name
assert job_doc["status"] == job.status
# test all the fields, since job_doc behaves like a dict
assert job.db_data == job_doc
def test_batch_job_db_remove_by_job_name(test_jobs_db, aws_batch_job):
job = aws_batch_job
job_docs = test_jobs_db.save_job(job)
assert job_docs == [1]
job_docs = test_jobs_db.remove_by_job_name(job.job_name)
assert isinstance(job_docs, list)
assert len(job_docs) == 1
job_doc = job_docs[0]
assert isinstance(job_doc, tinydb.database.Document)
assert job_doc["job_name"] == job.job_name
def test_batch_job_db_find_jobs_to_run(test_jobs_db, aws_batch_job):
job = aws_batch_job
job.status = "SUBMITTED" # this job can be 'recovered'
job_docs = test_jobs_db.save_job(job)
assert job_docs == [1]
jobs = test_jobs_db.find_jobs_to_run()
assert isinstance(jobs, list)
assert len(jobs) == 1
assert isinstance(jobs[0], AWSBatchJob)
def test_batch_job_db_find_jobs_to_run_empty(test_jobs_db, aws_batch_job):
job = aws_batch_job
assert job.status == "SUCCEEDED"
job_docs = test_jobs_db.save_job(job)
assert job_docs == [1]
jobs = test_jobs_db.find_jobs_to_run()
assert isinstance(jobs, list)
assert len(jobs) == 0 # successful jobs are done
def test_batch_job_db_filter_jobs_to_run(test_jobs_db, aws_batch_job):
job = aws_batch_job
job.status = "SUBMITTED" # this job can be 'recovered'
jobs = test_jobs_db.jobs_to_run([job])
assert isinstance(jobs, list)
assert len(jobs) == 1
assert isinstance(jobs[0], AWSBatchJob)
def test_batch_job_db_jobs_to_run_empty(test_jobs_db, aws_batch_job):
job = aws_batch_job
assert job.status == "SUCCEEDED"
jobs = test_jobs_db.jobs_to_run([job])
assert isinstance(jobs, list)
assert len(jobs) == 0 # successful jobs are done
def test_batch_job_db_saved_filter_jobs_to_run(test_jobs_db, aws_batch_job):
job = aws_batch_job
job.status = "SUBMITTED" # this job can be 'recovered'
job_docs = test_jobs_db.save_job(job)
assert job_docs == [1]
jobs = test_jobs_db.jobs_to_run([job])
assert isinstance(jobs, list)
assert len(jobs) == 1
assert isinstance(jobs[0], AWSBatchJob)
def test_batch_job_db_saved_filter_jobs_to_run_empty(test_jobs_db, aws_batch_job):
job = aws_batch_job
assert job.status == "SUCCEEDED"
job_docs = test_jobs_db.save_job(job)
assert job_docs == [1]
jobs = test_jobs_db.jobs_to_run([job])
assert isinstance(jobs, list)
assert len(jobs) == 0 # successful jobs are done
def test_batch_job_db_saved_filter_jobs_to_run_for_recovery(
test_jobs_db, aws_batch_job
):
job = aws_batch_job
assert job.status == "SUCCEEDED"
job_docs = test_jobs_db.save_job(job)
jobs = test_jobs_db.jobs_to_run([job])
assert len(jobs) == 0 # successful jobs are done
# Assume the job is recreated and needs to be recovered from the db
job.reset()
assert job.job_id is None
assert job.job_name # used to recover the job from the db
jobs = test_jobs_db.jobs_to_run([job])
assert isinstance(jobs, list)
assert len(jobs) == 0 # the job.job_name is used to recover the job
def test_batch_job_db_find_latest_job_name(test_jobs_db, aws_batch_job):
job = aws_batch_job
test_jobs_db.save_job(job)
# Fake another submission of the same job-name
orig_job_id = job.job_id
fake_job_id = job.job_id.replace("08986fbb7144", "08986fbb7145")
job.job_id = fake_job_id
job.job_submission["jobId"] = fake_job_id
job.job_description["status"] = "SUCCEEDED"
job.status = job.job_description["status"]
job.job_description["createdAt"] += 5
job.job_description["startedAt"] += 5
job.job_description["stoppedAt"] += 5
test_jobs_db.save_job(job)
job_docs = test_jobs_db.find_by_job_name(job.job_name)
assert len(job_docs) == 2
assert [j["job_id"] for j in job_docs] == [orig_job_id, fake_job_id]
job_found = test_jobs_db.find_latest_job_name(job.job_name)
assert job_found.job_id == fake_job_id
def test_batch_job_db_saved_filter_jobs_to_run_with_duplicate(
test_jobs_db, aws_batch_job
):
job = aws_batch_job
# Fake a job failure
job.job_description["status"] = "FAILED"
job.status = job.job_description["status"]
test_jobs_db.save_job(job)
jobs = test_jobs_db.jobs_to_run([job])
assert len(jobs) == 1 # failed jobs could be run again (if reset)
# Fake another submission of the same job-name
fake_job_id = job.job_id.replace("08986fbb7144", "08986fbb7145")
job.job_id = fake_job_id
job.job_submission["jobId"] = fake_job_id
job.job_description["status"] = "SUCCEEDED"
job.status = job.job_description["status"]
job.job_description["createdAt"] += 5
job.job_description["startedAt"] += 5
job.job_description["stoppedAt"] += 5
test_jobs_db.save_job(job)
jobs = test_jobs_db.jobs_to_run([job])
assert len(jobs) == 0 # successful jobs are done
| 35.315315 | 82 | 0.72602 | 1,278 | 7,840 | 4.109546 | 0.125196 | 0.089109 | 0.083778 | 0.06131 | 0.77837 | 0.757997 | 0.737814 | 0.731912 | 0.703732 | 0.698972 | 0 | 0.026901 | 0.179719 | 7,840 | 221 | 83 | 35.475113 | 0.789768 | 0.156378 | 0 | 0.746914 | 0 | 0 | 0.046236 | 0 | 0 | 0 | 0 | 0 | 0.432099 | 1 | 0.092593 | false | 0 | 0.018519 | 0 | 0.111111 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
92bba7393d6ddb696978dcc2b06979e1fc486080 | 177,449 | py | Python | CESAPI/packet.py | MuffinSpawn/Leica | afb89ce88e4d8ee8841e4e35e6719b0723cb164a | [
"MIT"
] | 4 | 2021-06-11T06:10:14.000Z | 2021-11-29T06:58:57.000Z | CESAPI/packet.py | MuffinSpawn/Leica | afb89ce88e4d8ee8841e4e35e6719b0723cb164a | [
"MIT"
] | 10 | 2018-02-01T16:39:13.000Z | 2018-02-22T23:12:12.000Z | CESAPI/packet.py | MuffinSpawn/Leica | afb89ce88e4d8ee8841e4e35e6719b0723cb164a | [
"MIT"
] | 3 | 2018-12-29T09:02:19.000Z | 2020-07-20T06:09:55.000Z |
### This file was generated by the bin/pygen.py script using      ###
### include/ES_C_API_Def_clean.h as input.                        ###
#
# The variables and classes defined in this module handle
# serialization and deserialization of TCP packets consumed and
# emitted by Leica AT4xx laser trackers.
import struct
# ---------------------------------------------------------------------------
# Constant tables mirroring the C enums of the emScon/tracker API.
#
# Each `# enum ES_X:` section below corresponds to one C enum from the input
# header, flattened to module-level ints.  The numeric values are part of the
# wire protocol (they are packed as 32-bit integers into the TCP packets by
# the classes further down in this module), so they must NOT be changed or
# renumbered.  Gaps in the numbering reflect enum members that exist in the
# full C API but were not emitted here.
# ---------------------------------------------------------------------------
# enum ES_LengthUnit:
ES_LU_Meter = 0
ES_LU_Millimeter = 1
ES_LU_Micron = 2
ES_LU_Foot = 3
ES_LU_Yard = 4
ES_LU_Inch = 5
# enum ES_AngleUnit:
ES_AU_Radian = 0
ES_AU_Degree = 1
ES_AU_Gon = 2
# enum ES_TemperatureUnit:
ES_TU_Celsius = 0
ES_TU_Fahrenheit = 1
# enum ES_PressureUnit:
ES_PU_Mbar = 0
ES_PU_HPascal = 1
ES_PU_KPascal = 2
ES_PU_MmHg = 3
ES_PU_Psi = 4
ES_PU_InH2O = 5
ES_PU_InHg = 6
# enum ES_HumidityUnit:
ES_HU_RH = 0
# enum ES_CoordinateSystemType:
ES_CS_RHR = 0
ES_CS_LHRX = 1
ES_CS_LHRY = 2
ES_CS_LHRZ = 3
ES_CS_CCW = 4
ES_CS_CCC = 5
ES_CS_SCW = 6
ES_CS_SCC = 7
# enum ES_DataType: packet payload kind carried in PacketHeaderT.type
ES_DT_Command = 0
ES_DT_Error = 1
ES_DT_SingleMeasResult = 2
ES_DT_NivelResult = 6
ES_DT_ReflectorPosResult = 7
ES_DT_SystemStatusChange = 8
ES_DT_SingleMeasResult2 = 9
# enum ES_Command: command identifier carried in BasicCommandCT.command
ES_C_ExitApplication = 0
ES_C_GetSystemStatus = 1
ES_C_GetTrackerStatus = 2
ES_C_SetUnits = 5
ES_C_GetUnits = 6
ES_C_Initialize = 7
ES_C_ActivateCameraView = 9
ES_C_Park = 10
ES_C_SetStationOrientationParams = 12
ES_C_GetStationOrientationParams = 13
ES_C_SetTransformationParams = 14
ES_C_GetTransformationParams = 15
ES_C_SetEnvironmentParams = 20
ES_C_GetEnvironmentParams = 21
ES_C_SetRefractionParams = 22
ES_C_GetRefractionParams = 23
ES_C_SetMeasurementMode = 24
ES_C_GetMeasurementMode = 25
ES_C_SetCoordinateSystemType = 26
ES_C_GetCoordinateSystemType = 27
ES_C_SetStationaryModeParams = 28
ES_C_GetStationaryModeParams = 29
ES_C_SetReflector = 40
ES_C_GetReflector = 41
ES_C_GetReflectors = 42
ES_C_SetSearchParams = 43
ES_C_GetSearchParams = 44
ES_C_SetSystemSettings = 47
ES_C_GetSystemSettings = 48
ES_C_StartMeasurement = 49
ES_C_StartNivelMeasurement = 51
ES_C_StopMeasurement = 52
ES_C_ChangeFace = 53
ES_C_GoBirdBath = 54
ES_C_GoPosition = 55
ES_C_GoPositionHVD = 56
ES_C_PositionRelativeHV = 57
ES_C_PointLaser = 58
ES_C_PointLaserHVD = 59
ES_C_MoveHV = 60
ES_C_GoNivelPosition = 61
ES_C_GoLastMeasuredPoint = 62
ES_C_FindReflector = 63
ES_C_Unknown = 64
ES_C_GetDirection = 66
ES_C_CallOrientToGravity = 67
ES_C_SetCompensation = 79
ES_C_SetStatisticMode = 80
ES_C_GetStatisticMode = 81
ES_C_SetCameraParams = 83
ES_C_GetCameraParams = 84
ES_C_GetCompensation = 85
ES_C_GetCompensations = 86
ES_C_GetTPInfo = 90
ES_C_GetNivelInfo = 91
ES_C_SetLaserOnTimer = 92
ES_C_GetLaserOnTimer = 93
ES_C_GoBirdBath2 = 95
ES_C_GetFace = 98
ES_C_SetLongSystemParameter = 120
ES_C_GetLongSystemParameter = 121
ES_C_GetMeasurementStatusInfo = 122
ES_C_GetCompensations2 = 123
ES_C_SetDoubleSystemParameter = 125
ES_C_GetDoubleSystemParameter = 126
ES_C_GetObjectTemperature = 127
ES_C_GetOverviewCameraInfo = 129
ES_C_ClearCommandQueue = 130
ES_C_GetADMInfo2 = 131
ES_C_GetTrackerInfo = 132
ES_C_GetNivelInfo2 = 133
ES_C_RestoreStartupConditions = 134
ES_C_GoAndMeasure = 135
ES_C_GetATRInfo = 138
ES_C_GetMeteoStationInfo = 139
ES_C_GetAT4xxInfo = 140
ES_C_GetSystemSoftwareVersion = 142
# enum ES_MeasMode:
ES_MM_Stationary = 0
# enum ES_TargetType:
ES_TT_Unknown = 0
ES_TT_CornerCube = 1
ES_TT_CatsEye = 2
ES_TT_GlassPrism = 3
ES_TT_RFIPrism = 4
ES_TT_RRR15 = 5
ES_TT_RRR05 = 6
ES_TT_BRR15 = 7
ES_TT_BRR05 = 8
ES_TT_TBR05 = 9
# enum ES_TrackerStatus:
ES_TS_NotReady = 0
ES_TS_Busy = 1
ES_TS_Ready = 2
# enum ES_ResultStatus: reported in ReturnDataT.status for every answer packet
ES_RS_AllOK = 0
ES_RS_ServerBusy = 1
ES_RS_NotImplemented = 2
ES_RS_WrongParameter = 3
ES_RS_WrongParameter1 = 4
ES_RS_WrongParameter2 = 5
ES_RS_WrongParameter3 = 6
ES_RS_WrongParameter4 = 7
ES_RS_WrongParameter5 = 8
ES_RS_WrongParameter6 = 9
ES_RS_WrongParameter7 = 10
ES_RS_Parameter1OutOfRangeOK = 11
ES_RS_Parameter1OutOfRangeNOK = 12
ES_RS_Parameter2OutOfRangeOK = 13
ES_RS_Parameter2OutOfRangeNOK = 14
ES_RS_Parameter3OutOfRangeOK = 15
ES_RS_Parameter3OutOfRangeNOK = 16
ES_RS_Parameter4OutOfRangeOK = 17
ES_RS_Parameter4OutOfRangeNOK = 18
ES_RS_Parameter5OutOfRangeOK = 19
ES_RS_Parameter5OutOfRangeNOK = 20
ES_RS_Parameter6OutOfRangeOK = 21
ES_RS_Parameter6OutOfRangeNOK = 22
ES_RS_WrongCurrentReflector = 23
ES_RS_NoTPFound = 26
ES_RS_NoWeathermonitorFound = 27
ES_RS_NoLastMeasuredPoint = 28
ES_RS_NoVideoCamera = 29
ES_RS_NoAdm = 30
ES_RS_NoNivel = 31
ES_RS_WrongTPFirmware = 32
ES_RS_UsageConflict = 35
ES_RS_Unknown = 36
ES_RS_NoDistanceSet = 37
ES_RS_NoTrackerConnected = 38
ES_RS_TrackerNotInitialized = 39
ES_RS_ModuleNotStarted = 40
ES_RS_ModuleTimedOut = 41
ES_RS_NotInCameraPosition = 44
ES_RS_WrongParameter8 = 47
ES_RS_WrongParameter9 = 48
ES_RS_WrongParameter10 = 49
ES_RS_WrongParameter11 = 50
ES_RS_WrongParameter12 = 51
ES_RS_WrongParameter13 = 52
ES_RS_WrongParameter14 = 53
ES_RS_WrongParameter15 = 54
ES_RS_WrongParameter16 = 55
ES_RS_NoSuchCompensation = 56
ES_RS_MeteoDataOutOfRange = 57
ES_RS_NoDataToImport = 90
ES_RS_NoTemperatureFromWM = 94
ES_RS_NoPressureFromWM = 95
ES_RS_NoHumidityFromWM = 96
ES_RS_InvalidInputData = 98
ES_RS_NoValidADMCompensation = 10002
ES_RS_PressureSensorProblem = 10003
ES_RS_MeasurementStatusNotReady = 10004
ES_RS_ADMStartUpBusy = 10005
ES_RS_NoAtr = 10007
ES_RS_NoOVC = 10008
ES_RS_NoStationaryResult = 10009
ES_RS_SensorNotLeveled = 10010
ES_RS_MultiConnectionsNotAllowed = 10011
ES_RS_SensorNotStable = 10013
ES_RS_SystemNotReadyForMeasurement = 10014
ES_RS_CommunicationWithSensorFailed = 10015
ES_RS_No_Sensor_Battery = 10016
ES_RS_CompensatorNotAllowed = 10017
ES_RS_WarmedUpStateReached = 10018
ES_RS_NotLeveledForInitialization = 10019
ES_RS_ADMHardwareProblem = 10020
ES_RS_ATRHardwareProblem = 10021
# enum ES_TrackerProcessorStatus:
ES_TPS_NoTPFound = 0
ES_TPS_TPFound = 1
ES_TPS_Booted = 3
ES_TPS_CompensationSet = 4
ES_TPS_Initialized = 5
# enum ES_LaserProcessorStatus:
ES_LPS_LaserReady = 3
# enum ES_ADMStatus:
ES_AS_ADMReady = 2
# enum ES_NivelStatus:
ES_NS_AllOK = 0
ES_NS_OutOfRangeOK = 1
ES_NS_OutOfRangeNOK = 2
ES_NS_NoNivel = 3
# enum ES_WeatherMonitorStatus:
ES_WMS_NotConnected = 0
ES_WMS_ReadOnly = 1
ES_WMS_ReadAndCalculateRefractions = 2
# enum ES_SystemStatusChange:
ES_SSC_EnvironmentParamsChanged = 2
ES_SSC_RefractionParamsChanged = 3
ES_SSC_SearchParamsChanged = 4
ES_SSC_AdmParamsChanged = 5
ES_SSC_UnitsChanged = 6
ES_SSC_ReflectorChanged = 7
ES_SSC_SystemSettingsChanged = 8
ES_SSC_TemperatureRangeChanged = 9
ES_SSC_CameraParamsChanged = 10
ES_SSC_CompensationChanged = 11
ES_SSC_CoordinateSystemTypeChanged = 12
ES_SSC_StationOrientationParamsChanged = 15
ES_SSC_TransformationParamsChanged = 16
ES_SSC_MeasurementModeChanged = 17
ES_SSC_StationaryModeParamsChanged = 18
ES_SSC_StatisticModeChanged = 24
ES_SSC_MeasStatus_NotReady = 25
ES_SSC_MeasStatus_Busy = 26
ES_SSC_MeasStatus_Ready = 27
ES_SSC_IsFace1 = 30
ES_SSC_IsFace2 = 31
ES_SSC_ObjectTemperatureChanged = 105
ES_SSC_ProbeButton1Down = 110
ES_SSC_ProbeButton1Up = 111
ES_SSC_ProbeButton2Down = 120
ES_SSC_ProbeButton2Up = 121
ES_SSC_ProbeButton3Down = 130
ES_SSC_ProbeButton3Up = 131
ES_SSC_ProbeButton4Down = 140
ES_SSC_ProbeButton4Up = 141
ES_SCC_InitializationStatusChanged = 151
ES_SCC_TiltSensorStatusChanged = 152
ES_SSC_EmsysFilesImported = 820
ES_SSC_SensorDetected = 850
ES_SSC_SensorDisconnected = 851
ES_SSC_CompensatorStatusChanged = 852
ES_SSC_BatteryStatusChanged = 853
ES_SSC_TPConnectionClosing = 997
ES_SSC_ServerClosing = 998
ES_SSC_ServerStarted = 999
# enum ES_NivelPosition:
ES_NP_Pos1 = 0
ES_NP_Pos2 = 1
ES_NP_Pos3 = 2
ES_NP_Pos4 = 3
# enum ES_StatisticMode:
ES_SM_Standard = 0
ES_SM_Extended = 1
# enum ES_TrackerProcessorType:
ES_TT_Undefined = 0
ES_TT_ATC400 = 20
# enum ES_TPMicroProcessorType:
ES_TPM_Undefined = 0
ES_TPM_PXA250 = 20
# enum ES_LTSensorType:
ES_LTS_Undefined = 0
ES_LTS_AT401 = 70
ES_LTS_NoSensor = 99
# enum ES_TrackerFace:
ES_TF_Unknown = 0
ES_TF_Face1 = 1
ES_TF_Face2 = 2
# enum ES_ProbeButtonEvent:
ES_PBE_DisableEvents = 0
ES_PBE_EnableEvents = 1
# enum ES_PowerLockMode:
ES_PLM_InDoor = 0
ES_PLM_OutDoor = 1
ES_PLM_OutDoor_LongRange = 2
# enum ES_SystemParameter:
ES_SP_KeepLastPositionFlag = 0
ES_SP_WeatherMonitorSetting = 1
ES_SP_DisplayReflectorPosition = 10
ES_SP_ProbeConfig_ButtonEvent = 51
ES_SP_TcpCommandQueueSize = 200
ES_SP_PowerLockFunctionAvailable = 410
ES_SP_PowerLockFunctionActive = 411
ES_SP_PowerLockMode = 450
ES_SP_D_SystemLongest3DDistanceADM = 1101
ES_SP_AT4xxControllerBatteryStatus = 5000
ES_SP_AT4xxSensorBatteryStatus = 5001
ES_SP_AT4xxInclinationSensorState = 5002
# enum ES_MeasurementStatusInfo: bit flags (values are powers of two above 2048)
ES_MSI_Unknown = 0
ES_MSI_TrackerFound = 1
ES_MSI_TrackerCompensationFound = 2
ES_MSI_ADMFound = 4
ES_MSI_ADMCompensationFound = 8
ES_MSI_ReflectorFound = 2048
ES_MSI_InFace1 = 4096
ES_MSI_SensorBatteryMounted = 16384
ES_MSI_NivelInWorkingRange = 32768
ES_MSI_Initialized = 65536
# enum ES_ClearCommandQueueType:
ES_CCQ_ClearOwnOnly = 0
ES_CCQ_ClearAll = 1
# enum ES_OverviewCameraType:
ES_OCT_Unknown = 0
ES_OCT_Classic = 1
ES_OCT_AT4xx_Integrated = 20
# enum ES_ADMType:
ES_AMT_Unknown = 0
ES_AMT_LeicaADM2 = 3
# enum ES_ATRType:
ES_ATR_None = 0
ES_ATR_4 = 1
ES_ATR_5i = 2
# enum ES_TrkAccuracyModel:
ES_TAM_Unknown = 0
ES_TAM_2005 = 1
# enum ES_NivelType:
ES_NT_Unknown = 0
ES_NT_NivelAT4xx = 3
# enum ES_MeteoStationType:
ES_MST_None = 0
ES_MST_Thommen = 1
ES_MST_AT = 2
# enum ES_WLANType:
ES_WLAN_None = 0
ES_WLAN_OWL211 = 1
ES_WLAN_OWL221 = 2
# enum ES_InclinationSensorState:
ES_ISS_Off = 0
ES_ISS_ApplyCorrections = 2
class PacketHeaderT(object):
    # 8-byte preamble present on every packet: a signed 32-bit total packet
    # size followed by an unsigned 32-bit data-type code (ES_DataType).
    def __init__(self):
        self.packet = b''
        self.__packet_size = 8
        self.__sizes = [8]
        self.__formats = [('<i I ')]
        self.lPacketSize = 0
        self.type = 0  # ES_DataType
    def unpack(self, packet):
        # Consume the leading 8 bytes and hand the remainder back to the caller.
        self.packet = packet
        size = self.__sizes[0]
        self.lPacketSize, self.type = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        # Serialize the two header fields in wire order (little-endian).
        self.packet = struct.pack(self.__formats[0], self.lPacketSize, self.type)
        return self.packet
class ReturnDataT(object):
    # Common reply preamble: a PacketHeaderT followed by a 32-bit result
    # status (ES_ResultStatus).
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = [4]
        self.__formats = [('<I ')]
        self.packetHeader = PacketHeaderT()
        self.status = 0  # ES_ResultStatus
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetHeader.unpack(packet)
        size = self.__sizes[0]
        (self.status,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetHeader.pack()
        self.packet += struct.pack(self.__formats[0], self.status)
        return self.packet
class BasicCommandCT(object):
    # Common command (client -> tracker) preamble: PacketHeaderT plus the
    # 32-bit command code (ES_Command).
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = [4]
        self.__formats = [('<I ')]
        self.packetHeader = PacketHeaderT()
        self.command = 0  # ES_Command
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetHeader.unpack(packet)
        size = self.__sizes[0]
        (self.command,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetHeader.pack()
        self.packet += struct.pack(self.__formats[0], self.command)
        return self.packet
class BasicCommandRT(object):
    # Common command reply preamble: PacketHeaderT plus the echoed command
    # code and the 32-bit result status.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = [8]
        self.__formats = [('<I I ')]
        self.packetHeader = PacketHeaderT()
        self.command = 0  # ES_Command
        self.status = 0  # ES_ResultStatus
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetHeader.unpack(packet)
        size = self.__sizes[0]
        self.command, self.status = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetHeader.pack()
        self.packet += struct.pack(self.__formats[0], self.command, self.status)
        return self.packet
class NivelResultT(object):
    # Nivel (inclination sensor) reply packet: status code, X/Y tilt values
    # and the sensor temperature.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 40
        self.__sizes = [28]
        self.__formats = [('<I d d d ')]
        info = ReturnDataT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_NivelResult
        self.packetInfo = info
        self.nivelStatus = 0  # ES_NivelStatus
        self.dXTilt = 0.0
        self.dYTilt = 0.0
        self.dNivelTemperature = 0.0
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.nivelStatus, self.dXTilt, self.dYTilt,
         self.dNivelTemperature) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetInfo.pack()
        self.packet += struct.pack(self.__formats[0], self.nivelStatus,
                                   self.dXTilt, self.dYTilt, self.dNivelTemperature)
        return self.packet
class ReflectorPosResultT(object):
    # Reflector position reply packet: three coordinate values.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 36
        self.__sizes = [24]
        self.__formats = [('<d d d ')]
        info = ReturnDataT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_ReflectorPosResult
        self.packetInfo = info
        self.dVal1 = 0.0
        self.dVal2 = 0.0
        self.dVal3 = 0.0
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dVal1, self.dVal2,
         self.dVal3) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetInfo.pack()
        self.packet += struct.pack(self.__formats[0],
                                   self.dVal1, self.dVal2, self.dVal3)
        return self.packet
class SingleMeasResultT(object):
    # Single-point measurement reply (ES_DT_SingleMeasResult): measurement
    # mode, try-mode flag, 3 coordinate values, standard deviations (with
    # a-priori counterparts), pointing errors and the weather readings.
    # Payload attribute names, listed in wire order; the first two are ints,
    # the rest doubles (matches the '<I i d...d' format below).
    _PAYLOAD_FIELDS = (
        'measMode', 'bIsTryMode',
        'dVal1', 'dVal2', 'dVal3',
        'dStd1', 'dStd2', 'dStd3', 'dStdTotal',
        'dPointingError1', 'dPointingError2', 'dPointingError3',
        'dAprioriStd1', 'dAprioriStd2', 'dAprioriStd3', 'dAprioriStdTotal',
        'dTemperature', 'dPressure', 'dHumidity',
    )
    def __init__(self):
        self.packet = b''
        self.__packet_size = 156
        self.__sizes = [144]
        self.__formats = [('<I i d d d d d d d d d d d d d d d d d ')]
        info = ReturnDataT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_SingleMeasResult
        self.packetInfo = info
        self.measMode = 0  # ES_MeasMode
        self.bIsTryMode = 0
        for name in self._PAYLOAD_FIELDS[2:]:
            setattr(self, name, 0.0)
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        values = struct.unpack(self.__formats[0], packet[:size])
        for name, value in zip(self._PAYLOAD_FIELDS, values):
            setattr(self, name, value)
        return packet[size:]
    def pack(self):
        self.packet = self.packetInfo.pack()
        self.packet += struct.pack(self.__formats[0],
                                   *[getattr(self, name) for name in self._PAYLOAD_FIELDS])
        return self.packet
class SingleMeasResult2T(object):
    # Extended single-point measurement reply (ES_DT_SingleMeasResult2):
    # like SingleMeasResultT but with covariance terms and H/V/D pointing
    # errors in addition to the standard deviations and weather readings.
    # Payload attribute names, listed in wire order; the first two are ints,
    # the rest doubles (matches the '<I i d...d' format below).
    _PAYLOAD_FIELDS = (
        'measMode', 'bIsTryMode',
        'dVal1', 'dVal2', 'dVal3',
        'dStd1', 'dStd2', 'dStd3', 'dStdTotal',
        'dCovar12', 'dCovar13', 'dCovar23',
        'dPointingErrorH', 'dPointingErrorV', 'dPointingErrorD',
        'dAprioriStd1', 'dAprioriStd2', 'dAprioriStd3', 'dAprioriStdTotal',
        'dAprioriCovar12', 'dAprioriCovar13', 'dAprioriCovar23',
        'dTemperature', 'dPressure', 'dHumidity',
    )
    def __init__(self):
        self.packet = b''
        self.__packet_size = 204
        self.__sizes = [192]
        self.__formats = [('<I i d d d d d d d d d d d d d d d d d d d d d d d ')]
        info = ReturnDataT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_SingleMeasResult2
        self.packetInfo = info
        self.measMode = 0  # ES_MeasMode
        self.bIsTryMode = 0
        for name in self._PAYLOAD_FIELDS[2:]:
            setattr(self, name, 0.0)
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        values = struct.unpack(self.__formats[0], packet[:size])
        for name, value in zip(self._PAYLOAD_FIELDS, values):
            setattr(self, name, value)
        return packet[size:]
    def pack(self):
        self.packet = self.packetInfo.pack()
        self.packet += struct.pack(self.__formats[0],
                                   *[getattr(self, name) for name in self._PAYLOAD_FIELDS])
        return self.packet
class SystemStatusChangeT(object):
    # Notification packet carrying a single ES_SystemStatusChange code;
    # uses a bare PacketHeaderT (no status word).
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = [4]
        self.__formats = [('<I ')]
        header = PacketHeaderT()
        header.lPacketSize = self.__packet_size
        header.type = ES_DT_SystemStatusChange
        self.packetHeader = header
        self.systemStatusChange = 0  # ES_SystemStatusChange
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetHeader.unpack(packet)
        size = self.__sizes[0]
        (self.systemStatusChange,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetHeader.pack()
        self.packet += struct.pack(self.__formats[0], self.systemStatusChange)
        return self.packet
class ErrorResponseT(object):
    # Error packet (ES_DT_Error): the failed command code and its status.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = [8]
        self.__formats = [('<I I ')]
        header = PacketHeaderT()
        header.lPacketSize = self.__packet_size
        header.type = ES_DT_Error
        self.packetHeader = header
        self.command = 0  # ES_Command
        self.status = 0  # ES_ResultStatus
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetHeader.unpack(packet)
        size = self.__sizes[0]
        self.command, self.status = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetHeader.pack()
        self.packet += struct.pack(self.__formats[0], self.command, self.status)
        return self.packet
class InitializeCT(object):
    # Command packet for ES_C_Initialize; no payload beyond the basic command.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_Initialize
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class InitializeRT(object):
    # Reply packet for ES_C_Initialize; no payload beyond the basic reply.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_Initialize
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class ActivateCameraViewCT(object):
    # Command packet for ES_C_ActivateCameraView; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_ActivateCameraView
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class ActivateCameraViewRT(object):
    # Reply packet for ES_C_ActivateCameraView; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_ActivateCameraView
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class ParkCT(object):
    # Command packet for ES_C_Park; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_Park
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class ParkRT(object):
    # Reply packet for ES_C_Park; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_Park
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class GoBirdBathCT(object):
    # Command packet for ES_C_GoBirdBath; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoBirdBath
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class GoBirdBathRT(object):
    # Reply packet for ES_C_GoBirdBath; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoBirdBath
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class GoBirdBath2CT(object):
    # Command packet for ES_C_GoBirdBath2; payload is a single int flag
    # (bClockWise) selecting the turn direction.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = [4]
        self.__formats = [('<i ')]
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoBirdBath2
        self.packetInfo = info
        self.bClockWise = 0
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.bClockWise,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetInfo.pack()
        self.packet += struct.pack(self.__formats[0], self.bClockWise)
        return self.packet
class GoBirdBath2RT(object):
    # Reply packet for ES_C_GoBirdBath2; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoBirdBath2
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class ChangeFaceCT(object):
    # Command packet for ES_C_ChangeFace; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_ChangeFace
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class ChangeFaceRT(object):
    # Reply packet for ES_C_ChangeFace; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_ChangeFace
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class StartNivelMeasurementCT(object):
    # Command packet for ES_C_StartNivelMeasurement; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_StartNivelMeasurement
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class StartNivelMeasurementRT(object):
    # Reply packet for ES_C_StartNivelMeasurement; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_StartNivelMeasurement
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class StartMeasurementCT(object):
    # Command packet for ES_C_StartMeasurement; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_StartMeasurement
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class StartMeasurementRT(object):
    # Reply packet for ES_C_StartMeasurement; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_StartMeasurement
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class StopMeasurementCT(object):
    # Command packet for ES_C_StopMeasurement; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_StopMeasurement
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class StopMeasurementRT(object):
    # Reply packet for ES_C_StopMeasurement; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_StopMeasurement
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class ExitApplicationCT(object):
    # Command packet for ES_C_ExitApplication; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_ExitApplication
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class ExitApplicationRT(object):
    # Reply packet for ES_C_ExitApplication; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_ExitApplication
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class GoLastMeasuredPointCT(object):
    # Command packet for ES_C_GoLastMeasuredPoint; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoLastMeasuredPoint
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class GoLastMeasuredPointRT(object):
    # Reply packet for ES_C_GoLastMeasuredPoint; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoLastMeasuredPoint
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class FindReflectorCT(object):
    # Command packet for ES_C_FindReflector; payload is the approximate
    # distance to the reflector (double).
    def __init__(self):
        self.packet = b''
        self.__packet_size = 20
        self.__sizes = [8]
        self.__formats = [('<d ')]
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_FindReflector
        self.packetInfo = info
        self.dAproxDistance = 0.0
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dAproxDistance,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetInfo.pack()
        self.packet += struct.pack(self.__formats[0], self.dAproxDistance)
        return self.packet
class FindReflectorRT(object):
    # Reply packet for ES_C_FindReflector; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_FindReflector
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class SetCoordinateSystemTypeCT(object):
    # Command packet for ES_C_SetCoordinateSystemType; payload is the
    # requested ES_CoordinateSystemType code.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = [4]
        self.__formats = [('<I ')]
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetCoordinateSystemType
        self.packetInfo = info
        self.coordSysType = 0  # ES_CoordinateSystemType
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.coordSysType,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetInfo.pack()
        self.packet += struct.pack(self.__formats[0], self.coordSysType)
        return self.packet
class SetCoordinateSystemTypeRT(object):
    # Reply packet for ES_C_SetCoordinateSystemType; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetCoordinateSystemType
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class GetCoordinateSystemTypeCT(object):
    # Command packet for ES_C_GetCoordinateSystemType; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetCoordinateSystemType
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class GetCoordinateSystemTypeRT(object):
    # Reply packet for ES_C_GetCoordinateSystemType; payload is the current
    # ES_CoordinateSystemType code.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 20
        self.__sizes = [4]
        self.__formats = [('<I ')]
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetCoordinateSystemType
        self.packetInfo = info
        self.coordSysType = 0  # ES_CoordinateSystemType
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.coordSysType,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetInfo.pack()
        self.packet += struct.pack(self.__formats[0], self.coordSysType)
        return self.packet
class SetMeasurementModeCT(object):
    # Command packet for ES_C_SetMeasurementMode; payload is the requested
    # ES_MeasMode code.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = [4]
        self.__formats = [('<I ')]
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetMeasurementMode
        self.packetInfo = info
        self.measMode = 0  # ES_MeasMode
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.measMode,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetInfo.pack()
        self.packet += struct.pack(self.__formats[0], self.measMode)
        return self.packet
class SetMeasurementModeRT(object):
    # Reply packet for ES_C_SetMeasurementMode; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetMeasurementMode
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class GetMeasurementModeCT(object):
    # Command packet for ES_C_GetMeasurementMode; no extra payload.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetMeasurementMode
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        self.packet = self.packetInfo.pack()
        return self.packet
class GetMeasurementModeRT(object):
    # Reply packet for ES_C_GetMeasurementMode; payload is the current
    # ES_MeasMode code.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 20
        self.__sizes = [4]
        self.__formats = [('<I ')]
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetMeasurementMode
        self.packetInfo = info
        self.measMode = 0  # ES_MeasMode
    def unpack(self, packet):
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.measMode,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = self.packetInfo.pack()
        self.packet += struct.pack(self.__formats[0], self.measMode)
        return self.packet
class SearchParamsDataT(object):
    # Plain payload struct (no packet header): reflector search radius
    # (double) plus a timeout value (int).
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = [12]
        self.__formats = [('<d i ')]
        self.dSearchRadius = 0.0
        self.lTimeOut = 0
    def unpack(self, packet):
        self.packet = packet
        size = self.__sizes[0]
        (self.dSearchRadius,
         self.lTimeOut) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = struct.pack(self.__formats[0], self.dSearchRadius, self.lTimeOut)
        return self.packet
class SetSearchParamsCT(object):
    # Command packet for ES_C_SetSearchParams; payload is a SearchParamsDataT.
    def __init__(self):
        self.packet = b''
        self.__packet_size = 24
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetSearchParams
        self.packetInfo = info
        self.searchParams = SearchParamsDataT()
    def unpack(self, packet):
        self.packet = packet
        remainder = self.packetInfo.unpack(packet)
        return self.searchParams.unpack(remainder)
    def pack(self):
        self.packet = self.packetInfo.pack() + self.searchParams.pack()
        return self.packet
class SetSearchParamsRT(object):
    """RT-side packet for ES_C_SetSearchParams: command header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 16  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetSearchParams

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetSearchParamsCT(object):
    """CT-side packet for ES_C_GetSearchParams: command header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetSearchParams

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetSearchParamsRT(object):
    """RT-side packet for ES_C_GetSearchParams: header plus SearchParamsDataT payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 28  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetSearchParams
        self.searchParams = SearchParamsDataT()

    def unpack(self, packet):
        """Decode the header then the search-parameter payload; return the tail."""
        self.packet = packet
        return self.searchParams.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the search-parameter payload."""
        self.packet = self.packetInfo.pack() + self.searchParams.pack()
        return self.packet
class StationaryModeDataT(object):
    """8-byte stationary-mode record: measurement time and use-ADM flag (two int32)."""

    _STRUCT = struct.Struct('<i i ')

    def __init__(self):
        self.packet = b''
        self.lMeasTime = 0
        self.bUseADM = 0

    def unpack(self, packet):
        """Decode the leading 8 bytes of *packet*; return the unconsumed tail."""
        self.packet = packet
        self.lMeasTime, self.bUseADM = self._STRUCT.unpack(packet[:self._STRUCT.size])
        return packet[self._STRUCT.size:]

    def pack(self):
        """Serialize the current field values; result is cached on self.packet."""
        self.packet = self._STRUCT.pack(self.lMeasTime, self.bUseADM)
        return self.packet
class SetStationaryModeParamsCT(object):
    """CT-side packet for ES_C_SetStationaryModeParams: header + StationaryModeDataT."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 20  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetStationaryModeParams
        self.stationaryModeData = StationaryModeDataT()

    def unpack(self, packet):
        """Decode the header then the stationary-mode payload; return the tail."""
        self.packet = packet
        return self.stationaryModeData.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the stationary-mode payload."""
        self.packet = self.packetInfo.pack() + self.stationaryModeData.pack()
        return self.packet
class SetStationaryModeParamsRT(object):
    """RT-side packet for ES_C_SetStationaryModeParams: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 16  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetStationaryModeParams

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetStationaryModeParamsCT(object):
    """CT-side packet for ES_C_GetStationaryModeParams: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetStationaryModeParams

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetStationaryModeParamsRT(object):
    """RT-side packet for ES_C_GetStationaryModeParams: header + StationaryModeDataT."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 24  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetStationaryModeParams
        self.stationaryModeData = StationaryModeDataT()

    def unpack(self, packet):
        """Decode the header then the stationary-mode payload; return the tail."""
        self.packet = packet
        return self.stationaryModeData.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the stationary-mode payload."""
        self.packet = self.packetInfo.pack() + self.stationaryModeData.pack()
        return self.packet
class SystemSettingsDataT(object):
    """36-byte system-settings record: status word plus eight int32 flags."""

    _STRUCT = struct.Struct('<I i i i i i i i i ')
    # Field order matches the wire layout above.
    _FIELDS = ('weatherMonitorStatus',          # ES_WeatherMonitorStatus
               'bApplyTransformationParams',
               'bApplyStationOrientationParams',
               'bKeepLastPosition',
               'bSendUnsolicitedMessages',
               'bSendReflectorPositionData',
               'bTryMeasurementMode',
               'bHasNivel',
               'bHasVideoCamera')

    def __init__(self):
        self.packet = b''
        for name in self._FIELDS:
            setattr(self, name, 0)

    def unpack(self, packet):
        """Decode the leading 36 bytes of *packet*; return the unconsumed tail."""
        self.packet = packet
        values = self._STRUCT.unpack(packet[:self._STRUCT.size])
        for name, value in zip(self._FIELDS, values):
            setattr(self, name, value)
        return packet[self._STRUCT.size:]

    def pack(self):
        """Serialize the current field values; result is cached on self.packet."""
        self.packet = self._STRUCT.pack(*(getattr(self, name) for name in self._FIELDS))
        return self.packet
class SetSystemSettingsCT(object):
    """CT-side packet for ES_C_SetSystemSettings: header + SystemSettingsDataT."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 48  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetSystemSettings
        self.systemSettings = SystemSettingsDataT()

    def unpack(self, packet):
        """Decode the header then the system-settings payload; return the tail."""
        self.packet = packet
        return self.systemSettings.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the system-settings payload."""
        self.packet = self.packetInfo.pack() + self.systemSettings.pack()
        return self.packet
class SetSystemSettingsRT(object):
    """RT-side packet for ES_C_SetSystemSettings: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 16  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetSystemSettings

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetSystemSettingsCT(object):
    """CT-side packet for ES_C_GetSystemSettings: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetSystemSettings

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetSystemSettingsRT(object):
    """RT-side packet for ES_C_GetSystemSettings: header + SystemSettingsDataT."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 52  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetSystemSettings
        self.systemSettings = SystemSettingsDataT()

    def unpack(self, packet):
        """Decode the header then the system-settings payload; return the tail."""
        self.packet = packet
        return self.systemSettings.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the system-settings payload."""
        self.packet = self.packetInfo.pack() + self.systemSettings.pack()
        return self.packet
class SystemUnitsDataT(object):
    """20-byte units record: five unsigned 32-bit unit-type selectors."""

    _STRUCT = struct.Struct('<I I I I I ')
    # Field order matches the wire layout above.
    _FIELDS = ('lenUnitType',    # ES_LengthUnit
               'angUnitType',    # ES_AngleUnit
               'tempUnitType',   # ES_TemperatureUnit
               'pressUnitType',  # ES_PressureUnit
               'humUnitType')    # ES_HumidityUnit

    def __init__(self):
        self.packet = b''
        for name in self._FIELDS:
            setattr(self, name, 0)

    def unpack(self, packet):
        """Decode the leading 20 bytes of *packet*; return the unconsumed tail."""
        self.packet = packet
        values = self._STRUCT.unpack(packet[:self._STRUCT.size])
        for name, value in zip(self._FIELDS, values):
            setattr(self, name, value)
        return packet[self._STRUCT.size:]

    def pack(self):
        """Serialize the current field values; result is cached on self.packet."""
        self.packet = self._STRUCT.pack(*(getattr(self, name) for name in self._FIELDS))
        return self.packet
class SetUnitsCT(object):
    """CT-side packet for ES_C_SetUnits: header + SystemUnitsDataT payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 32  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetUnits
        self.unitsSettings = SystemUnitsDataT()

    def unpack(self, packet):
        """Decode the header then the units payload; return the tail."""
        self.packet = packet
        return self.unitsSettings.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the units payload."""
        self.packet = self.packetInfo.pack() + self.unitsSettings.pack()
        return self.packet
class SetUnitsRT(object):
    """RT-side packet for ES_C_SetUnits: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 16  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetUnits

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetUnitsCT(object):
    """CT-side packet for ES_C_GetUnits: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetUnits

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetUnitsRT(object):
    """RT-side packet for ES_C_GetUnits: header + SystemUnitsDataT payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 36  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetUnits
        self.unitsSettings = SystemUnitsDataT()

    def unpack(self, packet):
        """Decode the header then the units payload; return the tail."""
        self.packet = packet
        return self.unitsSettings.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the units payload."""
        self.packet = self.packetInfo.pack() + self.unitsSettings.pack()
        return self.packet
class ESVersionNumberT(object):
    """12-byte version record: major, minor and build numbers (three int32)."""

    _STRUCT = struct.Struct('<i i i ')

    def __init__(self):
        self.packet = b''
        self.iMajorVersionNumber = 0
        self.iMinorVersionNumber = 0
        self.iBuildNumber = 0

    def unpack(self, packet):
        """Decode the leading 12 bytes of *packet*; return the unconsumed tail."""
        self.packet = packet
        (self.iMajorVersionNumber,
         self.iMinorVersionNumber,
         self.iBuildNumber) = self._STRUCT.unpack(packet[:self._STRUCT.size])
        return packet[self._STRUCT.size:]

    def pack(self):
        """Serialize the current field values; result is cached on self.packet."""
        self.packet = self._STRUCT.pack(self.iMajorVersionNumber,
                                        self.iMinorVersionNumber,
                                        self.iBuildNumber)
        return self.packet
class GetSystemStatusCT(object):
    """CT-side packet for ES_C_GetSystemStatus: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetSystemStatus

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetSystemStatusRT(object):
    """RT-side packet for ES_C_GetSystemStatus.

    Wire layout after the command header: four status words, an embedded
    ESVersionNumberT record, then a weather-monitor word and two int32 values.
    """

    _HEAD = struct.Struct('<I I I I ')
    _TAIL = struct.Struct('<I i i ')

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 56  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetSystemStatus
        self.lastResultStatus = 0        # ES_ResultStatus
        self.trackerProcessorStatus = 0  # ES_TrackerProcessorStatus
        self.laserStatus = 0             # ES_LaserProcessorStatus
        self.admStatus = 0               # ES_ADMStatus
        self.esVersionNumber = ESVersionNumberT()
        self.weatherMonitorStatus = 0    # ES_WeatherMonitorStatus
        self.lFlagsValue = 0
        self.lTrackerSerialNumber = 0

    def unpack(self, packet):
        """Decode header, status words, version record and trailing fields."""
        self.packet = packet
        rest = self.packetInfo.unpack(packet)
        (self.lastResultStatus, self.trackerProcessorStatus,
         self.laserStatus, self.admStatus) = self._HEAD.unpack(rest[:self._HEAD.size])
        rest = self.esVersionNumber.unpack(rest[self._HEAD.size:])
        (self.weatherMonitorStatus, self.lFlagsValue,
         self.lTrackerSerialNumber) = self._TAIL.unpack(rest[:self._TAIL.size])
        return rest[self._TAIL.size:]

    def pack(self):
        """Serialize the header and all status fields in wire order."""
        self.packet = (self.packetInfo.pack()
                       + self._HEAD.pack(self.lastResultStatus,
                                         self.trackerProcessorStatus,
                                         self.laserStatus,
                                         self.admStatus)
                       + self.esVersionNumber.pack()
                       + self._TAIL.pack(self.weatherMonitorStatus,
                                         self.lFlagsValue,
                                         self.lTrackerSerialNumber))
        return self.packet
class GetMeasurementStatusInfoCT(object):
    """CT-side packet for ES_C_GetMeasurementStatusInfo: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetMeasurementStatusInfo

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetMeasurementStatusInfoRT(object):
    """RT-side packet for ES_C_GetMeasurementStatusInfo: header + status pair."""

    _PAYLOAD = struct.Struct('<I i ')  # result status word + info value

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 24  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetMeasurementStatusInfo
        self.lastResultStatus = 0  # ES_ResultStatus
        self.lMeasurementStatusInfo = 0

    def unpack(self, packet):
        """Decode the header then the status pair; return the unread tail."""
        self.packet = packet
        rest = self.packetInfo.unpack(packet)
        (self.lastResultStatus,
         self.lMeasurementStatusInfo) = self._PAYLOAD.unpack(rest[:self._PAYLOAD.size])
        return rest[self._PAYLOAD.size:]

    def pack(self):
        """Serialize the header followed by the status pair."""
        self.packet = self.packetInfo.pack() + self._PAYLOAD.pack(
            self.lastResultStatus, self.lMeasurementStatusInfo)
        return self.packet
class GetTrackerStatusCT(object):
    """CT-side packet for ES_C_GetTrackerStatus: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetTrackerStatus

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetTrackerStatusRT(object):
    """RT-side packet for ES_C_GetTrackerStatus: header plus one status word."""

    _PAYLOAD = struct.Struct('<I ')  # single unsigned 32-bit status value

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 20  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetTrackerStatus
        self.trackerStatus = 0  # ES_TrackerStatus

    def unpack(self, packet):
        """Decode the header then the status word; return the unread tail."""
        self.packet = packet
        rest = self.packetInfo.unpack(packet)
        (self.trackerStatus,) = self._PAYLOAD.unpack(rest[:self._PAYLOAD.size])
        return rest[self._PAYLOAD.size:]

    def pack(self):
        """Serialize the header followed by the status word."""
        self.packet = self.packetInfo.pack() + self._PAYLOAD.pack(self.trackerStatus)
        return self.packet
class SetReflectorCT(object):
    """CT-side packet for ES_C_SetReflector: header plus a reflector id (int32)."""

    _PAYLOAD = struct.Struct('<i ')

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 16  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetReflector
        self.iInternalReflectorId = 0

    def unpack(self, packet):
        """Decode the header then the reflector id; return the unread tail."""
        self.packet = packet
        rest = self.packetInfo.unpack(packet)
        (self.iInternalReflectorId,) = self._PAYLOAD.unpack(rest[:self._PAYLOAD.size])
        return rest[self._PAYLOAD.size:]

    def pack(self):
        """Serialize the header followed by the reflector id."""
        self.packet = self.packetInfo.pack() + self._PAYLOAD.pack(self.iInternalReflectorId)
        return self.packet
class SetReflectorRT(object):
    """RT-side packet for ES_C_SetReflector: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 16  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetReflector

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetReflectorsCT(object):
    """CT-side packet for ES_C_GetReflectors: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetReflectors

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetReflectorsRT(object):
    """RT-side packet for ES_C_GetReflectors.

    Payload: reflector count, reflector id, target-type word, surface offset
    (double) and a fixed 64-byte name field.
    """

    _PAYLOAD = struct.Struct('<i i I d 64s ')

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 100  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetReflectors
        self.iTotalReflectors = 0
        self.iInternalReflectorId = 0
        self.targetType = 0  # ES_TargetType
        self.dSurfaceOffset = 0.0
        self.cReflectorName = b''  # fixed 64-byte field on the wire

    def unpack(self, packet):
        """Decode the header then the reflector record; return the unread tail."""
        self.packet = packet
        rest = self.packetInfo.unpack(packet)
        (self.iTotalReflectors, self.iInternalReflectorId, self.targetType,
         self.dSurfaceOffset, self.cReflectorName) = self._PAYLOAD.unpack(
            rest[:self._PAYLOAD.size])
        return rest[self._PAYLOAD.size:]

    def pack(self):
        """Serialize the header followed by the reflector record."""
        self.packet = self.packetInfo.pack() + self._PAYLOAD.pack(
            self.iTotalReflectors, self.iInternalReflectorId, self.targetType,
            self.dSurfaceOffset, self.cReflectorName)
        return self.packet
class GetReflectorCT(object):
    """CT-side packet for ES_C_GetReflector: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetReflector

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetReflectorRT(object):
    """RT-side packet for ES_C_GetReflector: header plus a reflector id (int32)."""

    _PAYLOAD = struct.Struct('<i ')

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 20  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetReflector
        self.iInternalReflectorId = 0

    def unpack(self, packet):
        """Decode the header then the reflector id; return the unread tail."""
        self.packet = packet
        rest = self.packetInfo.unpack(packet)
        (self.iInternalReflectorId,) = self._PAYLOAD.unpack(rest[:self._PAYLOAD.size])
        return rest[self._PAYLOAD.size:]

    def pack(self):
        """Serialize the header followed by the reflector id."""
        self.packet = self.packetInfo.pack() + self._PAYLOAD.pack(self.iInternalReflectorId)
        return self.packet
class EnvironmentDataT(object):
    """24-byte environment record: temperature, pressure, humidity (three doubles)."""

    _STRUCT = struct.Struct('<d d d ')

    def __init__(self):
        self.packet = b''
        self.dTemperature = 0.0
        self.dPressure = 0.0
        self.dHumidity = 0.0

    def unpack(self, packet):
        """Decode the leading 24 bytes of *packet*; return the unconsumed tail."""
        self.packet = packet
        (self.dTemperature,
         self.dPressure,
         self.dHumidity) = self._STRUCT.unpack(packet[:self._STRUCT.size])
        return packet[self._STRUCT.size:]

    def pack(self):
        """Serialize the current field values; result is cached on self.packet."""
        self.packet = self._STRUCT.pack(self.dTemperature, self.dPressure, self.dHumidity)
        return self.packet
class SetEnvironmentParamsCT(object):
    """CT-side packet for ES_C_SetEnvironmentParams: header + EnvironmentDataT."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 36  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetEnvironmentParams
        self.environmentData = EnvironmentDataT()

    def unpack(self, packet):
        """Decode the header then the environment payload; return the tail."""
        self.packet = packet
        return self.environmentData.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the environment payload."""
        self.packet = self.packetInfo.pack() + self.environmentData.pack()
        return self.packet
class SetEnvironmentParamsRT(object):
    """RT-side packet for ES_C_SetEnvironmentParams: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 16  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetEnvironmentParams

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetEnvironmentParamsCT(object):
    """CT-side packet for ES_C_GetEnvironmentParams: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetEnvironmentParams

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetEnvironmentParamsRT(object):
    """RT-side packet for ES_C_GetEnvironmentParams: header + EnvironmentDataT."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 40  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetEnvironmentParams
        self.environmentData = EnvironmentDataT()

    def unpack(self, packet):
        """Decode the header then the environment payload; return the tail."""
        self.packet = packet
        return self.environmentData.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the environment payload."""
        self.packet = self.packetInfo.pack() + self.environmentData.pack()
        return self.packet
class RefractionDataT(object):
    """16-byte refraction record: IFM and ADM refraction indices (two doubles)."""

    _STRUCT = struct.Struct('<d d ')

    def __init__(self):
        self.packet = b''
        self.dIfmRefractionIndex = 0.0
        self.dAdmRefractionIndex = 0.0

    def unpack(self, packet):
        """Decode the leading 16 bytes of *packet*; return the unconsumed tail."""
        self.packet = packet
        (self.dIfmRefractionIndex,
         self.dAdmRefractionIndex) = self._STRUCT.unpack(packet[:self._STRUCT.size])
        return packet[self._STRUCT.size:]

    def pack(self):
        """Serialize the current field values; result is cached on self.packet."""
        self.packet = self._STRUCT.pack(self.dIfmRefractionIndex, self.dAdmRefractionIndex)
        return self.packet
class SetRefractionParamsCT(object):
    """CT-side packet for ES_C_SetRefractionParams: header + RefractionDataT."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 28  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetRefractionParams
        self.refractionData = RefractionDataT()

    def unpack(self, packet):
        """Decode the header then the refraction payload; return the tail."""
        self.packet = packet
        return self.refractionData.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the refraction payload."""
        self.packet = self.packetInfo.pack() + self.refractionData.pack()
        return self.packet
class SetRefractionParamsRT(object):
    """RT-side packet for ES_C_SetRefractionParams: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 16  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetRefractionParams

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetRefractionParamsCT(object):
    """CT-side packet for ES_C_GetRefractionParams: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetRefractionParams

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetRefractionParamsRT(object):
    """RT-side packet for ES_C_GetRefractionParams: header + RefractionDataT."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 32  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetRefractionParams
        self.refractionData = RefractionDataT()

    def unpack(self, packet):
        """Decode the header then the refraction payload; return the tail."""
        self.packet = packet
        return self.refractionData.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the refraction payload."""
        self.packet = self.packetInfo.pack() + self.refractionData.pack()
        return self.packet
class StationOrientationDataT(object):
    """48-byte station-orientation record: three translation and three rotation doubles."""

    _STRUCT = struct.Struct('<d d d d d d ')
    # Field order matches the wire layout above.
    _FIELDS = ('dVal1', 'dVal2', 'dVal3', 'dRot1', 'dRot2', 'dRot3')

    def __init__(self):
        self.packet = b''
        for name in self._FIELDS:
            setattr(self, name, 0.0)

    def unpack(self, packet):
        """Decode the leading 48 bytes of *packet*; return the unconsumed tail."""
        self.packet = packet
        values = self._STRUCT.unpack(packet[:self._STRUCT.size])
        for name, value in zip(self._FIELDS, values):
            setattr(self, name, value)
        return packet[self._STRUCT.size:]

    def pack(self):
        """Serialize the current field values; result is cached on self.packet."""
        self.packet = self._STRUCT.pack(*(getattr(self, name) for name in self._FIELDS))
        return self.packet
class SetStationOrientationParamsCT(object):
    """CT-side packet for ES_C_SetStationOrientationParams: header + StationOrientationDataT."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 60  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetStationOrientationParams
        self.stationOrientation = StationOrientationDataT()

    def unpack(self, packet):
        """Decode the header then the orientation payload; return the tail."""
        self.packet = packet
        return self.stationOrientation.unpack(self.packetInfo.unpack(packet))

    def pack(self):
        """Serialize the header followed by the orientation payload."""
        self.packet = self.packetInfo.pack() + self.stationOrientation.pack()
        return self.packet
class SetStationOrientationParamsRT(object):
    """RT-side packet for ES_C_SetStationOrientationParams: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = 16  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetStationOrientationParams

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetStationOrientationParamsCT(object):
    """CT-side packet for ES_C_GetStationOrientationParams: header only, no payload."""

    def __init__(self):
        self.packet = b''
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = 12  # total packet size in bytes
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetStationOrientationParams

    def unpack(self, packet):
        """Consume the command header; return the unread remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize the command header."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetStationOrientationParamsRT(object):
    """ES_C_GetStationOrientationParams reply packet: BasicCommandRT header
    followed by a StationOrientationDataT payload (64 bytes total)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 64
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetStationOrientationParams
        self.packetInfo = info
        self.stationOrientation = StationOrientationDataT()
    def unpack(self, packet):
        # Header first, then the orientation payload; return leftover bytes.
        self.packet = packet
        remainder = self.packetInfo.unpack(packet)
        return self.stationOrientation.unpack(remainder)
    def pack(self):
        self.packet = self.packetInfo.pack() + self.stationOrientation.pack()
        return self.packet
class TransformationDataT(object):
    """Wire record of seven transformation parameters — dVal1..3, dRot1..3
    and dScale — packed as seven little-endian doubles (56 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 56
        self.__sizes = [56]
        self.__formats = ['<d d d d d d d ']
        self.dVal1 = float(0)
        self.dVal2 = float(0)
        self.dVal3 = float(0)
        self.dRot1 = float(0)
        self.dRot2 = float(0)
        self.dRot3 = float(0)
        self.dScale = float(0)
    def unpack(self, packet):
        # Decode the leading 56 bytes into the seven fields and return
        # whatever trails the record.
        self.packet = packet
        size = self.__sizes[0]
        (self.dVal1, self.dVal2, self.dVal3,
         self.dRot1, self.dRot2, self.dRot3,
         self.dScale) = struct.Struct(self.__formats[0]).unpack(packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = struct.Struct(self.__formats[0]).pack(
            self.dVal1, self.dVal2, self.dVal3,
            self.dRot1, self.dRot2, self.dRot3, self.dScale)
        return self.packet
class SetTransformationParamsCT(object):
    """ES_C_SetTransformationParams command packet: BasicCommandCT header
    followed by a TransformationDataT payload (68 bytes total)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 68
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetTransformationParams
        self.packetInfo = info
        self.transformationData = TransformationDataT()
    def unpack(self, packet):
        # Header first, then the transformation payload; return leftovers.
        self.packet = packet
        remainder = self.packetInfo.unpack(packet)
        return self.transformationData.unpack(remainder)
    def pack(self):
        self.packet = self.packetInfo.pack() + self.transformationData.pack()
        return self.packet
class SetTransformationParamsRT(object):
    """ES_C_SetTransformationParams reply packet: BasicCommandRT header
    only, no payload (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetTransformationParams
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetTransformationParamsCT(object):
    """ES_C_GetTransformationParams command packet: BasicCommandCT header
    only, no payload (12 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetTransformationParams
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetTransformationParamsRT(object):
    """ES_C_GetTransformationParams reply packet: BasicCommandRT header
    followed by a TransformationDataT payload (72 bytes total)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 72
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetTransformationParams
        self.packetInfo = info
        self.transformationData = TransformationDataT()
    def unpack(self, packet):
        # Header first, then the transformation payload; return leftovers.
        self.packet = packet
        remainder = self.packetInfo.unpack(packet)
        return self.transformationData.unpack(remainder)
    def pack(self):
        self.packet = self.packetInfo.pack() + self.transformationData.pack()
        return self.packet
class GoPositionCT(object):
    """ES_C_GoPosition command packet: BasicCommandCT header plus a payload
    of three doubles (dVal1..3) and an int flag (bUseADM)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 40
        self.__sizes = [28]
        self.__formats = ['<d d d i ']
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoPosition
        self.packetInfo = info
        self.dVal1 = float(0)
        self.dVal2 = float(0)
        self.dVal3 = float(0)
        self.bUseADM = int(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dVal1, self.dVal2, self.dVal3,
         self.bUseADM) = struct.Struct(self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.dVal1, self.dVal2, self.dVal3, self.bUseADM)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class GoPositionRT(object):
    """ES_C_GoPosition reply packet: BasicCommandRT header only (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoPosition
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetDirectionCT(object):
    """ES_C_GetDirection command packet: BasicCommandCT header only
    (12 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetDirection
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetDirectionRT(object):
    """ES_C_GetDirection reply packet: BasicCommandRT header plus two
    doubles, dHzAngle and dVtAngle."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 32
        self.__sizes = [16]
        self.__formats = ['<d d ']
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetDirection
        self.packetInfo = info
        self.dHzAngle = float(0)
        self.dVtAngle = float(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dHzAngle,
         self.dVtAngle) = struct.Struct(self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.dHzAngle, self.dVtAngle)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class GoPositionHVDCT(object):
    """ES_C_GoPositionHVD command packet: BasicCommandCT header plus
    dHzAngle, dVtAngle, dDistance (doubles) and the int flag bUseADM."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 40
        self.__sizes = [28]
        self.__formats = ['<d d d i ']
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoPositionHVD
        self.packetInfo = info
        self.dHzAngle = float(0)
        self.dVtAngle = float(0)
        self.dDistance = float(0)
        self.bUseADM = int(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dHzAngle, self.dVtAngle, self.dDistance,
         self.bUseADM) = struct.Struct(self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.dHzAngle, self.dVtAngle, self.dDistance, self.bUseADM)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class GoPositionHVDRT(object):
    """ES_C_GoPositionHVD reply packet: BasicCommandRT header only
    (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoPositionHVD
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class PointLaserCT(object):
    """ES_C_PointLaser command packet: BasicCommandCT header plus three
    doubles (dVal1..3)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 36
        self.__sizes = [24]
        self.__formats = ['<d d d ']
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_PointLaser
        self.packetInfo = info
        self.dVal1 = float(0)
        self.dVal2 = float(0)
        self.dVal3 = float(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dVal1, self.dVal2,
         self.dVal3) = struct.Struct(self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.dVal1, self.dVal2, self.dVal3)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class PointLaserRT(object):
    """ES_C_PointLaser reply packet: BasicCommandRT header only (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_PointLaser
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class PositionRelativeHVCT(object):
    """ES_C_PositionRelativeHV command packet: BasicCommandCT header plus
    two doubles, dHzVal and dVtVal."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 28
        self.__sizes = [16]
        self.__formats = ['<d d ']
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_PositionRelativeHV
        self.packetInfo = info
        self.dHzVal = float(0)
        self.dVtVal = float(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dHzVal,
         self.dVtVal) = struct.Struct(self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.dHzVal, self.dVtVal)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class PositionRelativeHVRT(object):
    """ES_C_PositionRelativeHV reply packet: BasicCommandRT header only
    (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_PositionRelativeHV
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class PointLaserHVDCT(object):
    """ES_C_PointLaserHVD command packet: BasicCommandCT header plus three
    doubles — dHzAngle, dVtAngle, dDistance."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 36
        self.__sizes = [24]
        self.__formats = ['<d d d ']
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_PointLaserHVD
        self.packetInfo = info
        self.dHzAngle = float(0)
        self.dVtAngle = float(0)
        self.dDistance = float(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dHzAngle, self.dVtAngle,
         self.dDistance) = struct.Struct(self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.dHzAngle, self.dVtAngle, self.dDistance)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class PointLaserHVDRT(object):
    """ES_C_PointLaserHVD reply packet: BasicCommandRT header only
    (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_PointLaserHVD
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class MoveHVCT(object):
    """ES_C_MoveHV command packet: BasicCommandCT header plus two ints,
    iHzSpeed and iVtSpeed."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 20
        self.__sizes = [8]
        self.__formats = ['<i i ']
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_MoveHV
        self.packetInfo = info
        self.iHzSpeed = int(0)
        self.iVtSpeed = int(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.iHzSpeed,
         self.iVtSpeed) = struct.Struct(self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.iHzSpeed, self.iVtSpeed)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class MoveHVRT(object):
    """ES_C_MoveHV reply packet: BasicCommandRT header only (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_MoveHV
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GoNivelPositionCT(object):
    """ES_C_GoNivelPosition command packet: BasicCommandCT header plus one
    unsigned int, nivelPosition (an ES_NivelPosition enum value)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = [4]
        self.__formats = ['<I ']
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoNivelPosition
        self.packetInfo = info
        self.nivelPosition = int(0)  # ES_NivelPosition enum value
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.nivelPosition,) = struct.Struct(self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(self.nivelPosition)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class GoNivelPositionRT(object):
    """ES_C_GoNivelPosition reply packet: BasicCommandRT header only
    (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GoNivelPosition
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class CallOrientToGravityCT(object):
    """ES_C_CallOrientToGravity command packet: BasicCommandCT header only
    (12 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_CallOrientToGravity
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class CallOrientToGravityRT(object):
    """ES_C_CallOrientToGravity reply packet: BasicCommandRT header plus
    two doubles, dOmega and dPhi."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 32
        self.__sizes = [16]
        self.__formats = ['<d d ']
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_CallOrientToGravity
        self.packetInfo = info
        self.dOmega = float(0)
        self.dPhi = float(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dOmega,
         self.dPhi) = struct.Struct(self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(self.dOmega, self.dPhi)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class SetCompensationCT(object):
    """ES_C_SetCompensation command packet: BasicCommandCT header plus one
    int, iInternalCompensationId."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = [4]
        self.__formats = ['<i ']
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetCompensation
        self.packetInfo = info
        self.iInternalCompensationId = int(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.iInternalCompensationId,) = struct.Struct(
            self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.iInternalCompensationId)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class SetCompensationRT(object):
    """ES_C_SetCompensation reply packet: BasicCommandRT header only
    (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetCompensation
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetCompensationCT(object):
    """ES_C_GetCompensation command packet: BasicCommandCT header only
    (12 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetCompensation
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetCompensationRT(object):
    """ES_C_GetCompensation reply packet: BasicCommandRT header plus one
    int, iInternalCompensationId."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 20
        self.__sizes = [4]
        self.__formats = ['<i ']
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetCompensation
        self.packetInfo = info
        self.iInternalCompensationId = int(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.iInternalCompensationId,) = struct.Struct(
            self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.iInternalCompensationId)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class GetCompensationsCT(object):
    """ES_C_GetCompensations command packet: BasicCommandCT header only
    (12 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetCompensations
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetCompensationsRT(object):
    """ES_C_GetCompensations reply packet: BasicCommandRT header followed
    by one compensation record — two ints, three fixed-size byte fields
    (64/256/64 bytes) and a final int flag (396-byte payload)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 412
        self.__sizes = [396]
        self.__formats = ['<i i 64s 256s 64s i ']
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetCompensations
        self.packetInfo = info
        self.iTotalCompensations = int(0)
        self.iInternalCompensationId = int(0)
        self.cTrackerCompensationName = b''     # fixed 64-byte wire field
        self.cTrackerCompensationComment = b''  # fixed 256-byte wire field
        self.cADMCompensationName = b''         # fixed 64-byte wire field
        self.bHasMeasurementCameraMounted = int(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.iTotalCompensations,
         self.iInternalCompensationId,
         self.cTrackerCompensationName,
         self.cTrackerCompensationComment,
         self.cADMCompensationName,
         self.bHasMeasurementCameraMounted) = struct.Struct(
            self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.iTotalCompensations,
            self.iInternalCompensationId,
            self.cTrackerCompensationName,
            self.cTrackerCompensationComment,
            self.cADMCompensationName,
            self.bHasMeasurementCameraMounted)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class GetCompensations2CT(object):
    """ES_C_GetCompensations2 command packet: BasicCommandCT header only
    (12 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetCompensations2
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetCompensations2RT(object):
    """ES_C_GetCompensations2 reply packet: BasicCommandRT header followed
    by an extended compensation record — two ints, four fixed-size byte
    fields (64/256/64/256 bytes) and two int flags (656-byte payload)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 672
        self.__sizes = [656]
        self.__formats = ['<i i 64s 256s 64s 256s i i ']
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetCompensations2
        self.packetInfo = info
        self.iTotalCompensations = int(0)
        self.iInternalCompensationId = int(0)
        self.cTrackerCompensationName = b''     # fixed 64-byte wire field
        self.cTrackerCompensationComment = b''  # fixed 256-byte wire field
        self.cADMCompensationName = b''         # fixed 64-byte wire field
        self.cADMCompensationComment = b''      # fixed 256-byte wire field
        self.bHasMeasurementCameraMounted = int(0)
        self.bIsActive = int(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.iTotalCompensations,
         self.iInternalCompensationId,
         self.cTrackerCompensationName,
         self.cTrackerCompensationComment,
         self.cADMCompensationName,
         self.cADMCompensationComment,
         self.bHasMeasurementCameraMounted,
         self.bIsActive) = struct.Struct(self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.iTotalCompensations,
            self.iInternalCompensationId,
            self.cTrackerCompensationName,
            self.cTrackerCompensationComment,
            self.cADMCompensationName,
            self.cADMCompensationComment,
            self.bHasMeasurementCameraMounted,
            self.bIsActive)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class SetStatisticModeCT(object):
    """ES_C_SetStatisticMode command packet: BasicCommandCT header plus two
    unsigned ints (ES_StatisticMode values) for stationary and continuous
    measurements."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 20
        self.__sizes = [8]
        self.__formats = ['<I I ']
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetStatisticMode
        self.packetInfo = info
        self.stationaryMeasurements = int(0)  # ES_StatisticMode enum value
        self.continuousMeasurements = int(0)  # ES_StatisticMode enum value
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.stationaryMeasurements,
         self.continuousMeasurements) = struct.Struct(
            self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.stationaryMeasurements, self.continuousMeasurements)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class SetStatisticModeRT(object):
    """ES_C_SetStatisticMode reply packet: BasicCommandRT header only
    (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetStatisticMode
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetStatisticModeCT(object):
    """ES_C_GetStatisticMode command packet: BasicCommandCT header only
    (12 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetStatisticMode
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetStatisticModeRT(object):
    """ES_C_GetStatisticMode reply packet: BasicCommandRT header plus two
    unsigned ints (ES_StatisticMode values)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 24
        self.__sizes = [8]
        self.__formats = ['<I I ']
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetStatisticMode
        self.packetInfo = info
        self.stationaryMeasurements = int(0)  # ES_StatisticMode enum value
        self.continuousMeasurements = int(0)  # ES_StatisticMode enum value
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.stationaryMeasurements,
         self.continuousMeasurements) = struct.Struct(
            self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.stationaryMeasurements, self.continuousMeasurements)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class CameraParamsDataT(object):
    """Wire record of camera settings — iContrast, iBrightness and
    iSaturation — packed as three little-endian ints (12 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = [12]
        self.__formats = ['<i i i ']
        self.iContrast = int(0)
        self.iBrightness = int(0)
        self.iSaturation = int(0)
    def unpack(self, packet):
        # Decode the leading 12 bytes into the three fields and return
        # whatever trails the record.
        self.packet = packet
        size = self.__sizes[0]
        (self.iContrast, self.iBrightness,
         self.iSaturation) = struct.Struct(self.__formats[0]).unpack(packet[:size])
        return packet[size:]
    def pack(self):
        self.packet = struct.Struct(self.__formats[0]).pack(
            self.iContrast, self.iBrightness, self.iSaturation)
        return self.packet
class SetCameraParamsCT(object):
    """ES_C_SetCameraParams command packet: BasicCommandCT header followed
    by a CameraParamsDataT payload (24 bytes total)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 24
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetCameraParams
        self.packetInfo = info
        self.cameraParams = CameraParamsDataT()
    def unpack(self, packet):
        # Header first, then the camera-parameter payload; return leftovers.
        self.packet = packet
        remainder = self.packetInfo.unpack(packet)
        return self.cameraParams.unpack(remainder)
    def pack(self):
        self.packet = self.packetInfo.pack() + self.cameraParams.pack()
        return self.packet
class SetCameraParamsRT(object):
    """ES_C_SetCameraParams reply packet: BasicCommandRT header only
    (16 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_SetCameraParams
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetCameraParamsCT(object):
    """ES_C_GetCameraParams command packet: BasicCommandCT header only
    (12 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetCameraParams
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetCameraParamsRT(object):
    """ES_C_GetCameraParams reply packet: BasicCommandRT header followed by
    a CameraParamsDataT payload (28 bytes total)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 28
        self.__sizes = []
        self.__formats = []
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetCameraParams
        self.packetInfo = info
        self.cameraParams = CameraParamsDataT()
    def unpack(self, packet):
        # Header first, then the camera-parameter payload; return leftovers.
        self.packet = packet
        remainder = self.packetInfo.unpack(packet)
        return self.cameraParams.unpack(remainder)
    def pack(self):
        self.packet = self.packetInfo.pack() + self.cameraParams.pack()
        return self.packet
class GetADMInfo2CT(object):
    """ES_C_GetADMInfo2 command packet: BasicCommandCT header only
    (12 bytes)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        info = BasicCommandCT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetADMInfo2
        self.packetInfo = info
    def unpack(self, packet):
        self.packet = packet
        return self.packetInfo.unpack(packet)
    def pack(self):
        # Header-only packet: the serialized form is just the command header.
        self.packet = self.packetInfo.pack()
        return self.packet
class GetADMInfo2RT(object):
    """ES_C_GetADMInfo2 reply packet: BasicCommandRT header followed by an
    ADM description record — type enum, 64-byte name field, version/serial
    ints, distance-range doubles, data rate and accuracy (108-byte payload)."""
    def __init__(self):
        self.packet = b''
        self.__packet_size = 124
        self.__sizes = [108]
        self.__formats = ['<I 64s i i i d d i d ']
        info = BasicCommandRT()
        info.packetHeader.lPacketSize = self.__packet_size
        info.packetHeader.type = ES_DT_Command
        info.command = ES_C_GetADMInfo2
        self.packetInfo = info
        self.admType = int(0)  # ES_ADMType enum value
        self.cADMName = b''    # fixed 64-byte wire field
        self.lSerialNumber = int(0)
        self.iFirmwareMajorVersionNumber = int(0)
        self.iFirmwareMinorVersionNumber = int(0)
        self.dMaxDistance = float(0)
        self.dMinDistance = float(0)
        self.iMaxDataRate = int(0)
        self.dAccuracyADMDistance = float(0)
    def unpack(self, packet):
        self.packet = packet
        body = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.admType,
         self.cADMName,
         self.lSerialNumber,
         self.iFirmwareMajorVersionNumber,
         self.iFirmwareMinorVersionNumber,
         self.dMaxDistance,
         self.dMinDistance,
         self.iMaxDataRate,
         self.dAccuracyADMDistance) = struct.Struct(
            self.__formats[0]).unpack(body[:size])
        return body[size:]
    def pack(self):
        payload = struct.Struct(self.__formats[0]).pack(
            self.admType,
            self.cADMName,
            self.lSerialNumber,
            self.iFirmwareMajorVersionNumber,
            self.iFirmwareMinorVersionNumber,
            self.dMaxDistance,
            self.dMinDistance,
            self.iMaxDataRate,
            self.dAccuracyADMDistance)
        self.packet = self.packetInfo.pack() + payload
        return self.packet
class GetNivelInfoCT(object):
    """Command packet: request nivel (level sensor) information (header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetNivelInfo

    def unpack(self, packet):
        """Consume the command header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this command; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetNivelInfoRT(object):
    """Reply packet for GetNivelInfo: firmware version and serial number."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 28
        self.__sizes = [12]
        self.__formats = ['<i i i ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetNivelInfo
        self.iFirmwareMajorVersionNumber = 0
        self.iFirmwareMinorVersionNumber = 0
        self.lSerialNumber = 0

    def unpack(self, packet):
        """Decode header and payload from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.iFirmwareMajorVersionNumber,
         self.iFirmwareMinorVersionNumber,
         self.lSerialNumber) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus payload; return the raw bytes."""
        fields = (self.iFirmwareMajorVersionNumber,
                  self.iFirmwareMinorVersionNumber,
                  self.lSerialNumber)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class GetNivelInfo2CT(object):
    """Command packet: request extended nivel information (header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetNivelInfo2

    def unpack(self, packet):
        """Consume the command header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this command; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetNivelInfo2RT(object):
    """Reply packet for GetNivelInfo2: nivel identity, firmware, and accuracy data."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 120
        self.__sizes = [104]
        self.__formats = ['<I 64s i i i d d d ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetNivelInfo2
        self.nivelType = 0  # ES_NivelType
        self.cNivelName = b''  # 64 bytes max
        self.lSerialNumber = 0
        self.iFirmwareMajorVersionNumber = 0
        self.iFirmwareMinorVersionNumber = 0
        self.dMeasurementRange = 0.0
        self.dMeasurementAccuracyOffset = 0.0
        self.dMeasurementAccuracyFactor = 0.0

    def unpack(self, packet):
        """Decode header and nivel-info payload from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.nivelType,
         self.cNivelName,
         self.lSerialNumber,
         self.iFirmwareMajorVersionNumber,
         self.iFirmwareMinorVersionNumber,
         self.dMeasurementRange,
         self.dMeasurementAccuracyOffset,
         self.dMeasurementAccuracyFactor) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus nivel-info payload; return the raw bytes."""
        fields = (self.nivelType,
                  self.cNivelName,
                  self.lSerialNumber,
                  self.iFirmwareMajorVersionNumber,
                  self.iFirmwareMinorVersionNumber,
                  self.dMeasurementRange,
                  self.dMeasurementAccuracyOffset,
                  self.dMeasurementAccuracyFactor)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class GetTPInfoCT(object):
    """Command packet: request tracker-processor information (header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetTPInfo

    def unpack(self, packet):
        """Consume the command header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this command; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetTPInfoRT(object):
    """Reply packet for GetTPInfo: tracker-processor version and hardware data."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 56
        self.__sizes = [40]
        self.__formats = ['<i i i i i i I I i I ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetTPInfo
        self.iTPBootMajorVersionNumber = 0
        self.iTPBootMinorVersionNumber = 0
        self.iTPFirmwareMajorVersionNumber = 0
        self.iTPFirmwareMinorVersionNumber = 0
        self.iLCPFirmwareMajorVersionNumber = 0
        self.iLCPFirmwareMinorVersionNumber = 0
        self.trackerprocessorType = 0  # ES_TrackerProcessorType
        self.microProcessorType = 0  # ES_TPMicroProcessorType
        self.iMicroProcessorClockSpeed = 0
        self.laserTrackerSensorType = 0  # ES_LTSensorType

    def unpack(self, packet):
        """Decode header and TP-info payload from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.iTPBootMajorVersionNumber,
         self.iTPBootMinorVersionNumber,
         self.iTPFirmwareMajorVersionNumber,
         self.iTPFirmwareMinorVersionNumber,
         self.iLCPFirmwareMajorVersionNumber,
         self.iLCPFirmwareMinorVersionNumber,
         self.trackerprocessorType,
         self.microProcessorType,
         self.iMicroProcessorClockSpeed,
         self.laserTrackerSensorType) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus TP-info payload; return the raw bytes."""
        fields = (self.iTPBootMajorVersionNumber,
                  self.iTPBootMinorVersionNumber,
                  self.iTPFirmwareMajorVersionNumber,
                  self.iTPFirmwareMinorVersionNumber,
                  self.iLCPFirmwareMajorVersionNumber,
                  self.iLCPFirmwareMinorVersionNumber,
                  self.trackerprocessorType,
                  self.microProcessorType,
                  self.iMicroProcessorClockSpeed,
                  self.laserTrackerSensorType)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class GetTrackerInfoCT(object):
    """Command packet: request laser-tracker information (header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetTrackerInfo

    def unpack(self, packet):
        """Consume the command header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this command; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetTrackerInfoRT(object):
    """Reply packet for GetTrackerInfo: tracker identity, capabilities, and ranges."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 164
        self.__sizes = [148]
        self.__formats = ['<I 64s i i i i i d d d i i d d I i i ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetTrackerInfo
        self.trackerType = 0  # ES_LTSensorType
        self.cTrackerName = b''  # 64 bytes max
        self.lSerialNumber = 0
        self.lCompensationIdNumber = 0
        self.bHasADM = 0
        self.bHasOverviewCamera = 0
        self.bHasNivel = 0
        self.dNivelMountOffset = 0.0
        self.dMaxDistance = 0.0
        self.dMinDistance = 0.0
        self.iMaxDataRate = 0
        self.iNumberOfFaces = 0
        self.dHzAngleRange = 0.0
        self.dVtAngleRange = 0.0
        self.accuracyModel = 0  # ES_TrkAccuracyModel
        self.iMajLCPFirmwareVersion = 0
        self.iMinLCPFirmwareVersion = 0

    def unpack(self, packet):
        """Decode header and tracker-info payload from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.trackerType,
         self.cTrackerName,
         self.lSerialNumber,
         self.lCompensationIdNumber,
         self.bHasADM,
         self.bHasOverviewCamera,
         self.bHasNivel,
         self.dNivelMountOffset,
         self.dMaxDistance,
         self.dMinDistance,
         self.iMaxDataRate,
         self.iNumberOfFaces,
         self.dHzAngleRange,
         self.dVtAngleRange,
         self.accuracyModel,
         self.iMajLCPFirmwareVersion,
         self.iMinLCPFirmwareVersion) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus tracker-info payload; return the raw bytes."""
        fields = (self.trackerType,
                  self.cTrackerName,
                  self.lSerialNumber,
                  self.lCompensationIdNumber,
                  self.bHasADM,
                  self.bHasOverviewCamera,
                  self.bHasNivel,
                  self.dNivelMountOffset,
                  self.dMaxDistance,
                  self.dMinDistance,
                  self.iMaxDataRate,
                  self.iNumberOfFaces,
                  self.dHzAngleRange,
                  self.dVtAngleRange,
                  self.accuracyModel,
                  self.iMajLCPFirmwareVersion,
                  self.iMinLCPFirmwareVersion)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class GetATRInfoCT(object):
    """Command packet: request ATR information (header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetATRInfo

    def unpack(self, packet):
        """Consume the command header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this command; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetATRInfoRT(object):
    """Reply packet for GetATRInfo: ATR identity, versions, and tracking limits."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 140
        self.__sizes = [124]
        self.__formats = ['<I 64s i i i i i i d d d d ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetATRInfo
        self.atrType = 0  # ES_ATRType
        self.cATRName = b''  # 64 bytes max
        self.lMajFirmwareVersion = 0
        self.lMinFirmwareVersion = 0
        self.lBuildFirmwareVersion = 0
        self.lHardwareVersion = 0
        self.lErrorcode = 0
        self.lFPGAVersion = 0
        self.dMaxDistance = 0.0
        self.dMinDistance = 0.0
        self.dFieldOfView = 0.0
        self.dMaxTrackingSpeed = 0.0

    def unpack(self, packet):
        """Decode header and ATR-info payload from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.atrType,
         self.cATRName,
         self.lMajFirmwareVersion,
         self.lMinFirmwareVersion,
         self.lBuildFirmwareVersion,
         self.lHardwareVersion,
         self.lErrorcode,
         self.lFPGAVersion,
         self.dMaxDistance,
         self.dMinDistance,
         self.dFieldOfView,
         self.dMaxTrackingSpeed) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus ATR-info payload; return the raw bytes."""
        fields = (self.atrType,
                  self.cATRName,
                  self.lMajFirmwareVersion,
                  self.lMinFirmwareVersion,
                  self.lBuildFirmwareVersion,
                  self.lHardwareVersion,
                  self.lErrorcode,
                  self.lFPGAVersion,
                  self.dMaxDistance,
                  self.dMinDistance,
                  self.dFieldOfView,
                  self.dMaxTrackingSpeed)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class SetLaserOnTimerCT(object):
    """Command packet: program the laser-on timer with an hour/minute offset."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 20
        self.__sizes = [8]
        self.__formats = ['<i i ']
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetLaserOnTimer
        self.iLaserOnTimeOffsetHour = 0
        self.iLaserOnTimeOffsetMinute = 0

    def unpack(self, packet):
        """Decode header and timer offsets from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.iLaserOnTimeOffsetHour,
         self.iLaserOnTimeOffsetMinute) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus timer offsets; return the raw bytes."""
        fields = (self.iLaserOnTimeOffsetHour, self.iLaserOnTimeOffsetMinute)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class SetLaserOnTimerRT(object):
    """Reply packet for SetLaserOnTimer (status header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetLaserOnTimer

    def unpack(self, packet):
        """Consume the reply header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this reply; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetLaserOnTimerCT(object):
    """Command packet: query the laser-on timer (header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetLaserOnTimer

    def unpack(self, packet):
        """Consume the command header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this command; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetLaserOnTimerRT(object):
    """Reply packet for GetLaserOnTimer: remaining hour/minute offset."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 24
        self.__sizes = [8]
        self.__formats = ['<i i ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetLaserOnTimer
        self.iLaserOnTimeOffsetHour = 0
        self.iLaserOnTimeOffsetMinute = 0

    def unpack(self, packet):
        """Decode header and timer offsets from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.iLaserOnTimeOffsetHour,
         self.iLaserOnTimeOffsetMinute) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus timer offsets; return the raw bytes."""
        fields = (self.iLaserOnTimeOffsetHour, self.iLaserOnTimeOffsetMinute)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class GetFaceCT(object):
    """Command packet: query the current tracker face (header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetFace

    def unpack(self, packet):
        """Consume the command header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this command; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetFaceRT(object):
    """Reply packet for GetFace: the active tracker face."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 20
        self.__sizes = [4]
        self.__formats = ['<I ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetFace
        self.trackerFace = 0  # ES_TrackerFace

    def unpack(self, packet):
        """Decode header and face value from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.trackerFace,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus face value; return the raw bytes."""
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], self.trackerFace)
        return self.packet
class SetLongSystemParamCT(object):
    """Command packet: set an integer-valued system parameter."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 20
        self.__sizes = [8]
        self.__formats = ['<I i ']
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetLongSystemParameter
        self.systemParam = 0  # ES_SystemParameter
        self.lParameter = 0

    def unpack(self, packet):
        """Decode header, parameter id, and value from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.systemParam,
         self.lParameter) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus parameter id and value; return the raw bytes."""
        fields = (self.systemParam, self.lParameter)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class SetLongSystemParamRT(object):
    """Reply packet for SetLongSystemParameter (status header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetLongSystemParameter

    def unpack(self, packet):
        """Consume the reply header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this reply; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetLongSystemParamCT(object):
    """Command packet: query an integer-valued system parameter by id."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = [4]
        self.__formats = ['<I ']
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetLongSystemParameter
        self.systemParam = 0  # ES_SystemParameter

    def unpack(self, packet):
        """Decode header and parameter id from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.systemParam,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus parameter id; return the raw bytes."""
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], self.systemParam)
        return self.packet
class GetLongSystemParamRT(object):
    """Reply packet for GetLongSystemParameter: parameter id and integer value."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 24
        self.__sizes = [8]
        self.__formats = ['<I i ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetLongSystemParameter
        self.systemParam = 0  # ES_SystemParameter
        self.lParameter = 0

    def unpack(self, packet):
        """Decode header, parameter id, and value from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.systemParam,
         self.lParameter) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus parameter id and value; return the raw bytes."""
        fields = (self.systemParam, self.lParameter)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class GetObjectTemperatureCT(object):
    """Command packet: request the object temperature (header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetObjectTemperature

    def unpack(self, packet):
        """Consume the command header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this command; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetObjectTemperatureRT(object):
    """Reply packet for GetObjectTemperature: a single double value."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 24
        self.__sizes = [8]
        self.__formats = ['<d ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetObjectTemperature
        self.dObjectTemperature = 0.0

    def unpack(self, packet):
        """Decode header and temperature from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dObjectTemperature,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus temperature; return the raw bytes."""
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], self.dObjectTemperature)
        return self.packet
class ClearCommandQueueCT(object):
    """Command packet: clear the command queue, scoped by a queue type."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = [4]
        self.__formats = ['<I ']
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_ClearCommandQueue
        self.clearQueueType = 0  # ES_ClearCommandQueueType

    def unpack(self, packet):
        """Decode header and queue type from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.clearQueueType,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus queue type; return the raw bytes."""
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], self.clearQueueType)
        return self.packet
class ClearCommandQueueRT(object):
    """Reply packet for ClearCommandQueue (status header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_ClearCommandQueue

    def unpack(self, packet):
        """Consume the reply header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this reply; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetOverviewCameraInfoCT(object):
    """Command packet: request overview-camera information (header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetOverviewCameraInfo

    def unpack(self, packet):
        """Consume the command header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this command; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GetOverviewCameraInfoRT(object):
    """Reply packet for GetOverviewCameraInfo: camera identity and optics data."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 120
        self.__sizes = [104]
        self.__formats = ['<I 64s i d d d i i ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetOverviewCameraInfo
        self.cameraType = 0  # ES_OverviewCameraType
        self.cCameraName = b''  # 64 bytes max
        self.bIsColorCamera = 0
        self.dFocalLength = 0.0
        self.dHorizontalChipSize = 0.0
        self.dVerticalChipSize = 0.0
        self.bMirrorImageHz = 0
        self.bMirrorImageVt = 0

    def unpack(self, packet):
        """Decode header and camera-info payload from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.cameraType,
         self.cCameraName,
         self.bIsColorCamera,
         self.dFocalLength,
         self.dHorizontalChipSize,
         self.dVerticalChipSize,
         self.bMirrorImageHz,
         self.bMirrorImageVt) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus camera-info payload; return the raw bytes."""
        fields = (self.cameraType,
                  self.cCameraName,
                  self.bIsColorCamera,
                  self.dFocalLength,
                  self.dHorizontalChipSize,
                  self.dVerticalChipSize,
                  self.bMirrorImageHz,
                  self.bMirrorImageVt)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class GetDoubleSystemParamCT(object):
    """Command packet: query a double-valued system parameter by id."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = [4]
        self.__formats = ['<I ']
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetDoubleSystemParameter
        self.systemParam = 0  # ES_SystemParameter

    def unpack(self, packet):
        """Decode header and parameter id from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.systemParam,) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus parameter id; return the raw bytes."""
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], self.systemParam)
        return self.packet
class GetDoubleSystemParamRT(object):
    """Reply packet for GetDoubleSystemParameter: parameter id and double value."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 28
        self.__sizes = [12]
        self.__formats = ['<I d ']
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GetDoubleSystemParameter
        self.systemParam = 0  # ES_SystemParameter
        self.dParameter = 0.0

    def unpack(self, packet):
        """Decode header, parameter id, and value from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.systemParam,
         self.dParameter) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus parameter id and value; return the raw bytes."""
        fields = (self.systemParam, self.dParameter)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class SetDoubleSystemParamCT(object):
    """Command packet: set a double-valued system parameter."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 24
        self.__sizes = [12]
        self.__formats = ['<I d ']
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetDoubleSystemParameter
        self.systemParam = 0  # ES_SystemParameter
        self.dParameter = 0.0

    def unpack(self, packet):
        """Decode header, parameter id, and value from *packet*; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.systemParam,
         self.dParameter) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus parameter id and value; return the raw bytes."""
        fields = (self.systemParam, self.dParameter)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class SetDoubleSystemParamRT(object):
    """Reply packet for SetDoubleSystemParameter (status header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_SetDoubleSystemParameter

    def unpack(self, packet):
        """Consume the reply header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this reply; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class RestoreStartupConditionsCT(object):
    """Command packet: restore the tracker's startup conditions (header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 12
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_RestoreStartupConditions

    def unpack(self, packet):
        """Consume the command header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this command; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class RestoreStartupConditionsRT(object):
    """Reply packet for RestoreStartupConditions (status header only)."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 16
        self.__sizes = []
        self.__formats = []
        self.packetInfo = BasicCommandRT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_RestoreStartupConditions

    def unpack(self, packet):
        """Consume the reply header from *packet*; return the remainder."""
        self.packet = packet
        return self.packetInfo.unpack(packet)

    def pack(self):
        """Serialize this reply; return the raw bytes."""
        self.packet = self.packetInfo.pack()
        return self.packet
class GoAndMeasureCT(object):
    """Command packet: drive to a target given by three coordinate values and measure."""

    def __init__(self):
        self.packet = b''
        self.__packet_size = 36
        self.__sizes = [24]
        self.__formats = ['<d d d ']
        self.packetInfo = BasicCommandCT()
        self.packetInfo.packetHeader.lPacketSize = self.__packet_size
        self.packetInfo.packetHeader.type = ES_DT_Command
        self.packetInfo.command = ES_C_GoAndMeasure
        self.dVal1 = 0.0
        self.dVal2 = 0.0
        self.dVal3 = 0.0

    def unpack(self, packet):
        """Decode header and the three coordinate values; return unconsumed bytes."""
        self.packet = packet
        packet = self.packetInfo.unpack(packet)
        size = self.__sizes[0]
        (self.dVal1,
         self.dVal2,
         self.dVal3) = struct.unpack(self.__formats[0], packet[:size])
        return packet[size:]

    def pack(self):
        """Serialize header plus the three coordinate values; return the raw bytes."""
        fields = (self.dVal1, self.dVal2, self.dVal3)
        self.packet = self.packetInfo.pack() + struct.pack(self.__formats[0], *fields)
        return self.packet
class GoAndMeasureRT(object):
def __init__(self):
self.packet = b''
self.__packet_size = 16
self.__sizes = []
self.__formats = []
self.packetInfo = BasicCommandRT()
self.packetInfo.packetHeader.lPacketSize = self.__packet_size
self.packetInfo.packetHeader.type = ES_DT_Command
self.packetInfo.command = ES_C_GoAndMeasure
def unpack(self, packet):
self.packet = packet
packet = self.packetInfo.unpack(packet)
return packet
def pack(self):
self.packet = b''
self.packet += self.packetInfo.pack()
return self.packet
class GetMeteoStationInfoCT(object):
def __init__(self):
self.packet = b''
self.__packet_size = 12
self.__sizes = []
self.__formats = []
self.packetInfo = BasicCommandCT()
self.packetInfo.packetHeader.lPacketSize = self.__packet_size
self.packetInfo.packetHeader.type = ES_DT_Command
self.packetInfo.command = ES_C_GetMeteoStationInfo
def unpack(self, packet):
self.packet = packet
packet = self.packetInfo.unpack(packet)
return packet
def pack(self):
self.packet = b''
self.packet += self.packetInfo.pack()
return self.packet
class GetMeteoStationInfoRT(object):
def __init__(self):
self.packet = b''
self.__packet_size = 92
self.__sizes = [76]
self.__formats = [('<I 64s i i ')]
self.packetInfo = BasicCommandRT()
self.packetInfo.packetHeader.lPacketSize = self.__packet_size
self.packetInfo.packetHeader.type = ES_DT_Command
self.packetInfo.command = ES_C_GetMeteoStationInfo
self.meteoStationType = int(0) # ES_MeteoStationType
self.cIdentifier = b'' # 64 bytes max
self.iFirmwareMajorVersionNumber = int(0)
self.iFirmwareMinorVersionNumber = int(0)
def unpack(self, packet):
self.packet = packet
packet = self.packetInfo.unpack(packet)
packet_elements = struct.Struct(self.__formats[0]).unpack(packet[:self.__sizes[0]])
self.meteoStationType = packet_elements[0]
self.cIdentifier = packet_elements[1]
self.iFirmwareMajorVersionNumber = packet_elements[2]
self.iFirmwareMinorVersionNumber = packet_elements[3]
return packet[self.__sizes[0]:]
def pack(self):
self.packet = b''
self.packet += self.packetInfo.pack()
packet_elements = ()
packet_elements += (self.meteoStationType,)
packet_elements += (self.cIdentifier,)
packet_elements += (self.iFirmwareMajorVersionNumber,)
packet_elements += (self.iFirmwareMinorVersionNumber,)
self.packet += struct.Struct(self.__formats[0]).pack(*packet_elements)
return self.packet
class GetAT4xxInfoCT(object):
def __init__(self):
self.packet = b''
self.__packet_size = 12
self.__sizes = []
self.__formats = []
self.packetInfo = BasicCommandCT()
self.packetInfo.packetHeader.lPacketSize = self.__packet_size
self.packetInfo.packetHeader.type = ES_DT_Command
self.packetInfo.command = ES_C_GetAT4xxInfo
def unpack(self, packet):
self.packet = packet
packet = self.packetInfo.unpack(packet)
return packet
def pack(self):
self.packet = b''
self.packet += self.packetInfo.pack()
return self.packet
class GetAT4xxInfoRT(object):
def __init__(self):
self.packet = b''
self.__packet_size = 184
self.__sizes = [168]
self.__formats = [('<I 64s i i i i i i i i i i I I i d d d d d d ')]
self.packetInfo = BasicCommandRT()
self.packetInfo.packetHeader.lPacketSize = self.__packet_size
self.packetInfo.packetHeader.type = ES_DT_Command
self.packetInfo.command = ES_C_GetAT4xxInfo
self.trackerType = int(0) # ES_LTSensorType
self.cTrackerName = b'' # 64 bytes max
self.lSerialNumber = int(0)
self.lMajorFirmwareVersion = int(0)
self.lMinorFirmwareVersion = int(0)
self.lProcessorBoardFWBuildNumber = int(0)
self.lSensorBoardFWBuildNumber = int(0)
self.lMajorOSVersion = int(0)
self.lMinorOSVersion = int(0)
self.lMajorServerSoftwareVersion = int(0)
self.lMinorServerSoftwareVersion = int(0)
self.lServerSoftwareBuildNumber = int(0)
self.wlanType = int(0) # ES_WLANType
self.xscaleType = int(0) # ES_TPMicroProcessorType
self.lMinMeasureTime = int(0)
self.dMinDistance = float(0)
self.dMaxDistance = float(0)
self.dStdDevDistOffsetADM = float(0)
self.dStdDevAngleConst = float(0)
self.dStdDevAngleOffset = float(0)
self.dStdDevAngleFactor = float(0)
def unpack(self, packet):
self.packet = packet
packet = self.packetInfo.unpack(packet)
packet_elements = struct.Struct(self.__formats[0]).unpack(packet[:self.__sizes[0]])
self.trackerType = packet_elements[0]
self.cTrackerName = packet_elements[1]
self.lSerialNumber = packet_elements[2]
self.lMajorFirmwareVersion = packet_elements[3]
self.lMinorFirmwareVersion = packet_elements[4]
self.lProcessorBoardFWBuildNumber = packet_elements[5]
self.lSensorBoardFWBuildNumber = packet_elements[6]
self.lMajorOSVersion = packet_elements[7]
self.lMinorOSVersion = packet_elements[8]
self.lMajorServerSoftwareVersion = packet_elements[9]
self.lMinorServerSoftwareVersion = packet_elements[10]
self.lServerSoftwareBuildNumber = packet_elements[11]
self.wlanType = packet_elements[12]
self.xscaleType = packet_elements[13]
self.lMinMeasureTime = packet_elements[14]
self.dMinDistance = packet_elements[15]
self.dMaxDistance = packet_elements[16]
self.dStdDevDistOffsetADM = packet_elements[17]
self.dStdDevAngleConst = packet_elements[18]
self.dStdDevAngleOffset = packet_elements[19]
self.dStdDevAngleFactor = packet_elements[20]
return packet[self.__sizes[0]:]
def pack(self):
self.packet = b''
self.packet += self.packetInfo.pack()
packet_elements = ()
packet_elements += (self.trackerType,)
packet_elements += (self.cTrackerName,)
packet_elements += (self.lSerialNumber,)
packet_elements += (self.lMajorFirmwareVersion,)
packet_elements += (self.lMinorFirmwareVersion,)
packet_elements += (self.lProcessorBoardFWBuildNumber,)
packet_elements += (self.lSensorBoardFWBuildNumber,)
packet_elements += (self.lMajorOSVersion,)
packet_elements += (self.lMinorOSVersion,)
packet_elements += (self.lMajorServerSoftwareVersion,)
packet_elements += (self.lMinorServerSoftwareVersion,)
packet_elements += (self.lServerSoftwareBuildNumber,)
packet_elements += (self.wlanType,)
packet_elements += (self.xscaleType,)
packet_elements += (self.lMinMeasureTime,)
packet_elements += (self.dMinDistance,)
packet_elements += (self.dMaxDistance,)
packet_elements += (self.dStdDevDistOffsetADM,)
packet_elements += (self.dStdDevAngleConst,)
packet_elements += (self.dStdDevAngleOffset,)
packet_elements += (self.dStdDevAngleFactor,)
self.packet += struct.Struct(self.__formats[0]).pack(*packet_elements)
return self.packet
class GetSystemSoftwareVersionCT(object):
def __init__(self):
self.packet = b''
self.__packet_size = 12
self.__sizes = []
self.__formats = []
self.packetInfo = BasicCommandCT()
self.packetInfo.packetHeader.lPacketSize = self.__packet_size
self.packetInfo.packetHeader.type = ES_DT_Command
self.packetInfo.command = ES_C_GetSystemSoftwareVersion
def unpack(self, packet):
self.packet = packet
packet = self.packetInfo.unpack(packet)
return packet
def pack(self):
self.packet = b''
self.packet += self.packetInfo.pack()
return self.packet
class GetSystemSoftwareVersionRT(object):
def __init__(self):
self.packet = b''
self.__packet_size = 80
self.__sizes = [64]
self.__formats = [('<64s ')]
self.packetInfo = BasicCommandRT()
self.packetInfo.packetHeader.lPacketSize = self.__packet_size
self.packetInfo.packetHeader.type = ES_DT_Command
self.packetInfo.command = ES_C_GetSystemSoftwareVersion
self.cSoftwareVersion = b'' # 64 bytes max
def unpack(self, packet):
self.packet = packet
packet = self.packetInfo.unpack(packet)
packet_elements = struct.Struct(self.__formats[0]).unpack(packet[:self.__sizes[0]])
self.cSoftwareVersion = packet_elements[0]
return packet[self.__sizes[0]:]
def pack(self):
self.packet = b''
self.packet += self.packetInfo.pack()
packet_elements = ()
packet_elements += (self.cSoftwareVersion,)
self.packet += struct.Struct(self.__formats[0]).pack(*packet_elements)
return self.packet
def packetType(packet):
attributes = dir(packet)
if 'packetHeader' in attributes:
return packet.packetHeader.type
elif 'packetInfo' in attributes:
return packetType(packet.packetInfo)
else:
return None
class PacketFactory(object):
def packet(self, data, return_type=True):
packet_header = PacketHeaderT()
packet_header.unpack(data)
if return_type:
packet_info = BasicCommandRT()
else:
packet_info = BasicCommandCT()
packet = None
if packet_header.type == ES_DT_Command:
packet_info.unpack(data)
if False:
pass
elif packet_header.type == ES_DT_NivelResult:
packet = NivelResultT()
elif packet_header.type == ES_DT_ReflectorPosResult:
packet = ReflectorPosResultT()
elif packet_header.type == ES_DT_SingleMeasResult:
packet = SingleMeasResultT()
elif packet_header.type == ES_DT_SingleMeasResult2:
packet = SingleMeasResult2T()
elif packet_header.type == ES_DT_SystemStatusChange:
packet = SystemStatusChangeT()
elif packet_header.type == ES_DT_Error:
packet = ErrorResponseT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_Initialize:
if return_type:
packet = InitializeRT()
else:
packet = InitializeCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_ActivateCameraView:
if return_type:
packet = ActivateCameraViewRT()
else:
packet = ActivateCameraViewCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_Park:
if return_type:
packet = ParkRT()
else:
packet = ParkCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GoBirdBath:
if return_type:
packet = GoBirdBathRT()
else:
packet = GoBirdBathCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GoBirdBath2:
if return_type:
packet = GoBirdBath2RT()
else:
packet = GoBirdBath2CT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_ChangeFace:
if return_type:
packet = ChangeFaceRT()
else:
packet = ChangeFaceCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_StartNivelMeasurement:
if return_type:
packet = StartNivelMeasurementRT()
else:
packet = StartNivelMeasurementCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_StartMeasurement:
if return_type:
packet = StartMeasurementRT()
else:
packet = StartMeasurementCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_StopMeasurement:
if return_type:
packet = StopMeasurementRT()
else:
packet = StopMeasurementCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_ExitApplication:
if return_type:
packet = ExitApplicationRT()
else:
packet = ExitApplicationCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GoLastMeasuredPoint:
if return_type:
packet = GoLastMeasuredPointRT()
else:
packet = GoLastMeasuredPointCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_FindReflector:
if return_type:
packet = FindReflectorRT()
else:
packet = FindReflectorCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetCoordinateSystemType:
if return_type:
packet = SetCoordinateSystemTypeRT()
else:
packet = SetCoordinateSystemTypeCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetCoordinateSystemType:
if return_type:
packet = GetCoordinateSystemTypeRT()
else:
packet = GetCoordinateSystemTypeCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetMeasurementMode:
if return_type:
packet = SetMeasurementModeRT()
else:
packet = SetMeasurementModeCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetMeasurementMode:
if return_type:
packet = GetMeasurementModeRT()
else:
packet = GetMeasurementModeCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetSearchParams:
if return_type:
packet = SetSearchParamsRT()
else:
packet = SetSearchParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetSearchParams:
if return_type:
packet = GetSearchParamsRT()
else:
packet = GetSearchParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetStationaryModeParams:
if return_type:
packet = SetStationaryModeParamsRT()
else:
packet = SetStationaryModeParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetStationaryModeParams:
if return_type:
packet = GetStationaryModeParamsRT()
else:
packet = GetStationaryModeParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetSystemSettings:
if return_type:
packet = SetSystemSettingsRT()
else:
packet = SetSystemSettingsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetSystemSettings:
if return_type:
packet = GetSystemSettingsRT()
else:
packet = GetSystemSettingsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetUnits:
if return_type:
packet = SetUnitsRT()
else:
packet = SetUnitsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetUnits:
if return_type:
packet = GetUnitsRT()
else:
packet = GetUnitsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetSystemStatus:
if return_type:
packet = GetSystemStatusRT()
else:
packet = GetSystemStatusCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetMeasurementStatusInfo:
if return_type:
packet = GetMeasurementStatusInfoRT()
else:
packet = GetMeasurementStatusInfoCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetTrackerStatus:
if return_type:
packet = GetTrackerStatusRT()
else:
packet = GetTrackerStatusCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetReflector:
if return_type:
packet = SetReflectorRT()
else:
packet = SetReflectorCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetReflectors:
if return_type:
packet = GetReflectorsRT()
else:
packet = GetReflectorsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetReflector:
if return_type:
packet = GetReflectorRT()
else:
packet = GetReflectorCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetEnvironmentParams:
if return_type:
packet = SetEnvironmentParamsRT()
else:
packet = SetEnvironmentParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetEnvironmentParams:
if return_type:
packet = GetEnvironmentParamsRT()
else:
packet = GetEnvironmentParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetRefractionParams:
if return_type:
packet = SetRefractionParamsRT()
else:
packet = SetRefractionParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetRefractionParams:
if return_type:
packet = GetRefractionParamsRT()
else:
packet = GetRefractionParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetStationOrientationParams:
if return_type:
packet = SetStationOrientationParamsRT()
else:
packet = SetStationOrientationParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetStationOrientationParams:
if return_type:
packet = GetStationOrientationParamsRT()
else:
packet = GetStationOrientationParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetTransformationParams:
if return_type:
packet = SetTransformationParamsRT()
else:
packet = SetTransformationParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetTransformationParams:
if return_type:
packet = GetTransformationParamsRT()
else:
packet = GetTransformationParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GoPosition:
if return_type:
packet = GoPositionRT()
else:
packet = GoPositionCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetDirection:
if return_type:
packet = GetDirectionRT()
else:
packet = GetDirectionCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GoPositionHVD:
if return_type:
packet = GoPositionHVDRT()
else:
packet = GoPositionHVDCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_PointLaser:
if return_type:
packet = PointLaserRT()
else:
packet = PointLaserCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_PositionRelativeHV:
if return_type:
packet = PositionRelativeHVRT()
else:
packet = PositionRelativeHVCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_PointLaserHVD:
if return_type:
packet = PointLaserHVDRT()
else:
packet = PointLaserHVDCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_MoveHV:
if return_type:
packet = MoveHVRT()
else:
packet = MoveHVCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GoNivelPosition:
if return_type:
packet = GoNivelPositionRT()
else:
packet = GoNivelPositionCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_CallOrientToGravity:
if return_type:
packet = CallOrientToGravityRT()
else:
packet = CallOrientToGravityCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetCompensation:
if return_type:
packet = SetCompensationRT()
else:
packet = SetCompensationCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetCompensation:
if return_type:
packet = GetCompensationRT()
else:
packet = GetCompensationCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetCompensations:
if return_type:
packet = GetCompensationsRT()
else:
packet = GetCompensationsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetCompensations2:
if return_type:
packet = GetCompensations2RT()
else:
packet = GetCompensations2CT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetStatisticMode:
if return_type:
packet = SetStatisticModeRT()
else:
packet = SetStatisticModeCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetStatisticMode:
if return_type:
packet = GetStatisticModeRT()
else:
packet = GetStatisticModeCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetCameraParams:
if return_type:
packet = SetCameraParamsRT()
else:
packet = SetCameraParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetCameraParams:
if return_type:
packet = GetCameraParamsRT()
else:
packet = GetCameraParamsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetADMInfo2:
if return_type:
packet = GetADMInfo2RT()
else:
packet = GetADMInfo2CT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetNivelInfo:
if return_type:
packet = GetNivelInfoRT()
else:
packet = GetNivelInfoCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetNivelInfo2:
if return_type:
packet = GetNivelInfo2RT()
else:
packet = GetNivelInfo2CT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetTPInfo:
if return_type:
packet = GetTPInfoRT()
else:
packet = GetTPInfoCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetTrackerInfo:
if return_type:
packet = GetTrackerInfoRT()
else:
packet = GetTrackerInfoCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetATRInfo:
if return_type:
packet = GetATRInfoRT()
else:
packet = GetATRInfoCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetLaserOnTimer:
if return_type:
packet = SetLaserOnTimerRT()
else:
packet = SetLaserOnTimerCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetLaserOnTimer:
if return_type:
packet = GetLaserOnTimerRT()
else:
packet = GetLaserOnTimerCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetFace:
if return_type:
packet = GetFaceRT()
else:
packet = GetFaceCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetLongSystemParameter:
if return_type:
packet = SetLongSystemParamRT()
else:
packet = SetLongSystemParamCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetLongSystemParameter:
if return_type:
packet = GetLongSystemParamRT()
else:
packet = GetLongSystemParamCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetObjectTemperature:
if return_type:
packet = GetObjectTemperatureRT()
else:
packet = GetObjectTemperatureCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_ClearCommandQueue:
if return_type:
packet = ClearCommandQueueRT()
else:
packet = ClearCommandQueueCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetOverviewCameraInfo:
if return_type:
packet = GetOverviewCameraInfoRT()
else:
packet = GetOverviewCameraInfoCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetDoubleSystemParameter:
if return_type:
packet = GetDoubleSystemParamRT()
else:
packet = GetDoubleSystemParamCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_SetDoubleSystemParameter:
if return_type:
packet = SetDoubleSystemParamRT()
else:
packet = SetDoubleSystemParamCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_RestoreStartupConditions:
if return_type:
packet = RestoreStartupConditionsRT()
else:
packet = RestoreStartupConditionsCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GoAndMeasure:
if return_type:
packet = GoAndMeasureRT()
else:
packet = GoAndMeasureCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetMeteoStationInfo:
if return_type:
packet = GetMeteoStationInfoRT()
else:
packet = GetMeteoStationInfoCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetAT4xxInfo:
if return_type:
packet = GetAT4xxInfoRT()
else:
packet = GetAT4xxInfoCT()
elif packet_header.type == ES_DT_Command and packet_info.command == ES_C_GetSystemSoftwareVersion:
if return_type:
packet = GetSystemSoftwareVersionRT()
else:
packet = GetSystemSoftwareVersionCT()
packet.unpack(data)
return packet
| 32.476025 | 105 | 0.708209 | 20,748 | 177,449 | 5.786293 | 0.046173 | 0.119946 | 0.041048 | 0.042981 | 0.754136 | 0.752553 | 0.748863 | 0.744032 | 0.741841 | 0.739526 | 0 | 0.016124 | 0.183202 | 177,449 | 5,463 | 106 | 32.48197 | 0.812177 | 0.01317 | 0 | 0.742898 | 1 | 0.000412 | 0.003938 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.106628 | false | 0.000206 | 0.000618 | 0 | 0.214492 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1314e6436e2f2d538cb6b2f45f2f975ec9280fdb | 8,891 | py | Python | libs/multiagent-competition/gym-compete/gym_compete/new_envs/utils.py | maxgold/icml22 | 49f026dd2314091639b52f5b8364a29e8000b738 | [
"MIT"
] | null | null | null | libs/multiagent-competition/gym-compete/gym_compete/new_envs/utils.py | maxgold/icml22 | 49f026dd2314091639b52f5b8364a29e8000b738 | [
"MIT"
] | null | null | null | libs/multiagent-competition/gym-compete/gym_compete/new_envs/utils.py | maxgold/icml22 | 49f026dd2314091639b52f5b8364a29e8000b738 | [
"MIT"
] | null | null | null | import xml.etree.ElementTree as ET
import colorsys
import numpy as np
def list_filter(lambda_fn, iterable):
    """Return a list of the items in *iterable* for which *lambda_fn* is truthy."""
    return [item for item in iterable if lambda_fn(item)]
def get_distinct_colors(n=2):
    """Return *n* visually distinct RGB colours as a list of (r, g, b) tuples.

    Hues are spread evenly around the HSV colour wheel at fixed saturation
    and value (0.5 each), then converted to RGB.

    taken from: https://stackoverflow.com/a/876872

    Args:
        n: number of colours to generate (n=0 yields an empty list).

    Returns:
        list of ``n`` (r, g, b) float tuples in [0, 1].
    """
    hsv_tuples = [(i / n, 0.5, 0.5) for i in range(n)]
    # Materialise as a list: the previous implementation returned a lazy
    # ``map`` object that could only be iterated once, silently yielding
    # nothing on a second pass.
    return [colorsys.hsv_to_rgb(*hsv) for hsv in hsv_tuples]
def set_class(root, prop, agent_class):
    """Set ``class=agent_class`` on every element tagged *prop* in the subtree.

    The subtree rooted at *root* (including *root* itself) is walked in
    document order; elements whose tag equals *prop* get their ``class``
    attribute overwritten.  A ``None`` root is a no-op.
    """
    if root is None:
        return
    # Element.iter(tag) visits root and all descendants with a matching tag.
    for node in root.iter(prop):
        node.set('class', agent_class)
def set_geom_class(root, name):
    """Set the default class of every ``geom`` element under *root* to *name*."""
    set_class(root, 'geom', name)
def set_motor_class(root, name):
    """Set the default class of every ``motor`` element under *root* to *name*."""
    set_class(root, 'motor', name)
def add_prefix(root, prop, prefix, force_set=False):
    """Prepend ``prefix + '/'`` to attribute *prop* on every element in the subtree.

    Elements (including *root*) that already carry the attribute get it
    rewritten as ``prefix/<old value>``.  When *force_set* is true, elements
    without the attribute receive a random anonymous name of the form
    ``prefix/anon<digits>``; otherwise they are left untouched.  A ``None``
    root is a no-op.
    """
    if root is None:
        return
    # Element.iter() walks root and all descendants in document order,
    # matching the original recursive traversal.
    for node in root.iter():
        current = node.get(prop)
        if current is not None:
            node.set(prop, '{}/{}'.format(prefix, current))
        elif force_set:
            node.set(prop, '{}/anon{}'.format(prefix, np.random.randint(1, 1e10)))
def tuple_to_str(tp):
    """Join the items of *tp* into a single space-separated string."""
    return " ".join(str(element) for element in tp)
def create_multiagent_xml(
    world_xml, all_agent_xmls, agent_scopes=None,
    outdir='/tmp/', outpath=None, ini_pos=None, rgb=None
):
    """Merge a world XML file and several single-agent XML files into one
    multi-agent model file.

    For each agent, its defaults, top-level ``<body>``, ``<actuator>`` and
    (optional) ``<tendon>`` sections are copied into the world tree.  Agents
    are scoped by prefixing all names (bodies, joints, motors) with
    ``agent_scopes[i]`` and by assigning a per-agent default class to geoms
    and motors; each agent body is recoloured with a distinct RGBA and moved
    to ``ini_pos[i]``.  The tag vocabulary (worldbody/geom/contype/...) is
    MuJoCo MJCF.

    Args:
        world_xml: path to the world/arena XML file.
        all_agent_xmls: paths to single-agent XML files whose top-level
            ``<body>`` sits directly under the document root (cf. the
            ``_gym`` variant, which looks inside ``<worldbody>``).
        agent_scopes: per-agent name prefixes; defaults to 'agent0', 'agent1', ...
        outdir: directory for the merged file when ``outpath`` is None.
        outpath: explicit output path; derived from the input file names when None.
        ini_pos: initial (x, y, z) positions; defaults to agents spread along
            the x axis at z = 0.75.
        rgb: per-agent RGB tuples; defaults to evenly spaced hues.

    Returns:
        tuple ``(xml_bytes, outpath)``: the serialised merged tree and the
        path the file was written to.
    """
    world = ET.parse(world_xml)
    world_root = world.getroot()
    world_default = world_root.find('default')
    world_body = world_root.find('worldbody')
    world_actuator = None
    world_tendons = None
    n_agents = len(all_agent_xmls)
    if rgb is None:
        rgb = get_distinct_colors(n_agents)
    RGB_tuples = list(
        map(lambda x: tuple_to_str(x), rgb)
    )
    if agent_scopes is None:
        agent_scopes = ['agent' + str(i) for i in range(n_agents)]
    if ini_pos is None:
        # spread agents along the x axis
        ini_pos = [(-i, 0, 0.75) for i in np.linspace(-n_agents, n_agents, n_agents)]
    # ini_pos = list(map(lambda x: tuple_to_str(x), ini_pos))
    for i in range(n_agents):
        # per-agent <default class="agentN"> holding the agent's defaults
        agent_default = ET.SubElement(
            world_default, 'default', attrib={'class': agent_scopes[i]}
        )
        rgba = RGB_tuples[i] + " 1"
        agent_xml = ET.parse(all_agent_xmls[i])
        default = agent_xml.find('default')
        color_set = False
        # copy agent defaults, recolouring any geom default with this agent's rgba
        for child in list(default):
            if child.tag == 'geom':
                child.set('rgba', rgba)
                color_set = True
            agent_default.append(child)
        if not color_set:
            # agent had no geom default: create one so the colour applies
            agent_geom = ET.SubElement(
                agent_default, 'geom',
                attrib={'contype': '1', 'conaffinity': '1', 'rgba': rgba}
            )
        agent_body = agent_xml.find('body')
        if agent_body.get('pos'):
            oripos = list(map(float, agent_body.get('pos').strip().split(" ")))
            # keep original y and z coordinates
            pos = list(ini_pos[i])
            # pos[1] = oripos[1]
            # pos[2] = oripos[2]
            # print(tuple_to_str(pos))
            agent_body.set('pos', tuple_to_str(pos))
        # add class to all geoms
        set_geom_class(agent_body, agent_scopes[i])
        # add prefix to all names, important to map joints
        add_prefix(agent_body, 'name', agent_scopes[i], force_set=True)
        # add agent body to xml
        world_body.append(agent_body)
        # get agent actuators
        agent_actuator = agent_xml.find('actuator')
        # add same prefix to all motor joints
        add_prefix(agent_actuator, 'joint', agent_scopes[i])
        add_prefix(agent_actuator, 'name', agent_scopes[i])
        # add actuator
        set_motor_class(agent_actuator, agent_scopes[i])
        if world_actuator is None:
            # first agent: adopt its whole <actuator> section
            world_root.append(agent_actuator)
            world_actuator = world_root.find('actuator')
            # print(world_actuator)
            # print(ET.tostring(world_root))
        else:
            # later agents: merge their motors into the existing section
            for motor in list(agent_actuator):
                world_actuator.append(motor)
        # get agent tendons if exists
        agent_tendon = agent_xml.find('tendon')
        # NOTE(review): truth-testing an Element is deprecated and is False
        # for a childless element, so an empty <tendon> is treated as absent
        # — probably should be `is not None`; confirm intent.
        if agent_tendon:
            # add same prefix to all tendon joints
            add_prefix(agent_tendon, 'joint', agent_scopes[i])
            add_prefix(agent_tendon, 'name', agent_scopes[i])
            # add tendon
            if world_tendons is None:
                world_root.append(agent_tendon)
                world_tendons = world_root.find('tendon')
                # print(world_actuator)
                # print(ET.tostring(world_root))
            else:
                for tendon in list(agent_tendon):
                    world_tendons.append(tendon)
    if outpath is None:
        # e.g. world.agent0file.agent1file.xml, built from the input basenames
        outname = world_xml.split("/")[-1].split(".xml")[0] + '.' + ".".join(map(lambda x: x.split("/")[-1].split(".xml")[0], all_agent_xmls)) + ".xml"
        outpath = outdir + '/' + outname
    world.write(outpath)
    return ET.tostring(world_root), outpath
def create_multiagent_xml_gym(
    world_xml, all_agent_xmls, agent_scopes=None,
    outdir='/tmp/', outpath=None, ini_pos=None, rgb=None
):
    """Variant of ``create_multiagent_xml`` for Gym-style agent XML files.

    Identical merge logic, except that each agent's top-level ``<body>`` is
    looked up inside the agent file's ``<worldbody>`` element (standard Gym
    MuJoCo asset layout) rather than directly under the document root.

    Args:
        world_xml: path to the world/arena XML file.
        all_agent_xmls: paths to single-agent XML files with a
            ``<worldbody><body .../></worldbody>`` layout.
        agent_scopes: per-agent name prefixes; defaults to 'agent0', 'agent1', ...
        outdir: directory for the merged file when ``outpath`` is None.
        outpath: explicit output path; derived from the input file names when None.
        ini_pos: initial (x, y, z) positions; defaults to agents spread along
            the x axis at z = 0.75.
        rgb: per-agent RGB tuples; defaults to evenly spaced hues.

    Returns:
        tuple ``(xml_bytes, outpath)``: the serialised merged tree and the
        path the file was written to.
    """
    world = ET.parse(world_xml)
    world_root = world.getroot()
    world_default = world_root.find('default')
    world_body = world_root.find('worldbody')
    world_actuator = None
    world_tendons = None
    n_agents = len(all_agent_xmls)
    if rgb is None:
        rgb = get_distinct_colors(n_agents)
    RGB_tuples = list(
        map(lambda x: tuple_to_str(x), rgb)
    )
    if agent_scopes is None:
        agent_scopes = ['agent' + str(i) for i in range(n_agents)]
    if ini_pos is None:
        # spread agents along the x axis
        ini_pos = [(-i, 0, 0.75) for i in np.linspace(-n_agents, n_agents, n_agents)]
    # ini_pos = list(map(lambda x: tuple_to_str(x), ini_pos))
    for i in range(n_agents):
        # per-agent <default class="agentN"> holding the agent's defaults
        agent_default = ET.SubElement(
            world_default, 'default', attrib={'class': agent_scopes[i]}
        )
        rgba = RGB_tuples[i] + " 1"
        agent_xml = ET.parse(all_agent_xmls[i])
        default = agent_xml.find('default')
        color_set = False
        # copy agent defaults, recolouring any geom default with this agent's rgba
        for child in list(default):
            if child.tag == 'geom':
                child.set('rgba', rgba)
                color_set = True
            agent_default.append(child)
        if not color_set:
            # agent had no geom default: create one so the colour applies
            agent_geom = ET.SubElement(
                agent_default, 'geom',
                attrib={'contype': '1', 'conaffinity': '1', 'rgba': rgba}
            )
        # Gym layout: the agent body lives inside <worldbody>
        wagent_body = agent_xml.find("worldbody")
        agent_body = wagent_body.find('body')
        if agent_body.get('pos'):
            oripos = list(map(float, agent_body.get('pos').strip().split(" ")))
            # keep original y and z coordinates
            pos = list(ini_pos[i])
            # pos[1] = oripos[1]
            # pos[2] = oripos[2]
            # print(tuple_to_str(pos))
            agent_body.set('pos', tuple_to_str(pos))
        # add class to all geoms
        set_geom_class(agent_body, agent_scopes[i])
        # add prefix to all names, important to map joints
        add_prefix(agent_body, 'name', agent_scopes[i], force_set=True)
        # add agent body to xml
        world_body.append(agent_body)
        # get agent actuators
        agent_actuator = agent_xml.find('actuator')
        # add same prefix to all motor joints
        add_prefix(agent_actuator, 'joint', agent_scopes[i])
        add_prefix(agent_actuator, 'name', agent_scopes[i])
        # add actuator
        set_motor_class(agent_actuator, agent_scopes[i])
        if world_actuator is None:
            # first agent: adopt its whole <actuator> section
            world_root.append(agent_actuator)
            world_actuator = world_root.find('actuator')
            # print(world_actuator)
            # print(ET.tostring(world_root))
        else:
            # later agents: merge their motors into the existing section
            for motor in list(agent_actuator):
                world_actuator.append(motor)
        # get agent tendons if exists
        agent_tendon = agent_xml.find('tendon')
        # NOTE(review): truth-testing an Element is deprecated and is False
        # for a childless element, so an empty <tendon> is treated as absent
        # — probably should be `is not None`; confirm intent.
        if agent_tendon:
            # add same prefix to all tendon joints
            add_prefix(agent_tendon, 'joint', agent_scopes[i])
            add_prefix(agent_tendon, 'name', agent_scopes[i])
            # add tendon
            if world_tendons is None:
                world_root.append(agent_tendon)
                world_tendons = world_root.find('tendon')
                # print(world_actuator)
                # print(ET.tostring(world_root))
            else:
                for tendon in list(agent_tendon):
                    world_tendons.append(tendon)
    if outpath is None:
        # e.g. world.agent0file.agent1file.xml, built from the input basenames
        outname = world_xml.split("/")[-1].split(".xml")[0] + '.' + ".".join(map(lambda x: x.split("/")[-1].split(".xml")[0], all_agent_xmls)) + ".xml"
        outpath = outdir + '/' + outname
    world.write(outpath)
    return ET.tostring(world_root), outpath
| 37.200837 | 152 | 0.593634 | 1,181 | 8,891 | 4.243014 | 0.108383 | 0.048294 | 0.038316 | 0.029934 | 0.859709 | 0.849331 | 0.830573 | 0.817003 | 0.817003 | 0.817003 | 0 | 0.007434 | 0.288944 | 8,891 | 238 | 153 | 37.357143 | 0.785195 | 0.120796 | 0 | 0.79096 | 0 | 0 | 0.046104 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.050847 | false | 0 | 0.016949 | 0.011299 | 0.107345 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1344a1321b211e27bc79dc645490021c17932f45 | 33,274 | py | Python | niftypad/models.py | JJiao/NiftyPAD | 9e347410d7d1b753c1afd125d5839946f5e77379 | [
"Apache-2.0"
] | null | null | null | niftypad/models.py | JJiao/NiftyPAD | 9e347410d7d1b753c1afd125d5839946f5e77379 | [
"Apache-2.0"
] | 1 | 2020-01-26T01:22:32.000Z | 2020-01-26T01:22:32.000Z | niftypad/models.py | JJiao/NiftyPAD | 9e347410d7d1b753c1afd125d5839946f5e77379 | [
"Apache-2.0"
] | 5 | 2019-05-28T15:02:32.000Z | 2021-02-03T13:03:28.000Z | __author__ = 'jieqing jiao'
__email__ = "jieqing.jiao@gmail.com"
import numpy as np
import scipy
from scipy.optimize import curve_fit
from scipy.stats import linregress
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
from scipy.interpolate import interp1d
import inspect
from niftypad import kt
from niftypad import kp
from niftypad import basis
# # for debugging
from numpy import savetxt
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # linear models
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
# srtmb_basis - srtm model for img with pre-calculated basis functions
def srtmb_basis(tac, b):
    '''
    Fit the SRTM to a single TAC using pre-computed basis functions.

    Args:
        tac: 1D array, target time-activity curve. Negative values are
            clipped to zero in place.
        b: dict of pre-computed basis data ('beta', 'm_w', 'basis',
            'input', 'w') as produced by basis.make_basis.

    Returns:
        dict with kinetic parameters 'r1', 'k2', 'bp' and the fitted
        curve 'tacf'.
    '''
    tac[tac < 0] = 0.0
    n_beta = b['beta'].size
    ssq = np.zeros(n_beta)
    # use a local weight instead of mutating the caller's dict
    w = 1 if b['w'] is None else b['w']
    for i in range(0, n_beta):
        theta = np.dot(b['m_w'][i * 2:i * 2 + 2, :], w * tac)
        a = np.column_stack((b['input'], b['basis'][i, :]))
        tacf = np.dot(a, theta)
        res = (tac - tacf) * w
        ssq[i] = np.sum(res ** 2)
    # refit with the basis function giving the smallest weighted SSQ
    i = np.argmin(ssq)
    theta = np.dot(b['m_w'][i * 2:i * 2 + 2, :], w * tac)
    a = np.column_stack((b['input'], b['basis'][i, :]))
    tacf = np.dot(a, theta)
    theta = np.append(theta, b['beta'][i])
    r1, k2, bp = kp.srtm_theta2kp(theta)
    kps = {'r1': r1, 'k2': k2, 'bp': bp, 'tacf': tacf}
    return kps
def srtmb_basis_para2tac(r1, k2, bp, b):
    '''
    Reconstruct the fitted TAC from SRTM parameters and a basis dict.

    Args:
        r1, k2, bp: SRTM kinetic parameters.
        b: dict of pre-computed basis data ('beta', 'basis', 'input').

    Returns:
        dict with 'tacf': the model TAC, or [] when no basis function
        matches the beta implied by the parameters.
    '''
    tacf = []
    theta = kp.srtm_kp2theta(r1, k2, bp)
    # argwhere returns an array; comparing it to [] with == is fragile,
    # so test the number of matches explicitly
    idx = np.argwhere(abs(b['beta'] - theta[-1]) < 1e-10).flatten()
    if idx.size > 0:
        a = np.column_stack((b['input'], b['basis'][idx[0], :]))
        tacf = np.dot(a, theta[:-1])
    kps = {'tacf': tacf}
    return kps
# srtmb - srtm model for tac, basis functions will be calculated
def srtmb(tac, dt, inputf1, beta_lim, n_beta, w):
    '''
    Fit the SRTM to a single TAC; the basis functions are computed here.

    Args:
        tac: 1D array, target time-activity curve (negatives clipped).
        dt: 2 x n_frames array of frame start/end times.
        inputf1: reference input function at 1-unit time steps.
        beta_lim: (min, max) range for the beta grid.
        n_beta: number of basis functions.
        w: per-frame weights, or None.

    Returns:
        dict with 'r1', 'k2', 'bp' and the fitted curve 'tacf'.
    '''
    tac[tac < 0] = 0.0
    basis_funcs = basis.make_basis(inputf1, dt, beta_lim=beta_lim, n_beta=n_beta, w=w)
    return srtmb_basis(tac, basis_funcs)
# srtmb_asl_basis - srtm model for tac with fixed R1 and pre-calculated basis functions
def srtmb_asl_basis(tac, b, r1):
    '''
    Fit the SRTM with fixed R1 to a single TAC using pre-computed basis
    functions.

    Args:
        tac: 1D array, target time-activity curve; negatives clipped to
            zero in place.
        b: dict of pre-computed basis data ('beta', 'basis', 'input', 'w').
        r1: fixed R1 (relative delivery) value.

    Returns:
        dict with 'r1', 'k2', 'bp' and the fitted curve 'tacf'.
    '''
    tac[tac < 0] = 0.0
    n_beta = b['beta'].size
    ssq = np.zeros(n_beta)
    # use a local weight instead of mutating the caller's dict
    w = 1 if b['w'] is None else b['w']
    y = tac - r1 * b['input']
    for i in range(0, n_beta):
        theta = r1
        # least-squares scale of the i-th basis; weights cancel here
        theta = np.append(theta, np.dot(y, b['basis'][i, :]) / np.dot(b['basis'][i, :], b['basis'][i, :]))
        a = np.column_stack((b['input'], b['basis'][i, :]))
        tacf = np.dot(a, theta)
        res = (tac - tacf) * w  # weights do matter for the residual
        ssq[i] = np.sum(res ** 2)
    # refit with the basis function giving the smallest weighted SSQ
    i = np.argmin(ssq)
    theta = r1
    theta = np.append(theta, np.dot(y, b['basis'][i, :]) / np.dot(b['basis'][i, :], b['basis'][i, :]))
    a = np.column_stack((b['input'], b['basis'][i, :]))
    tacf = np.dot(a, theta)
    theta = np.append(theta, b['beta'][i])
    r1, k2, bp = kp.srtm_theta2kp(theta)
    kps = {'r1': r1, 'k2': k2, 'bp': bp, 'tacf': tacf}
    return kps
# srtmb_asl - srtm model for tac with fixed R1, basis functions will be calculated
def srtmb_asl(tac, dt, inputf1, beta_lim, n_beta, w, r1):
    '''
    Fit the SRTM with fixed R1 to a single TAC; basis functions are
    computed here.

    Args:
        tac: 1D array, target time-activity curve (negatives clipped).
        dt: 2 x n_frames array of frame start/end times.
        inputf1: reference input function at 1-unit time steps.
        beta_lim: (min, max) range for the beta grid.
        n_beta: number of basis functions.
        w: per-frame weights, or None.
        r1: fixed R1 (relative delivery) value.

    Returns:
        dict with 'r1', 'k2', 'bp' and the fitted curve 'tacf'.
    '''
    tac[tac < 0] = 0.0
    basis_funcs = basis.make_basis(inputf1, dt, beta_lim=beta_lim, n_beta=n_beta, w=w)
    return srtmb_asl_basis(tac, basis_funcs, r1)
# srtmb_k2p_basis - srtm model for img with fixed k2p and pre-calculated basis functions
def srtmb_k2p_basis(tac, b):
    '''
    Fit the SRTM with fixed k2p to a single TAC using pre-computed k2p
    basis functions.

    Args:
        tac: 1D array, target time-activity curve; negatives clipped to
            zero in place.
        b: dict of pre-computed basis data ('beta', 'basis_k2p', 'k2p',
            'w') as produced by basis.make_basis with k2p set.

    Returns:
        dict with 'r1', 'k2', 'bp' and the fitted curve 'tacf'.
    '''
    tac[tac < 0] = 0.0
    n_beta = b['beta'].size
    ssq = np.zeros(n_beta)
    # use a local weight instead of mutating the caller's dict
    w = 1 if b['w'] is None else b['w']
    for i in range(0, n_beta):
        # weighted least-squares scale of the i-th k2p basis
        r1 = np.sum(w * b['basis_k2p'][i] * tac) / np.sum(w * b['basis_k2p'][i] ** 2)
        ssq[i] = np.sum(w * (tac - r1 * b['basis_k2p'][i]) ** 2)
    # refit with the basis function giving the smallest weighted SSQ
    i = np.argmin(ssq)
    r1 = np.sum(w * b['basis_k2p'][i] * tac) / np.sum(w * b['basis_k2p'][i] ** 2)
    tacf = r1 * b['basis_k2p'][i]
    theta = r1
    theta = np.append(theta, r1 * (b['k2p'] - b['beta'][i]))
    theta = np.append(theta, b['beta'][i])
    r1, k2, bp = kp.srtm_theta2kp(theta)
    kps = {'r1': r1, 'k2': k2, 'bp': bp, 'tacf': tacf}
    return kps
def srtmb_k2p_basis_para2tac(r1, k2, bp, b):
    '''
    Reconstruct the fitted TAC from SRTM parameters with fixed k2p.

    Args:
        r1, k2, bp: SRTM kinetic parameters.
        b: dict of pre-computed basis data ('beta', 'basis_k2p').

    Returns:
        dict with 'tacf': the model TAC, or [] when no basis function
        matches the beta implied by the parameters.
    '''
    tacf = []
    theta = kp.srtm_kp2theta(r1, k2, bp)
    # test the number of matches explicitly instead of the fragile
    # `not i == []` comparison between a numpy array and a list
    idx = np.argwhere(abs(b['beta'] - theta[-1]) < 1e-10).flatten()
    if idx.size > 0:
        tacf = theta[0] * b['basis_k2p'][idx[0]]
    kps = {'tacf': tacf}
    return kps
# srtmb_k2p - srtm model for tac with fixed k2p, basis functions will be calculated
def srtmb_k2p(tac, dt, inputf1, beta_lim, n_beta, w, k2p):
    '''
    Fit the SRTM with fixed k2p to a single TAC; basis functions are
    computed here.

    Args:
        tac: 1D array, target time-activity curve (negatives clipped).
        dt: 2 x n_frames array of frame start/end times.
        inputf1: reference input function at 1-unit time steps.
        beta_lim: (min, max) range for the beta grid.
        n_beta: number of basis functions.
        w: per-frame weights, or None.
        k2p: fixed reference-region efflux constant k2'.

    Returns:
        dict with 'r1', 'k2', 'bp' and the fitted curve 'tacf'.
    '''
    tac[tac < 0] = 0.0
    basis_funcs = basis.make_basis(inputf1, dt, beta_lim=beta_lim, n_beta=n_beta, w=w, k2p=k2p)
    return srtmb_k2p_basis(tac, basis_funcs)
# # # # # # graphic models
# logan_ref - logan reference plot without fixed k2p for tac, based on eq.7 in
# "Distribution Volume Ratios Without Blood Sampling from Graphical Analysis of PET Data"
def logan_ref(tac, dt, inputf1, linear_phase_start, linear_phase_end, fig):
    '''
    Logan reference plot without a fixed k2p, based on eq.7 in
    "Distribution Volume Ratios Without Blood Sampling from Graphical
    Analysis of PET Data".

    Args:
        tac: 1D array, target time-activity curve.
        dt: 2 x n_frames array of frame start/end times.
        inputf1: reference input function sampled at 1-unit time steps.
        linear_phase_start: start time of the linear phase (None -> 0).
        linear_phase_end: end time of the linear phase (None -> max(dt)).
        fig: if truthy, plot the Logan fit.

    Returns:
        dict with the binding potential estimate 'bp'.
    '''
    if linear_phase_start is None:
        linear_phase_start = 0
    if linear_phase_end is None:
        linear_phase_end = np.amax(dt)
    # fill the coffee break gap
    if kt.dt_has_gaps(dt):
        tac, dt = kt.tac_dt_fill_coffee_break(tac, dt)
    mft = kt.dt2mft(dt)
    mft = np.append(0, mft)
    dt_new = np.array([mft[:-1], mft[1:]])
    tdur = kt.dt2tdur(dt_new)
    # resample the reference input at the mid-frame times
    inputff = interp1d(np.arange(len(inputf1)), inputf1, kind='linear', fill_value='extrapolate')
    input_dt = inputff(mft)
    input_dt = input_dt[1:]
    tac = np.append(0, tac)
    input_dt = np.append(0, input_dt)
    # set negative values to zero
    tac[tac < 0] = 0.0
    input_dt[input_dt < 0] = 0.0
    # trapezoidal integration; tac_cum and input_cum are aligned with tac
    tac_cum = np.cumsum((tac[:-1] + tac[1:]) / 2 * tdur)
    input_cum = np.cumsum((input_dt[:-1] + input_dt[1:]) / 2 * tdur)
    tac = tac[1:]
    input_dt = input_dt[1:]
    # small epsilon avoids division by zero for empty frames
    eps = 1e-16
    yy = tac_cum / (tac + eps)
    xx = input_cum / (tac + eps)
    # find tt for the linear phase
    tt = np.logical_and(mft >= linear_phase_start, mft <= linear_phase_end)
    tt = tt[1:]
    # select tt for tac > 0
    tt = np.logical_and(tt, tac > 0)
    # select tt for xx < inf, yy < inf
    infinf = 1e10
    tt = np.logical_and(tt, xx < infinf)
    tt = np.logical_and(tt, yy < infinf)
    # linear regression over the selected linear phase; the slope is DVR
    xx = xx[tt]
    yy = yy[tt]
    dvr, inter, _, _, _ = linregress(xx, yy)
    bp = dvr - 1
    yyf = dvr * xx + inter
    if fig:
        plt.plot(xx, yy, '.')
        plt.plot(xx, yyf, 'r')
        plt.show()
    kps = {'bp': bp}
    return kps
# logan_ref_k2p - logan reference plot with fixed k2p for tac, based on eq.6 in
# # "Distribution Volume Ratios Without Blood Sampling from Graphical Analysis of PET Data"
def logan_ref_k2p(tac, dt, inputf1, k2p, linear_phase_start, linear_phase_end, fig):
    '''
    Logan reference plot with fixed k2p, based on eq.6 in
    "Distribution Volume Ratios Without Blood Sampling from Graphical
    Analysis of PET Data".

    Args:
        tac: 1D array, target time-activity curve.
        dt: 2 x n_frames array of frame start/end times.
        inputf1: reference input function sampled at 1-unit time steps.
        k2p: fixed reference-region efflux constant k2'.
        linear_phase_start: start time of the linear phase (None -> 0).
        linear_phase_end: end time of the linear phase (None -> max(dt)).
        fig: if truthy, plot the Logan fit.

    Returns:
        dict with the binding potential estimate 'bp'.
    '''
    if linear_phase_start is None:
        linear_phase_start = 0
    if linear_phase_end is None:
        linear_phase_end = np.amax(dt)
    # fill the coffee break gap
    if kt.dt_has_gaps(dt):
        tac, dt = kt.tac_dt_fill_coffee_break(tac, dt)
    mft = kt.dt2mft(dt)
    mft = np.append(0, mft)
    dt_new = np.array([mft[:-1], mft[1:]])
    tdur = kt.dt2tdur(dt_new)
    # resample the reference input at the mid-frame times
    inputff = interp1d(np.arange(len(inputf1)), inputf1, kind='linear', fill_value='extrapolate')
    input_dt = inputff(mft)
    input_dt = input_dt[1:]
    tac = np.append(0, tac)
    input_dt = np.append(0, input_dt)
    # set negative values to zero
    tac[tac < 0] = 0.0
    input_dt[input_dt < 0] = 0.0
    # trapezoidal integration; aligned with tac
    tac_cum = np.cumsum((tac[:-1] + tac[1:]) / 2 * tdur)
    input_cum = np.cumsum((input_dt[:-1] + input_dt[1:]) / 2 * tdur)
    tac = tac[1:]
    input_dt = input_dt[1:]
    # small epsilon avoids division by zero for empty frames
    eps = 1e-16
    yy = tac_cum / (tac + eps)
    xx = (input_cum + input_dt / k2p) / (tac + eps)
    # find tt for the linear phase
    tt = np.logical_and(mft >= linear_phase_start, mft <= linear_phase_end)
    tt = tt[1:]
    # select tt for tac > 0
    tt = np.logical_and(tt, tac > 0)
    # select tt for xx < inf, yy < inf
    infinf = 1e10
    tt = np.logical_and(tt, xx < infinf)
    tt = np.logical_and(tt, yy < infinf)
    # linear regression over the selected linear phase; the slope is DVR
    xx = xx[tt]
    yy = yy[tt]
    dvr, inter, _, _, _ = linregress(xx, yy)
    bp = dvr - 1
    yyf = dvr * xx + inter
    if fig:
        plt.plot(xx, yy, '.')
        # bug fix: xx/yyf are already restricted to the linear phase;
        # indexing them again with the full-length mask tt raised
        # IndexError whenever fig was set
        plt.plot(xx, yyf, 'r')
        plt.show()
    kps = {'bp': bp}
    return kps
# mrtm - Ichise's multilinear reference tissue model
def mrtm(tac, dt, inputf1, linear_phase_start, linear_phase_end, fig):
    '''
    Ichise's multilinear reference tissue model (MRTM).

    Args:
        tac: 1D array, target time-activity curve.
        dt: 2 x n_frames array of frame start/end times.
        inputf1: reference input function sampled at 1-unit time steps.
        linear_phase_start: start time of the linear phase (None -> 0).
        linear_phase_end: end time of the linear phase (None -> max(dt)).
        fig: if truthy, plot the measured and fitted TAC.

    Returns:
        dict with estimates 'bp', 'k2p', 'r1' and 'k2'.
    '''
    if linear_phase_start is None:
        linear_phase_start = 0
    if linear_phase_end is None:
        linear_phase_end = np.amax(dt)
    # fill the coffee break gap
    if kt.dt_has_gaps(dt):
        tac, dt = kt.tac_dt_fill_coffee_break(tac, dt)
    mft = kt.dt2mft(dt)
    mft = np.append(0, mft)
    dt_new = np.array([mft[:-1], mft[1:]])
    tdur = kt.dt2tdur(dt_new)
    # input_dt = kt.int2dt(inputf1, dt)
    # resample the reference input at the mid-frame times
    inputff = interp1d(np.arange(len(inputf1)), inputf1, kind='linear', fill_value='extrapolate')
    input_dt = inputff(mft)
    input_dt = input_dt[1:]
    tac = np.append(0, tac)
    input_dt = np.append(0, input_dt)
    # set negative values to zero
    tac[tac < 0] = 0.0
    input_dt[input_dt < 0] = 0.0
    # trapezoidal integration of target and reference curves
    tac_cum = np.cumsum((tac[:-1] + tac[1:]) / 2 * tdur)
    input_cum = np.cumsum((input_dt[:-1] + input_dt[1:]) / 2 * tdur)
    tac = tac[1:]
    input_dt = input_dt[1:]
    # multilinear regressors of the MRTM operational equation
    yy = tac
    xx = np.column_stack((input_cum, tac_cum, input_dt))
    # find tt for the linear phase
    tt = np.logical_and(mft >= linear_phase_start, mft <= linear_phase_end)
    tt = tt[1:]
    mft = mft[1:]
    reg = LinearRegression(fit_intercept=False).fit(xx[tt,], yy[tt])
    bp = - reg.coef_[0] / reg.coef_[1] - 1
    k2p = reg.coef_[0] / reg.coef_[2]
    # for 1 TC
    r1 = reg.coef_[2]
    k2 = - reg.coef_[1]
    # clamp implausible / undefined estimates to safe defaults
    if np.isnan(bp):
        bp = 0
    if np.isnan(r1):
        r1 = 1.0
    if r1 > 5:
        r1 = 5
    if r1 < -5:
        r1 = -5
    if bp > 10:
        bp = 0
    if bp < -10:
        bp = 0
    yyf = reg.predict(xx)
    if fig:
        plt.plot(mft, yy, '.')
        plt.plot(mft, yyf, 'r')
        plt.show()
    kps = {'bp': bp, 'k2p': k2p, 'r1': r1, 'k2': k2}
    return kps
# mrtm - Ichise's multilinear reference tissue model with fixed k2prime
def mrtm_k2p(tac, dt, inputf1, k2p, linear_phase_start, linear_phase_end, fig):
    '''
    Ichise's multilinear reference tissue model with fixed k2prime (MRTM2).

    Args:
        tac: 1D array, target time-activity curve.
        dt: 2 x n_frames array of frame start/end times.
        inputf1: reference input function sampled at 1-unit time steps.
        k2p: fixed reference-region efflux constant k2'.
        linear_phase_start: start time of the linear phase (None -> 0).
        linear_phase_end: end time of the linear phase (None -> max(dt)).
        fig: if truthy, plot the measured and fitted TAC.

    Returns:
        dict with estimates 'bp' and 'r1'.
    '''
    if linear_phase_start is None:
        linear_phase_start = 0
    if linear_phase_end is None:
        linear_phase_end = np.amax(dt)
    # fill the coffee break gap
    if kt.dt_has_gaps(dt):
        tac, dt = kt.tac_dt_fill_coffee_break(tac, dt)
    mft = kt.dt2mft(dt)
    mft = np.append(0, mft)
    dt_new = np.array([mft[:-1], mft[1:]])
    tdur = kt.dt2tdur(dt_new)
    # input_dt = kt.int2dt(inputf1,dt)
    # resample the reference input at the mid-frame times
    inputff = interp1d(np.arange(len(inputf1)), inputf1, kind='linear', fill_value='extrapolate')
    input_dt = inputff(mft)
    input_dt = input_dt[1:]
    tac = np.append(0, tac)
    input_dt = np.append(0, input_dt)
    # set negative values to zero
    tac[tac < 0] = 0.0
    input_dt[input_dt < 0] = 0.0
    # trapezoidal integration of target and reference curves
    tac_cum = np.cumsum((tac[:-1] + tac[1:]) / 2 * tdur)
    input_cum = np.cumsum((input_dt[:-1] + input_dt[1:]) / 2 * tdur)
    tac = tac[1:]
    input_dt = input_dt[1:]
    # two-regressor operational equation with k2' folded into the input
    yy = tac
    xx = np.column_stack((input_cum + 1 / k2p * input_dt, tac_cum))
    # find tt for the linear phase
    tt = np.logical_and(mft >= linear_phase_start, mft <= linear_phase_end)
    tt = tt[1:]
    mft = mft[1:]
    reg = LinearRegression(fit_intercept=False).fit(xx[tt,], yy[tt])
    bp = - reg.coef_[0] / reg.coef_[1] - 1
    # for 1 TC
    k2 = -reg.coef_[1]
    r1 = reg.coef_[0] / k2p
    # clamp implausible / undefined estimates to safe defaults
    if np.isnan(bp):
        bp = 0
    if np.isnan(r1):
        r1 = 1.0
    if r1 > 5:
        r1 = 5
    if r1 < -5:
        r1 = -5
    if bp > 10:
        bp = 0
    if bp < -10:
        bp = 0
    yyf = reg.predict(xx)
    if fig:
        plt.plot(mft, yy, '.')
        plt.plot(mft, yyf, 'r')
        plt.show()
    kps = {'bp': bp, 'r1': r1}
    return kps
# # # # # # graphic models - PPET version
# logan_ref - logan reference plot without fixed k2p for tac, based on eq.7 in
# "Distribution Volume Ratios Without Blood Sampling from Graphical Analysis of PET Data"
# PPET version: calculate input_dt and input_cum differently
def logan_ref_ppet(tac, dt, ref, linear_phase_start, linear_phase_end, fig):
    '''
    Logan reference plot without fixed k2p, PPET variant: the reference
    input and its integral are derived from a reference-region object
    rather than a 1-unit-step input function.

    Args:
        tac: 1D array, target time-activity curve.
        dt: 2 x n_frames array of frame start/end times.
        ref: reference-region object exposing .tac and .dt.
        linear_phase_start: start time of the linear phase (None -> 0).
        linear_phase_end: end time of the linear phase (None -> max(dt)).
        fig: if truthy, plot the Logan fit.

    Returns:
        dict with the binding potential estimate 'bp'.
    '''
    if linear_phase_start is None:
        linear_phase_start = 0
    if linear_phase_end is None:
        linear_phase_end = np.amax(dt)
    # fill the coffee break gap
    if kt.dt_has_gaps(dt):
        tac, dt = kt.tac_dt_fill_coffee_break(tac, dt)
    mft = kt.dt2mft(dt)
    mft = np.append(0, mft)
    dt_new = np.array([mft[:-1], mft[1:]])
    tdur = kt.dt2tdur(dt_new)
    # get input_dt from ref.tac, if dt and ref.dt are the same
    if np.array_equal(dt, ref.dt):
        input_dt = ref.tac
    else:
        inputff = interp1d(kt.dt2mft(ref.dt), ref.tac, kind='linear', fill_value='extrapolate')
        input_dt = inputff(mft)
        input_dt = input_dt[1:]
    tac = np.append(0, tac)
    input_dt = np.append(0, input_dt)
    # set negative values to zero
    tac[tac < 0] = 0.0
    input_dt[input_dt < 0] = 0.0
    # calculate integration
    tac_cum = np.cumsum((tac[:-1] + tac[1:]) / 2 * tdur)
    # calculate input_cum using inputf1, and only until mid frame time
    inputf1 = kt.interpt1(kt.dt2mft(ref.dt), ref.tac, dt)
    input_cum1 = np.cumsum(inputf1)
    input_cum1_mft = input_cum1[mft.astype(int)]
    input_cum = input_cum1_mft
    # tac_cum and input_cum are calculated in such a way to match tac
    tac = tac[1:]
    input_dt = input_dt[1:]
    input_cum = input_cum[1:]
    # tiny epsilon guards against division by zero for empty frames
    yy = tac_cum / (tac + 0.0000000000000001)  # ADDED BY MY 20210616
    xx = input_cum / (tac + 0.0000000000000001)  # ADDED BY MY 20210616
    # find tt for the linear phase
    tt = np.logical_and(mft >= linear_phase_start, mft <= linear_phase_end)
    tt = tt[1:]
    # select tt for tac > 0
    tt = np.logical_and(tt, tac > 0)
    # select tt for xx < inf, yy < inf
    infinf = 1e10
    tt = np.logical_and(tt, xx < infinf)
    tt = np.logical_and(tt, yy < infinf)
    # do linear regression with selected tt; the slope is DVR
    xx = xx[tt]
    yy = yy[tt]
    dvr, inter, _, _, _ = linregress(xx, yy)
    bp = dvr - 1
    yyf = dvr * xx + inter
    if fig:
        plt.plot(xx, yy, '.')
        plt.plot(xx, yyf, 'r')
        plt.show()
    kps = {'bp': bp}
    return kps
# logan_ref_k2p - logan reference plot with fixed k2p for tac, based on eq.6 in
# # "Distribution Volume Ratios Without Blood Sampling from Graphical Analysis of PET Data"
# PPET version: calculate input_dt and input_cum differently
def logan_ref_k2p_ppet(tac, dt, ref, k2p, linear_phase_start, linear_phase_end, fig):
    '''
    Logan reference plot with fixed k2p, PPET variant: the reference
    input and its integral are derived from a reference-region object
    rather than a 1-unit-step input function.

    Args:
        tac: 1D array, target time-activity curve.
        dt: 2 x n_frames array of frame start/end times.
        ref: reference-region object exposing .tac and .dt.
        k2p: fixed reference-region efflux constant k2'.
        linear_phase_start: start time of the linear phase (None -> 0).
        linear_phase_end: end time of the linear phase (None -> max(dt)).
        fig: if truthy, plot the Logan fit.

    Returns:
        dict with the binding potential estimate 'bp'.
    '''
    if linear_phase_start is None:
        linear_phase_start = 0
    if linear_phase_end is None:
        linear_phase_end = np.amax(dt)
    # fill the coffee break gap
    if kt.dt_has_gaps(dt):
        tac, dt = kt.tac_dt_fill_coffee_break(tac, dt)
    mft = kt.dt2mft(dt)
    mft = np.append(0, mft)
    dt_new = np.array([mft[:-1], mft[1:]])
    tdur = kt.dt2tdur(dt_new)
    # get input_dt from ref.tac, if dt and ref.dt are the same
    if np.array_equal(dt, ref.dt):
        input_dt = ref.tac
    else:
        inputff = interp1d(kt.dt2mft(ref.dt), ref.tac, kind='linear', fill_value='extrapolate')
        input_dt = inputff(mft)
        input_dt = input_dt[1:]
    tac = np.append(0, tac)
    input_dt = np.append(0, input_dt)
    # set negative values to zero
    tac[tac < 0] = 0.0
    input_dt[input_dt < 0] = 0.0
    tac_cum = np.cumsum((tac[:-1] + tac[1:]) / 2 * tdur)
    # calculate input_cum using inputf1, and only until mid frame time
    inputf1 = kt.interpt1(kt.dt2mft(ref.dt), ref.tac, dt)
    input_cum1 = np.cumsum(inputf1)
    input_cum1_mft = input_cum1[mft.astype(int)]
    input_cum = input_cum1_mft
    # tac_cum and input_cum are calculated in such a way to match tac
    tac = tac[1:]
    input_dt = input_dt[1:]
    input_cum = input_cum[1:]
    # small epsilon avoids division by zero for empty frames
    eps = 1e-16
    yy = tac_cum / (tac + eps)
    xx = (input_cum + input_dt / k2p) / (tac + eps)
    # find tt for the linear phase
    tt = np.logical_and(mft >= linear_phase_start, mft <= linear_phase_end)
    tt = tt[1:]
    # select tt for tac > 0
    tt = np.logical_and(tt, tac > 0)
    # select tt for xx < inf, yy < inf
    infinf = 1e10
    tt = np.logical_and(tt, xx < infinf)
    tt = np.logical_and(tt, yy < infinf)
    # linear regression over the selected linear phase; the slope is DVR
    xx = xx[tt]
    yy = yy[tt]
    dvr, inter, _, _, _ = linregress(xx, yy)
    bp = dvr - 1
    yyf = dvr * xx + inter
    if fig:
        plt.plot(xx, yy, '.')
        # bug fix: xx/yyf are already restricted to the linear phase;
        # indexing them again with the full-length mask tt raised
        # IndexError whenever fig was set
        plt.plot(xx, yyf, 'r')
        plt.show()
    kps = {'bp': bp}
    return kps
# mrtm - Ichise's multilinear reference tissue model
# PPET version: calculate input_dt and input_cum differently
def mrtm_ppet(tac, dt, ref, linear_phase_start, linear_phase_end, fig):
    '''
    Ichise's multilinear reference tissue model, PPET variant: the
    reference input and its integral are derived from a reference-region
    object rather than a 1-unit-step input function.

    Args:
        tac: 1D array, target time-activity curve.
        dt: 2 x n_frames array of frame start/end times.
        ref: reference-region object exposing .tac and .dt.
        linear_phase_start: start time of the linear phase (None -> 0).
        linear_phase_end: end time of the linear phase (None -> max(dt)).
        fig: if truthy, plot the measured and fitted TAC.

    Returns:
        dict with estimates 'bp', 'k2p', 'r1' and 'k2'.
    '''
    if linear_phase_start is None:
        linear_phase_start = 0
    if linear_phase_end is None:
        linear_phase_end = np.amax(dt)
    # fill the coffee break gap
    if kt.dt_has_gaps(dt):
        tac, dt = kt.tac_dt_fill_coffee_break(tac, dt)
    mft = kt.dt2mft(dt)
    mft = np.append(0, mft)
    dt_new = np.array([mft[:-1], mft[1:]])
    tdur = kt.dt2tdur(dt_new)
    # get input_dt from ref.tac, if dt and ref.dt are the same
    if np.array_equal(dt, ref.dt):
        input_dt = ref.tac
    else:
        inputff = interp1d(kt.dt2mft(ref.dt), ref.tac, kind='linear', fill_value='extrapolate')
        input_dt = inputff(mft)
        input_dt = input_dt[1:]
    tac = np.append(0, tac)
    input_dt = np.append(0, input_dt)
    # set negative values to zero
    tac[tac < 0] = 0.0
    input_dt[input_dt < 0] = 0.0
    tac_cum = np.cumsum((tac[:-1] + tac[1:]) / 2 * tdur)
    # calculate input_cum using inputf1, and only until mid frame time
    inputf1 = kt.interpt1(kt.dt2mft(ref.dt), ref.tac, dt)
    input_cum1 = np.cumsum(inputf1)
    input_cum1_mft = input_cum1[mft.astype(int)]
    input_cum = input_cum1_mft
    # tac_cum and input_cum are calculated in such a way to match tac
    tac = tac[1:]
    input_dt = input_dt[1:]
    input_cum = input_cum[1:]
    # multilinear regressors of the MRTM operational equation
    yy = tac
    xx = np.column_stack((input_cum, tac_cum, input_dt))
    # find tt for the linear phase
    tt = np.logical_and(mft >= linear_phase_start, mft <= linear_phase_end)
    tt = tt[1:]
    mft = mft[1:]
    reg = LinearRegression(fit_intercept=False).fit(xx[tt,], yy[tt])
    bp = - reg.coef_[0] / reg.coef_[1] - 1
    k2p = reg.coef_[0] / reg.coef_[2]
    # for 1 TC
    r1 = reg.coef_[2]
    k2 = - reg.coef_[1]
    # clamp implausible / undefined estimates to safe defaults
    if np.isnan(bp):
        bp = 0
    if np.isnan(r1):
        r1 = 1.0
    if r1 > 5:
        r1 = 5
    if r1 < -5:
        r1 = -5
    if bp > 10:
        bp = 0
    if bp < -10:
        bp = 0
    yyf = reg.predict(xx)
    if fig:
        plt.plot(mft, yy, '.')
        plt.plot(mft, yyf, 'r')
        plt.show()
    kps = {'bp': bp, 'k2p': k2p, 'r1': r1, 'k2': k2}
    return kps
# mrtm - Ichise's multilinear reference tissue model with fixed k2prime
# PPET version: calculate input_dt and input_cum differently
def mrtm_k2p_ppet(tac, dt, ref, k2p, linear_phase_start, linear_phase_end, fig):
    '''
    Ichise's multilinear reference tissue model with fixed k2prime, PPET
    variant: the reference input and its integral are derived from a
    reference-region object rather than a 1-unit-step input function.

    Args:
        tac: 1D array, target time-activity curve.
        dt: 2 x n_frames array of frame start/end times.
        ref: reference-region object exposing .tac and .dt.
        k2p: fixed reference-region efflux constant k2'.
        linear_phase_start: start time of the linear phase (None -> 0).
        linear_phase_end: end time of the linear phase (None -> max(dt)).
        fig: if truthy, plot the measured and fitted TAC.

    Returns:
        dict with estimates 'bp' and 'r1'.
    '''
    if linear_phase_start is None:
        linear_phase_start = 0
    if linear_phase_end is None:
        linear_phase_end = np.amax(dt)
    # fill the coffee break gap
    if kt.dt_has_gaps(dt):
        tac, dt = kt.tac_dt_fill_coffee_break(tac, dt)
    mft = kt.dt2mft(dt)
    mft = np.append(0, mft)
    dt_new = np.array([mft[:-1], mft[1:]])
    tdur = kt.dt2tdur(dt_new)
    # get input_dt from ref.tac, if dt and ref.dt are the same
    if np.array_equal(dt, ref.dt):
        input_dt = ref.tac
    else:
        inputff = interp1d(kt.dt2mft(ref.dt), ref.tac, kind='linear', fill_value='extrapolate')
        input_dt = inputff(mft)
        input_dt = input_dt[1:]
    tac = np.append(0, tac)
    input_dt = np.append(0, input_dt)
    # set negative values to zero
    tac[tac < 0] = 0.0
    input_dt[input_dt < 0] = 0.0
    tac_cum = np.cumsum((tac[:-1] + tac[1:]) / 2 * tdur)
    # calculate input_cum using inputf1, and only until mid frame time
    inputf1 = kt.interpt1(kt.dt2mft(ref.dt), ref.tac, dt)
    input_cum1 = np.cumsum(inputf1)
    input_cum1_mft = input_cum1[mft.astype(int)]
    input_cum = input_cum1_mft
    # tac_cum and input_cum are calculated in such a way to match tac
    tac = tac[1:]
    input_dt = input_dt[1:]
    input_cum = input_cum[1:]
    # two-regressor operational equation with k2' folded into the input
    yy = tac
    xx = np.column_stack((input_cum + 1 / k2p * input_dt, tac_cum))
    # find tt for the linear phase
    tt = np.logical_and(mft >= linear_phase_start, mft <= linear_phase_end)
    tt = tt[1:]
    mft = mft[1:]
    reg = LinearRegression(fit_intercept=False).fit(xx[tt,], yy[tt])
    bp = - reg.coef_[0] / reg.coef_[1] - 1
    # for 1 TC
    k2 = -reg.coef_[1]
    r1 = reg.coef_[0] / k2p
    # clamp implausible / undefined estimates to safe defaults
    if np.isnan(bp):
        bp = 0
    if np.isnan(r1):
        r1 = 1.0
    if r1 > 5:
        r1 = 5
    if r1 < -5:
        r1 = -5
    if bp > 10:
        bp = 0
    if bp < -10:
        bp = 0
    yyf = reg.predict(xx)
    if fig:
        plt.plot(mft, yy, '.')
        plt.plot(mft, yyf, 'r')
        plt.show()
    kps = {'bp': bp, 'r1': r1}
    return kps
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # non-linear models
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
# srtm - srtm model for tac, with non-linear optimisation
def srtm_fun(inputf1_dt, r1, k2, bp):
    '''
    Compute the SRTM model TAC for the given kinetic parameters.

    Args:
        inputf1_dt: tuple (inputf1, dt) of the reference input at 1-unit
            time steps and the 2 x n_frames frame-time array.
        r1, k2, bp: SRTM kinetic parameters.

    Returns:
        Model TAC integrated over the frames in dt.
    '''
    inputf1, dt = inputf1_dt
    theta = kp.srtm_kp2theta(r1, k2, bp)
    t1 = np.arange(np.amax(dt))
    # impulse response: direct term plus convolution with exp decay
    decay = np.exp(-theta[2] * t1)
    tac1 = theta[0] * inputf1
    tac1 += theta[1] * np.convolve(inputf1, decay)[0:tac1.size]
    return kt.int2dt(tac1, dt)
def srtm_para2tac(r1, k2, bp, inputf1_dt):
    '''
    Wrap srtm_fun, returning the model TAC in a result dict.

    Args:
        r1, k2, bp: SRTM kinetic parameters.
        inputf1_dt: tuple (inputf1, dt) as accepted by srtm_fun.

    Returns:
        dict with the model curve under 'tacf'.
    '''
    return {'tacf': srtm_fun(inputf1_dt, r1, k2, bp)}
def srtm_fun_w(inputf1_dt_w, r1, k2, bp):
    '''
    Weighted SRTM model function for use with curve_fit.

    Args:
        inputf1_dt_w: tuple (inputf1, dt, w); w is per-frame weights or
            None for uniform weighting.
        r1, k2, bp: SRTM kinetic parameters.

    Returns:
        Weighted model TAC.
    '''
    inputf1, dt, w = inputf1_dt_w
    weight = 1 if w is None else w
    return srtm_fun((inputf1, dt), r1, k2, bp) * weight
def srtm(tac, dt, inputf1, w):
    '''
    Fit the SRTM to a TAC by non-linear least squares.

    Args:
        tac: 1D array, target time-activity curve.
        dt: 2 x n_frames array of frame start/end times.
        inputf1: reference input function at 1-unit time steps.
        w: per-frame weights, or None for uniform weights.

    Returns:
        dict with 'r1', 'k2', 'bp' and the fitted curve 'tacf'.
    '''
    weights = 1 if w is None else w
    # srtm_fun_w handles w=None internally, so the raw w is passed along
    p, _ = curve_fit(srtm_fun_w, (inputf1, dt, w), tac * weights,
                     p0=[1, 0.00005, 0.0], bounds=(0, [3, 1, 10]))
    r1, k2, bp = p
    tacf = srtm_fun((inputf1, dt), r1, k2, bp)
    return {'r1': r1, 'k2': k2, 'bp': bp, 'tacf': tacf}
# srtm_k2p - srtm model for tac with fixed k2p, with non-linear optimisation
def srtm_fun_k2p(inputf1_dt_k2p, theta_0, theta_2):
    '''
    SRTM model function with k2p fixed; theta_1 is derived from theta_0,
    theta_2 and the fixed k2p.

    Args:
        inputf1_dt_k2p: tuple (inputf1, dt, k2p).
        theta_0, theta_2: free SRTM theta parameters.

    Returns:
        Model TAC integrated over the frames in dt.
    '''
    inputf1, dt, k2p = inputf1_dt_k2p
    theta = np.array([theta_0, theta_0 * (k2p - theta_2), theta_2])
    r1, k2, bp = kp.srtm_theta2kp(theta)
    return srtm_fun((inputf1, dt), r1, k2, bp)
def srtm_fun_k2p_w(inputf1_dt_k2p_w, theta_0, theta_2):
    '''
    Weighted SRTM model function with fixed k2p for use with curve_fit.

    Args:
        inputf1_dt_k2p_w: tuple (inputf1, dt, k2p, w); w is per-frame
            weights or None for uniform weighting.
        theta_0, theta_2: free SRTM theta parameters.

    Returns:
        Weighted model TAC.
    '''
    inputf1, dt, k2p, w = inputf1_dt_k2p_w
    weight = 1 if w is None else w
    return srtm_fun_k2p((inputf1, dt, k2p), theta_0, theta_2) * weight
def srtm_k2p(tac, dt, inputf1, w, k2p):
    '''
    Fit the SRTM with fixed k2p to a TAC by non-linear least squares.

    Args:
        tac: 1D array, target time-activity curve.
        dt: 2 x n_frames array of frame start/end times.
        inputf1: reference input function at 1-unit time steps.
        w: per-frame weights, or None for uniform weights.
        k2p: fixed reference-region efflux constant k2'.

    Returns:
        dict with 'r1', 'k2', 'bp' and the fitted curve 'tacf'.
    '''
    weights = 1 if w is None else w
    # srtm_fun_k2p_w handles w=None internally, so the raw w is passed
    p, _ = curve_fit(srtm_fun_k2p_w, (inputf1, dt, k2p, w), tac * weights,
                     p0=(1, 0.5), bounds=(0, [3, 10]))
    theta_0, theta_2 = p
    theta = np.array([theta_0, theta_0 * (k2p - theta_2), theta_2])
    r1, k2, bp = kp.srtm_theta2kp(theta)
    tacf = srtm_fun_k2p((inputf1, dt, k2p), theta_0, theta_2)
    return {'r1': r1, 'k2': k2, 'bp': bp, 'tacf': tacf}
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # model for ref input
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
def exp_1_fun_t(t, a0, a1, b1):
    '''
    Single exponential plus constant, evaluated at time(s) t.

    Args:
        t: scalar or array of time points.
        a0: constant offset.
        a1: exponential amplitude.
        b1: exponential decay rate.

    Returns:
        a0 + a1 * exp(-b1 * t), same shape as t.
    '''
    return a0 + a1 * np.exp(-b1 * t)
def exp_1_fun(ts_te_w, a0, a1, b1):
    '''
    Analytic integral of the single-exponential model over [ts, te],
    optionally weighted; used as the curve_fit target for frame data.

    Args:
        ts_te_w: tuple (ts, te, w) of frame starts, frame ends and
            weights (None means uniform weighting).
        a0, a1, b1: model parameters as in exp_1_fun_t.

    Returns:
        Weighted integral of a0 + a1*exp(-b1*t) from ts to te.
    '''
    ts, te, w = ts_te_w
    weight = 1 if w is None else w
    integral = a0 * (te - ts) - a1 * (np.exp(-b1 * te) - np.exp(-b1 * ts)) / b1
    return integral * weight
def exp_1(tac, dt, idx, w, fig):
    '''
    Fit a single-exponential-plus-constant model to frame-integrated
    TAC data.

    Args:
        tac: 1D array TAC; negatives are clipped to zero in place.
        dt: 2 x n_frames array of frame start/end times.
        idx: index selecting the frames used in the fit.
        w: per-frame weights, or None for uniform weights.
        fig: if truthy, print the fit and plot it against the data.

    Returns:
        Tuple (tac1f, p): fitted curve at 1-unit time steps and the
        fitted parameters (a0, a1, b1).
    '''
    tac[tac < 0] = 0.0
    if w is None:
        w = np.ones_like(tac)
    frame_data = (dt[0, idx], dt[1, idx], w[idx])
    p, _ = curve_fit(exp_1_fun, frame_data, tac[idx] * w[idx], p0=(1000, 5000, 10))
    t1 = np.arange(np.amax(dt))
    tac1f = exp_1_fun_t(t1, *p)
    if fig:
        print(p)
        plt.plot(t1, tac1f, 'b', kt.dt2mft(dt), tac, 'go')
        plt.show()
    return tac1f, p
def exp_2_fun_t(t, a0, a1, a2, b1, b2):
    '''
    Bi-exponential plus constant, evaluated at time(s) t.

    Args:
        t: scalar or array of time points.
        a0: constant offset.
        a1, a2: exponential amplitudes.
        b1, b2: exponential decay rates.

    Returns:
        a0 + a1*exp(-b1*t) + a2*exp(-b2*t), same shape as t.
    '''
    return a0 + a1 * np.exp(-b1 * t) + a2 * np.exp(-b2 * t)
def exp_2_fun(ts_te_w, a0, a1, a2, b1, b2):
    '''
    Analytic integral of the bi-exponential model over [ts, te],
    optionally weighted; used as the curve_fit target for frame data.

    Args:
        ts_te_w: tuple (ts, te, w) of frame starts, frame ends and
            weights (None means uniform weighting).
        a0, a1, a2, b1, b2: model parameters as in exp_2_fun_t.

    Returns:
        Weighted integral of the model from ts to te.
    '''
    ts, te, w = ts_te_w
    weight = 1 if w is None else w
    term0 = a0 * (te - ts)
    term1 = a1 * (np.exp(-b1 * te) - np.exp(-b1 * ts)) / b1
    term2 = a2 * (np.exp(-b2 * te) - np.exp(-b2 * ts)) / b2
    return (term0 - term1 - term2) * weight
def exp_2(tac, dt, idx, w, fig):
    '''
    Fit a bi-exponential-plus-constant model to frame-integrated TAC
    data.

    Args:
        tac: 1D array TAC; negatives are clipped to zero in place.
        dt: 2 x n_frames array of frame start/end times.
        idx: index selecting the frames used in the fit.
        w: per-frame weights, or None for uniform weights.
        fig: if truthy, print the fit and plot it against the data.

    Returns:
        Tuple (tac1f, p): fitted curve at 1-unit time steps and the
        fitted parameters (a0, a1, a2, b1, b2).
    '''
    tac[tac < 0] = 0.0
    if w is None:
        w = np.ones_like(tac)
    frame_data = (dt[0, idx], dt[1, idx], w[idx])
    p, _ = curve_fit(exp_2_fun, frame_data, tac[idx] * w[idx], p0=(1, 1, 1, 0, 0))
    t1 = np.arange(np.amax(dt))
    tac1f = exp_2_fun_t(t1, *p)
    if fig:
        print(p)
        plt.plot(t1, tac1f, 'b', kt.dt2mft(dt), tac, 'go')
        plt.show()
    return tac1f, p
def exp_am(tac, dt, idx, fig):
    '''
    Fit the single-exponential model directly to the TAC samples at the
    mid-frame times (no frame integration, no weighting).

    Args:
        tac: 1D array TAC; negatives are clipped to zero in place.
        dt: 2 x n_frames array of frame start/end times.
        idx: index selecting the frames used in the fit.
        fig: if truthy, print the fit and plot it against the data.

    Returns:
        Tuple (tac1f, p): fitted curve at 1-unit time steps and the
        fitted parameters (a0, a1, b1).
    '''
    tac[tac < 0] = 0.0
    mft = kt.dt2mft(dt)
    p, _ = curve_fit(exp_1_fun_t, mft[idx], tac[idx], p0=(0.1, 1, 0.1))
    t1 = np.arange(np.amax(dt))
    tac1f = exp_1_fun_t(t1, *p)
    if fig:
        print(p)
        plt.plot(t1, tac1f, 'b', mft, tac, 'go')
        plt.show()
    return tac1f, p
def feng_srtm_fun(ts_te_w, a0, a1, a2, a3, b0, b1, b2, b3):
    '''
    Frame-averaged value of the Feng-input/SRTM reference model over
    [ts, te], optionally weighted; used as the curve_fit target in
    feng_srtm.

    The closed-form expression below is the analytic integral of
    feng_srtm_fun_t over a frame divided by the frame duration
    (te - ts), expanded symbolically.

    Args:
        ts_te_w: tuple (ts, te, w) of frame starts, frame ends and
            weights (None means uniform weighting).
        a0, a1, a2, a3: amplitude parameters (a0-a2: Feng input;
            a3: tissue response).
        b0, b1, b2, b3: rate parameters (b0-b2: Feng input; b3: tissue).

    Returns:
        Weighted frame-average of the model curve.
    '''
    ts, te, w = ts_te_w
    if w is None:
        w = 1
    # analytic frame integral, generated symbolically; kept verbatim
    cr_dt_fun = (a0 * a3 * (b0 ** 2 * te / (
            b0 ** 4 * np.exp(b0 * te) - 2 * b0 ** 3 * b3 * np.exp(b0 * te) + b0 ** 2 * b3 ** 2 * np.exp(
        b0 * te)) - b0 * b3 * te / (
            b0 ** 4 * np.exp(b0 * te) - 2 * b0 ** 3 * b3 * np.exp(
        b0 * te) + b0 ** 2 * b3 ** 2 * np.exp(
        b0 * te)) + 2 * b0 / (
            b0 ** 4 * np.exp(b0 * te) - 2 * b0 ** 3 * b3 * np.exp(
        b0 * te) + b0 ** 2 * b3 ** 2 * np.exp(
        b0 * te)) - b3 / (
            b0 ** 4 * np.exp(b0 * te) - 2 * b0 ** 3 * b3 * np.exp(
        b0 * te) + b0 ** 2 * b3 ** 2 * np.exp(
        b0 * te))) - a0 * a3 * (b0 ** 2 * ts / (
            b0 ** 4 * np.exp(b0 * ts) - 2 * b0 ** 3 * b3 * np.exp(b0 * ts) + b0 ** 2 * b3 ** 2 * np.exp(
        b0 * ts)) - b0 * b3 * ts / (
            b0 ** 4 * np.exp(b0 * ts) - 2 * b0 ** 3 * b3 * np.exp(
        b0 * ts) + b0 ** 2 * b3 ** 2 * np.exp(b0 * ts)) + 2 * b0 / (
            b0 ** 4 * np.exp(b0 * ts) - 2 * b0 ** 3 * b3 * np.exp(
        b0 * ts) + b0 ** 2 * b3 ** 2 * np.exp(b0 * ts)) - b3 / (
            b0 ** 4 * np.exp(b0 * ts) - 2 * b0 ** 3 * b3 * np.exp(
        b0 * ts) + b0 ** 2 * b3 ** 2 * np.exp(
        b0 * ts))) + a0 * a3 * np.exp(-b3 * ts) / (
            b3 * (b0 ** 2 - 2 * b0 * b3 + b3 ** 2)) - a0 * a3 * np.exp(-b3 * te) / (
            b3 * (b0 ** 2 - 2 * b0 * b3 + b3 ** 2)) - a1 * a3 * np.exp(-b1 * ts) / (
            b1 ** 2 - b1 * b3) + a1 * a3 * np.exp(-b1 * te) / (b1 ** 2 - b1 * b3) + a1 * a3 * np.exp(
        -b0 * ts) / (
            b0 ** 2 - b0 * b3) - a1 * a3 * np.exp(-b0 * te) / (b0 ** 2 - b0 * b3) + a1 * a3 * np.exp(
        -b3 * ts) / (
            b3 * (b1 - b3)) - a1 * a3 * np.exp(-b3 * te) / (b3 * (b1 - b3)) - a1 * a3 * np.exp(
        -b3 * ts) / (
            b3 * (b0 - b3)) + a1 * a3 * np.exp(-b3 * te) / (b3 * (b0 - b3)) - a2 * a3 * np.exp(
        -b2 * ts) / (
            b2 ** 2 - b2 * b3) + a2 * a3 * np.exp(-b2 * te) / (b2 ** 2 - b2 * b3) + a2 * a3 * np.exp(
        -b0 * ts) / (
            b0 ** 2 - b0 * b3) - a2 * a3 * np.exp(-b0 * te) / (b0 ** 2 - b0 * b3) + a2 * a3 * np.exp(
        -b3 * ts) / (
            b3 * (b2 - b3)) - a2 * a3 * np.exp(-b3 * te) / (b3 * (b2 - b3)) - a2 * a3 * np.exp(
        -b3 * ts) / (
            b3 * (b0 - b3)) + a2 * a3 * np.exp(-b3 * te) / (b3 * (b0 - b3))) / (te - ts)
    return cr_dt_fun * w
def feng_fun_t(t, a0, a1, a2, b0, b1, b2):
    '''
    Feng plasma input model evaluated at time(s) t:
    a0*t*exp(-b0*t) + a1*(exp(-b1*t) - exp(-b0*t))
    + a2*(exp(-b2*t) - exp(-b0*t)).

    Args:
        t: scalar or array of time points.
        a0, a1, a2: amplitude parameters.
        b0, b1, b2: decay-rate parameters.

    Returns:
        Model input curve value(s), same shape as t.
    '''
    decay0 = np.exp(-b0 * t)
    cp = (a0 * t * decay0 + a1 * np.exp(-b1 * t) - a1 * decay0
          + a2 * np.exp(-b2 * t) - a2 * decay0)
    return cp
def feng_srtm_fun_t(t, a0, a1, a2, a3, b0, b1, b2, b3):
    '''
    Feng-input/SRTM reference-region model evaluated at time(s) t.

    The analytic solution is assembled from ten closed-form terms
    (cr1..cr10); terms that blow up for particular parameter
    combinations (inf or nan from degenerate denominators) are zeroed
    before summing.

    Args:
        t: scalar or array of time points.
        a0, a1, a2, a3: amplitude parameters (a0-a2: Feng input;
            a3: tissue response).
        b0, b1, b2, b3: rate parameters (b0-b2: Feng input; b3: tissue).

    Returns:
        Model curve value(s), same shape as t.
    '''
    cr1 = a0 * a3 * (-b0 * t * np.exp(b3 * t) / (
            b0 ** 2 * np.exp(b0 * t) - 2 * b0 * b3 * np.exp(b0 * t) + b3 ** 2 * np.exp(b0 * t))
                     + b3 * t * np.exp(b3 * t) / (
                             b0 ** 2 * np.exp(b0 * t) - 2 * b0 * b3 * np.exp(b0 * t) + b3 ** 2 * np.exp(b0 * t))
                     - np.exp(b3 * t) / (b0 ** 2 * np.exp(b0 * t) - 2 * b0 * b3 * np.exp(b0 * t) + b3 ** 2 * np.exp(
                b0 * t))) * np.exp(-b3 * t)
    cr2 = a0 * a3 * np.exp(-b3 * t) / (b0 ** 2 - 2 * b0 * b3 + b3 ** 2)
    cr3 = - a1 * a3 / (b1 * np.exp(b1 * t) - b3 * np.exp(b1 * t))
    cr4 = a1 * a3 / (b0 * np.exp(b0 * t) - b3 * np.exp(b0 * t))
    cr5 = a1 * a3 * np.exp(-b3 * t) / (b1 - b3)
    cr6 = - a1 * a3 * np.exp(-b3 * t) / (b0 - b3)
    cr7 = - a2 * a3 / (b2 * np.exp(b2 * t) - b3 * np.exp(b2 * t))
    cr8 = a2 * a3 / (b0 * np.exp(b0 * t) - b3 * np.exp(b0 * t))
    cr9 = a2 * a3 * np.exp(-b3 * t) / (b2 - b3)
    cr10 = - a2 * a3 * np.exp(-b3 * t) / (b0 - b3)
    cr = np.stack((cr1, cr2, cr3, cr4, cr5, cr6, cr7, cr8, cr9, cr10))
    # mask numerically degenerate terms before summing
    cr[np.isinf(cr)] = 0
    cr[np.isnan(cr)] = 0
    cr = np.sum(cr, axis=0)
    return cr
def feng_srtm(tac, dt, w, fig):
    '''
    Fit the Feng-input/SRTM reference model to frame-integrated TAC
    data.

    Args:
        tac: 1D array TAC; negatives are clipped to zero in place.
        dt: 2 x n_frames array of frame start/end times.
        w: per-frame weights, or None for uniform weights.
        fig: if truthy, print the fit and plot the input, model and data.

    Returns:
        Tuple (tac1f, p): fitted curve at 1-unit time steps and the
        fitted parameters (a0, a1, a2, a3, b0, b1, b2, b3).
    '''
    tac[tac < 0] = 0.0
    if w is None:
        w = 1
    ts_te_w = (dt[0,], dt[1,], w)
    p0 = [1, 2, 3, 4, 0.1, 0.2, 0.3, 0.4]
    p, _ = curve_fit(feng_srtm_fun, ts_te_w, tac * w, p0=p0)
    a0, a1, a2, a3, b0, b1, b2, b3 = p
    t1 = np.arange(np.amax(dt))
    tac1f = feng_srtm_fun_t(t1, a0, a1, a2, a3, b0, b1, b2, b3)
    # (debug print of the fitted curve removed; it flooded stdout on
    # every call)
    if fig:
        print(p)
        cp1f = feng_fun_t(t1, a0, a1, a2, b0, b1, b2)
        mft = kt.dt2mft(dt)
        plt.plot(t1, cp1f, 'r', t1, tac1f, 'b', mft, tac, 'go')
        plt.show()
    return tac1f, p
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # select model args from user_inputs
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
def get_model_inputs(user_inputs, model_name):
    '''
    Select, from user_inputs, the arguments accepted by the named model.

    Args:
        user_inputs: dict mapping argument names to user-supplied values.
        model_name: name of a model function defined in this module.

    Returns:
        dict containing only the entries of user_inputs whose keys are
        parameters of the model function.
    '''
    sig = inspect.signature(globals()[model_name])
    # membership test instead of `user_inputs.get(n, 0) is not 0`: the
    # old identity check silently dropped arguments whose value was 0
    # and relied on CPython's small-int caching
    return {name: user_inputs[name]
            for name in sig.parameters
            if name in user_inputs}
| 32.846989 | 141 | 0.540362 | 5,412 | 33,274 | 3.177938 | 0.058574 | 0.041921 | 0.017908 | 0.019536 | 0.872725 | 0.856445 | 0.82656 | 0.794349 | 0.753416 | 0.743066 | 0 | 0.072824 | 0.301737 | 33,274 | 1,012 | 142 | 32.879447 | 0.667427 | 0.160696 | 0 | 0.780083 | 0 | 0 | 0.018268 | 0.000799 | 0 | 0 | 0 | 0 | 0 | 1 | 0.048409 | false | 0 | 0.016598 | 0 | 0.113416 | 0.006916 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
136edea0956ad1458f1539cbf65ce44a7761e39d | 22,249 | py | Python | shapes.py | olincollege/gimik | f0a5d2827c8f58a2348c3373557e1159f580704c | [
"MIT"
] | null | null | null | shapes.py | olincollege/gimik | f0a5d2827c8f58a2348c3373557e1159f580704c | [
"MIT"
] | null | null | null | shapes.py | olincollege/gimik | f0a5d2827c8f58a2348c3373557e1159f580704c | [
"MIT"
] | null | null | null | '''
Contains the Shape parent class and the shape subclasses containing all the
possible 3D shapes.
'''
import numpy as np
from abc import abstractmethod
def get_points_to_plot(axis_1_max, axis_1_min, axis_2_max,
                       axis_2_min, axis_3_val):
    '''
    Gets the 3x3 grid of points used in plotting one face of a cuboid.

    Args:
        axis_1_max. A float representing the maximum value of the first
            input axis.
        axis_1_min. A float representing the minimum value of the first
            input axis.
        axis_2_max. A float representing the maximum value of the second
            input axis.
        axis_2_min. A float representing the minimum value of the second
            input axis.
        axis_3_val. A float representing the (constant) value of the third
            input axis.
    Returns:
        axis_1_a. A 3x3 numpy array of first-axis values for plotting.
        axis_2_a. A 3x3 numpy array of second-axis values for plotting.
        axis_3_a. A 3x3 numpy array filled with axis_3_val.
    '''
    # linspace always yields exactly 3 points (min, mid, max); the
    # previous arange with a computed float step could include a spurious
    # extra point through rounding and crashed when max == min
    axis_1 = np.linspace(axis_1_min, axis_1_max, 3)
    axis_2 = np.linspace(axis_2_min, axis_2_max, 3)
    axis_1_a, axis_2_a = np.meshgrid(axis_1, axis_2)
    axis_3_a = np.full_like(axis_1_a, axis_3_val)
    return axis_1_a, axis_2_a, axis_3_a
class Shape():
    '''
    Base class for all 3D shapes in a gimik workspace.

    Attributes:
        _x. A float representing the x-coordinate of the center of the shape.
        _y. A float representing the y-coordinate of the center of the shape.
        _z. A float representing the z-coordinate of the center of the shape.
    '''
    def __init__(self):
        '''
        Create a shape centered at the origin; subclasses override this.
        '''
        self._x = 0
        self._y = 0
        self._z = 0
    @abstractmethod
    def move(self):
        '''
        Translate the shape; concrete subclasses provide the behavior.
        '''
    @abstractmethod
    def rotate(self):
        '''
        Rotate the shape; concrete subclasses provide the behavior.
        '''
    @abstractmethod
    def scale(self, factor):
        '''
        Resize the shape; concrete subclasses provide the behavior.

        Args:
            factor. A float representing the amount to scale the shape by.
        '''
    @abstractmethod
    def __repr__(self):
        '''
        Describe the shape as a string; concrete subclasses provide it.
        '''
    def set_pos(self, x_coord, y_coord, z_coord):
        '''
        Move the center of the shape to the given coordinates.

        Args:
            x_coord. A float, the new x-coordinate of the shape's center.
            y_coord. A float, the new y-coordinate of the shape's center.
            z_coord. A float, the new z-coordinate of the shape's center.
        '''
        self._x, self._y, self._z = x_coord, y_coord, z_coord
    def set_x(self, x_coord):
        '''
        Move the center of the shape along the x-axis.

        Args:
            x_coord. A float, the new x-coordinate of the shape's center.
        '''
        self._x = x_coord
    def set_y(self, y_coord):
        '''
        Move the center of the shape along the y-axis.

        Args:
            y_coord. A float, the new y-coordinate of the shape's center.
        '''
        self._y = y_coord
    def set_z(self, z_coord):
        '''
        Move the center of the shape along the z-axis.

        Args:
            z_coord. A float, the new z-coordinate of the shape's center.
        '''
        self._z = z_coord
    @abstractmethod
    def plot(self, axes, color = 'lightskyblue'):
        '''
        Draw the shape on the supplied matplotlib axes.

        Args:
            axes. A set of matplotlib axes for the shape to be plotted on.
            color. The color for the shape to be plotted with.
        '''
# List of Shapes:
class Cuboid(Shape):
'''
A rectangular prism within a gimik workspace.
Attributes:
_width. A float representing the width (x-extent) of the cuboid.
_height. A float representing the height (y-extent) of the cuboid,
as spanned by plot().
_depth. A float representing the depth (z-extent) of the cuboid,
as spanned by plot().
_x. A float representing the x-coordinate of the center of the cuboid.
_y. A float representing the y-coordinate of the center of the cuboid.
_z. A float representing the z-coordinate of the center of the cuboid.
_name. A string representing a name to characterize the cuboid.
NOTE(review): plot() spans _height along the y axis and _depth along
the z axis; some earlier docs described _depth as the y-component and
_height as the z-component -- confirm the intended axis convention.
'''
def __init__(self, width, height, depth, x_coord=0, y_coord=0, z_coord=0,
name='An unnamed cuboid'):
# Store dimensions and display name; coordinates default to the origin.
self._width = width
self._height = height
self._depth = depth
self._name = name
# Coordinates of center of the shape (if none given, puts
# shape at origin)
self._x = x_coord
self._y = y_coord
self._z = z_coord
@property
def x_pos(self):
'''
Returns the private attribute _x (center x-coordinate).
'''
return self._x
@property
def y_pos(self):
'''
Returns the private attribute _y (center y-coordinate).
'''
return self._y
@property
def z_pos(self):
'''
Returns the private attribute _z (center z-coordinate).
'''
return self._z
@property
def height(self):
'''
Returns the private attribute _height.
'''
return self._height
@property
def width(self):
'''
Returns the private attribute _width.
'''
return self._width
@property
def depth(self):
'''
Returns the private attribute _depth.
'''
return self._depth
def scale(self, factor):
'''
Equally scales all the dimensions of the shape by the input factor.
Negative factors are treated as positive (abs is applied).
Args:
factor. A float representing the amount the shape will be scaled by
'''
self._width = self._width * abs(factor)
self._height = self._height * abs(factor)
self._depth = self._depth * abs(factor)
def scale_width(self, factor):
'''
Scales the x-dimension of the shape by the input factor.
Args:
factor. A float representing the amount the x-dimension of the
shape will be scaled by
'''
self._width = self._width * abs(factor)
def scale_height(self, factor):
'''
Scales the y-dimension of the shape by the input factor.
Args:
factor. A float representing the amount the y-dimension of the
shape will be scaled by
'''
self._height = self._height * abs(factor)
def scale_depth(self, factor):
'''
Scales the z-dimension of the shape by the input factor.
Args:
factor. A float representing the amount the z-dimension of the
shape will be scaled by
'''
self._depth = self._depth * abs(factor)
def move_x(self, displacement):
'''
Changes the x-coordinate of the center of the shape by the input
displacement.
Args:
displacement. A float representing the amount the x-coordinate of
the center of the shape will be changed by
'''
self._x += displacement
def move_y(self, displacement):
'''
Changes the y-coordinate of the center of the shape by the input
displacement.
Args:
displacement. A float representing the amount the y-coordinate of
the center of the shape will be changed by
'''
self._y += displacement
def move_z(self, displacement):
'''
Changes the z-coordinate of the center of the shape by the input
displacement.
Args:
displacement. A float representing the amount the z-coordinate of
the center of the shape will be changed by
'''
self._z += displacement
def set_name(self, new_name):
'''
Changes the name of the shape to the name specified by the input
argument.
Args:
new_name. A string representing the new name of the shape
'''
self._name = new_name
@property
def name(self):
'''
Returns the private attribute _name.
'''
return self._name
def plot(self, axes, color='lightskyblue'):
'''
Plots the shape on the input axes as six rectangular faces.
Args:
axes. A set of matplotlib axes representing the canvas to draw the
shape on.
color. A string representing the color of the shape to be plotted.
Defaults to 'lightskyblue' unless otherwise specified.
'''
# Face bounds: height spans y and depth spans z (see class NOTE).
min_x = self._x - self._width / 2
max_x = self._x + self._width / 2
min_y = self._y - self._height / 2
max_y = self._y + self._height / 2
min_z = self._z - self._depth / 2
max_z = self._z + self._depth / 2
# Each face holds one coordinate constant; that fixed value is the
# last argument to get_points_to_plot.
# top
x_a, z_a, y_a = get_points_to_plot(max_x, min_x, max_z, min_z, max_y)
axes.plot_surface(x_a, y_a, z_a, color=color)
# bottom
x_a, z_a, y_a = get_points_to_plot(max_x, min_x, max_z, min_z, min_y)
axes.plot_surface(x_a, y_a, z_a, color=color)
# left
y_a, z_a, x_a = get_points_to_plot(max_y, min_y, max_z, min_z, min_x)
axes.plot_surface(x_a, y_a, z_a, color=color)
# right
y_a, z_a, x_a = get_points_to_plot(max_y, min_y, max_z, min_z, max_x)
axes.plot_surface(x_a, y_a, z_a, color=color)
# front
x_a, y_a, z_a = get_points_to_plot(max_x, min_x, max_y, min_y, max_z)
axes.plot_surface(x_a, y_a, z_a, color=color)
# back
x_a, y_a, z_a = get_points_to_plot(max_x, min_x, max_y, min_y, min_z)
axes.plot_surface(x_a, y_a, z_a, color=color)
def __repr__(self):
'''
Return the text representation of a Cuboid object.
NOTE(review): format differs from the Spheroid/Cylinder reprs (no
spaces after the colons, no leading shape label) -- confirm intended.
'''
return f'Name: {self.name}\nWidth:{self._width}\nHeight:\
{self._height}\nDepth:{self._depth}\nCoordinates:{self._x},\
{self._y},{self._z}'
class Spheroid(Shape):
    '''
    A spheroid (ellipsoid) within a gimik workspace.

    Attributes:
        _width. A float giving the full extent along x (plot() uses
            _width/2 as the x semi-axis).
        _height. A float giving the full extent along y (plot() uses
            _height/2 as the y semi-axis).
        _depth. A float giving the full extent along z (plot() uses
            _depth/2 as the z semi-axis).
        _x. A float representing the x-coordinate of the center.
        _y. A float representing the y-coordinate of the center.
        _z. A float representing the z-coordinate of the center.
        _name. A string representing a name to characterize the spheroid.
    '''
    def __init__(self, width, height, depth, x_coord=0, y_coord=0, z_coord=0,
                 name='An unnamed spheroid'):
        '''
        Store the spheroid's dimensions, name, and center coordinates.
        If no coordinates are given, the shape is placed at the origin.
        '''
        self._width = width
        self._height = height
        self._depth = depth
        self._name = name
        self._x = x_coord
        self._y = y_coord
        self._z = z_coord
    @property
    def x_pos(self):
        '''
        Returns the private attribute _x (center x-coordinate).
        '''
        return self._x
    @property
    def y_pos(self):
        '''
        Returns the private attribute _y (center y-coordinate).
        '''
        return self._y
    @property
    def z_pos(self):
        '''
        Returns the private attribute _z (center z-coordinate).
        '''
        return self._z
    @property
    def height(self):
        '''
        Returns the private attribute _height.
        '''
        return self._height
    @property
    def width(self):
        '''
        Returns the private attribute _width.
        '''
        return self._width
    @property
    def depth(self):
        '''
        Returns the private attribute _depth.
        '''
        return self._depth
    def scale(self, factor):
        '''
        Equally scales all the dimensions of the shape by the input factor.
        Negative factors are treated as positive (abs is applied).
        Args:
            factor. A float representing the amount the shape will be
                scaled by
        '''
        self._width = self._width * abs(factor)
        self._height = self._height * abs(factor)
        self._depth = self._depth * abs(factor)
    def scale_width(self, factor):
        '''
        Scales the x-dimension of the shape by the input factor.
        Args:
            factor. A float representing the amount the x-dimension of the
                shape will be scaled by
        '''
        self._width = self._width * abs(factor)
    def scale_height(self, factor):
        '''
        Scales the y-dimension of the shape by the input factor.
        Args:
            factor. A float representing the amount the y-dimension of the
                shape will be scaled by
        '''
        self._height = self._height * abs(factor)
    def scale_depth(self, factor):
        '''
        Scales the z-dimension of the shape by the input factor.
        Args:
            factor. A float representing the amount the z-dimension of the
                shape will be scaled by
        '''
        self._depth = self._depth * abs(factor)
    def move_x(self, displacement):
        '''
        Changes the x-coordinate of the center of the shape by the input
        displacement.
        Args:
            displacement. A float representing the amount the x-coordinate
                of the center of the shape will be changed by
        '''
        self._x += displacement
    def move_y(self, displacement):
        '''
        Changes the y-coordinate of the center of the shape by the input
        displacement.
        Args:
            displacement. A float representing the amount the y-coordinate
                of the center of the shape will be changed by
        '''
        self._y += displacement
    def move_z(self, displacement):
        '''
        Changes the z-coordinate of the center of the shape by the input
        displacement.
        Args:
            displacement. A float representing the amount the z-coordinate
                of the center of the shape will be changed by
        '''
        self._z += displacement
    def set_name(self, new_name):
        '''
        Changes the name of the shape to the name specified by the input
        argument.
        Args:
            new_name. A string representing the new name of the shape
        '''
        self._name = new_name
    @property
    def name(self):
        '''
        Returns the private attribute _name.
        '''
        return self._name
    def plot(self, axes, color='lightskyblue'):
        '''
        Plots the shape on the input axes as a parametric surface.
        Args:
            axes. A set of matplotlib axes representing the canvas to draw
                the shape on.
            color. A string representing the color of the shape to be
                plotted. Defaults to 'lightskyblue' unless otherwise
                specified.
        '''
        u_vals = np.linspace(0, 2 * np.pi, 50)
        v_vals = np.linspace(0, np.pi, 50)
        x_vals = self._width/2 * np.outer(np.cos(u_vals), np.sin(v_vals))
        y_vals = self._height/2 * np.outer(np.sin(u_vals), np.sin(v_vals))
        z_vals = self._depth/2 * np.outer(np.ones(np.size(u_vals)),
                                          np.cos(v_vals))
        # Translate the origin-centered mesh to the spheroid's center in
        # one vectorized step.  (Replaces per-row `for item in arr:
        # item += offset` loops, which only worked because numpy row views
        # mutate the parent array in place.)
        x_vals += self.x_pos
        y_vals += self.y_pos
        z_vals += self.z_pos
        axes.plot_surface(x_vals, y_vals, z_vals, color=color)
    def __repr__(self):
        '''
        Return the text representation of a Spheroid object.
        '''
        return f'Name: {self.name}\nSpheroid:\nWidth: {self._width}\nHeight: \
{self._height}\nDepth: {self._depth}\nCoordinates: {self._x}, \
{self._y}, {self._z}'
class Cylinder(Shape):
    '''
    A cylinder within a gimik workspace.

    Attributes:
        _width. A float giving the diameter along x (plot() uses _width/2
            as the x radius).
        _height. A float giving the diameter along y (plot() uses
            _height/2 as the y radius).
        _depth. A float giving the length of the cylinder along z.
        _x. A float representing the x-coordinate of the center.
        _y. A float representing the y-coordinate of the center.
        _z. A float representing the z-coordinate of the center.
        _name. A string representing a name to characterize the cylinder.

    NOTE(review): the original inline comments called _height the z
    direction and _depth the y diameter, but plot() uses _height/2 as the
    y radius and spans z over _depth -- confirm the intended convention.
    '''
    def __init__(self, width, height, depth, x_coord=0, y_coord=0, z_coord=0,
                 name='An unnamed cylinder'):
        '''
        Store the cylinder's dimensions, name, and center coordinates.
        If no coordinates are given, the shape is placed at the origin.
        '''
        self._width = width    # diameter in the x direction
        self._height = height  # diameter in the y direction (see plot)
        self._depth = depth    # length along the z axis (see plot)
        self._name = name
        self._x = x_coord
        self._y = y_coord
        self._z = z_coord
    @property
    def x_pos(self):
        '''
        Returns the private attribute _x (center x-coordinate).
        '''
        return self._x
    @property
    def y_pos(self):
        '''
        Returns the private attribute _y (center y-coordinate).
        '''
        return self._y
    @property
    def z_pos(self):
        '''
        Returns the private attribute _z (center z-coordinate).
        '''
        return self._z
    @property
    def height(self):
        '''
        Returns the private attribute _height.
        '''
        return self._height
    @property
    def width(self):
        '''
        Returns the private attribute _width.
        '''
        return self._width
    @property
    def depth(self):
        '''
        Returns the private attribute _depth.
        '''
        return self._depth
    def scale(self, factor):
        '''
        Equally scales all the dimensions of the shape by the input factor.
        Negative factors are treated as positive (abs is applied).
        Args:
            factor. A float representing the amount the shape will be
                scaled by
        '''
        self._width = self._width * abs(factor)
        self._height = self._height * abs(factor)
        self._depth = self._depth * abs(factor)
    def scale_width(self, factor):
        '''
        Scales the x-dimension of the shape by the input factor.
        Args:
            factor. A float representing the amount the x-dimension of the
                shape will be scaled by
        '''
        self._width = self._width * abs(factor)
    def scale_height(self, factor):
        '''
        Scales the y-dimension of the shape by the input factor.
        Args:
            factor. A float representing the amount the y-dimension of the
                shape will be scaled by
        '''
        self._height = self._height * abs(factor)
    def scale_depth(self, factor):
        '''
        Scales the z-dimension of the shape by the input factor.
        Args:
            factor. A float representing the amount the z-dimension of the
                shape will be scaled by
        '''
        self._depth = self._depth * abs(factor)
    def move_x(self, displacement):
        '''
        Changes the x-coordinate of the center of the shape by the input
        displacement.
        Args:
            displacement. A float representing the amount the x-coordinate
                of the center of the shape will be changed by
        '''
        self._x += displacement
    def move_y(self, displacement):
        '''
        Changes the y-coordinate of the center of the shape by the input
        displacement.
        Args:
            displacement. A float representing the amount the y-coordinate
                of the center of the shape will be changed by
        '''
        self._y += displacement
    def move_z(self, displacement):
        '''
        Changes the z-coordinate of the center of the shape by the input
        displacement.
        Args:
            displacement. A float representing the amount the z-coordinate
                of the center of the shape will be changed by
        '''
        self._z += displacement
    def set_name(self, new_name):
        '''
        Changes the name of the shape to the name specified by the input
        argument.
        Args:
            new_name. A string representing the new name of the shape
        '''
        self._name = new_name
    @property
    def name(self):
        '''
        Returns the private attribute _name.
        '''
        return self._name
    def plot(self, axes, color='lightskyblue'):
        '''
        Plots the shape on the input axes.
        Args:
            axes. A set of matplotlib axes representing the canvas to draw
                the shape on.
            color. A string representing the color of the shape to be
                plotted. Defaults to 'lightskyblue' unless otherwise
                specified.
        '''
        # Elliptical cross-section: width/2 is the x radius and height/2
        # the y radius.
        radius_x = 1/2*self.width
        radius_y = 1/2*self.height
        # BUGFIX: the z samples were centered on 0 and never offset by
        # self._z, so the cylinder was always drawn at z=0 even though x
        # and y were offset by self._x/self._y.  Offset z like the other
        # axes (and like Cuboid/Spheroid do).
        z_vals = np.linspace(-self.depth / 2, self.depth / 2, 50) + self._z
        theta = np.linspace(0, 2*np.pi, 50)
        theta_grid, z_grid = np.meshgrid(theta, z_vals)
        x_grid = radius_x*np.cos(theta_grid) + self._x
        y_grid = radius_y*np.sin(theta_grid) + self._y
        # Surface sampling strides passed through to plot_surface.
        rstride = 20
        cstride = 10
        axes.plot_surface(x_grid, y_grid, z_grid, rstride=rstride,
                          cstride=cstride, color=color)
    def __repr__(self):
        '''
        Return the text representation of a Cylinder object.
        '''
        return f'Name: {self.name}\nCylinder:\nWidth: {self._width}\nHeight: \
{self._height}\nDepth: {self._depth}\nCoordinates: {self._x}, \
{self._y}, {self._z}'
| 29.744652 | 83 | 0.583622 | 2,966 | 22,249 | 4.217802 | 0.058665 | 0.05036 | 0.051159 | 0.090647 | 0.865068 | 0.859153 | 0.834133 | 0.820064 | 0.813669 | 0.784732 | 0 | 0.005984 | 0.339026 | 22,249 | 747 | 84 | 29.784471 | 0.844689 | 0.467122 | 0 | 0.720833 | 0 | 0 | 0.011174 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.270833 | false | 0 | 0.008333 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
138028e54d231a98825eee644a69f0756dc8d091 | 21,274 | py | Python | test/test_daemail.py | jwodder/daemail | 5e6c68742e08180255c60fa7fbe5e88ccaa55d9a | [
"MIT"
] | 2 | 2016-04-30T02:32:12.000Z | 2020-09-10T10:54:55.000Z | test/test_daemail.py | jwodder/daemail | 5e6c68742e08180255c60fa7fbe5e88ccaa55d9a | [
"MIT"
] | null | null | null | test/test_daemail.py | jwodder/daemail | 5e6c68742e08180255c60fa7fbe5e88ccaa55d9a | [
"MIT"
] | 1 | 2017-06-11T16:55:41.000Z | 2017-06-11T16:55:41.000Z | from datetime import datetime, timedelta, timezone
import email
from email import policy
import mailbox
import os
from pathlib import Path
import subprocess
from traceback import format_exception
from types import SimpleNamespace
from typing import Any, Dict, List
from click.testing import CliRunner, Result
from mailbits import email2dict
import pytest
from pytest_mock import MockerFixture
from daemail.__main__ import main
from daemail.message import USER_AGENT
# Fixed UTC-4 offset used to make the mocked timestamps timezone-aware.
w4 = timezone(timedelta(hours=-4))
# Deterministic start/end instants returned (in order) by the patched
# daemail.util.dtnow in the tests below.
MOCK_START = datetime(2020, 3, 11, 16, 22, 32, 10203, w4)
MOCK_END = datetime(2020, 3, 11, 16, 24, 19, 102030, w4)
def show_result(r: Result) -> Any:
    """Render a CliRunner result for use in a failing-assert message.

    Returns the formatted traceback when the invocation raised an
    exception, and the captured (stdout, stderr) pair otherwise.
    """
    if r.exception is None:
        return r.stdout, r.stderr
    assert isinstance(r.exc_info, tuple)
    return "".join(format_exception(*r.exc_info))
# Each parametrize case supplies:
#   opts       - daemail CLI options placed before the wrapped command
#   argv       - the wrapped command and its arguments
#   run_kwargs - keyword arguments subprocess.run must be called with
#   cmdresult  - stand-in for the object the mocked subprocess.run returns
#   mailspec   - email2dict-style dict the captured message must equal
@pytest.mark.parametrize(
"opts,argv,run_kwargs,cmdresult,mailspec",
[
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": subprocess.PIPE, "stderr": subprocess.STDOUT},
SimpleNamespace(
returncode=0,
stdout=b"This is the output.\n",
stderr=None,
),
{
"unixfrom": None,
"headers": {
"from": [{"display_name": "Me", "address": "sender@example.nil"}],
"to": [
{"display_name": "", "address": "null@test.test"},
],
"subject": "[DONE] not-a-real-command -x foo.txt",
"user-agent": [USER_AGENT],
"content-type": {
"content_type": "text/plain",
"params": {},
},
},
"preamble": None,
"content": (
"Start Time: 2020-03-11 16:22:32.010203-04:00\n"
"End Time: 2020-03-11 16:24:19.102030-04:00\n"
"Exit Status: 0\n"
"\n"
"Output:\n"
"> This is the output.\n"
),
"epilogue": None,
},
),
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"-t",
"Interested Party <them@org.test>",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": subprocess.PIPE, "stderr": subprocess.STDOUT},
SimpleNamespace(
returncode=0,
stdout=b"This is the output.\n",
stderr=None,
),
{
"unixfrom": None,
"headers": {
"from": [{"display_name": "Me", "address": "sender@example.nil"}],
"to": [
{"display_name": "", "address": "null@test.test"},
{
"display_name": "Interested Party",
"address": "them@org.test",
},
],
"subject": "[DONE] not-a-real-command -x foo.txt",
"user-agent": [USER_AGENT],
"content-type": {
"content_type": "text/plain",
"params": {},
},
},
"preamble": None,
"content": (
"Start Time: 2020-03-11 16:22:32.010203-04:00\n"
"End Time: 2020-03-11 16:24:19.102030-04:00\n"
"Exit Status: 0\n"
"\n"
"Output:\n"
"> This is the output.\n"
),
"epilogue": None,
},
),
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
],
["space command", "space file"],
{"stdout": subprocess.PIPE, "stderr": subprocess.STDOUT},
SimpleNamespace(
returncode=0,
stdout=b"This is the output.\n",
stderr=None,
),
{
"unixfrom": None,
"headers": {
"from": [{"display_name": "Me", "address": "sender@example.nil"}],
"to": [{"display_name": "", "address": "null@test.test"}],
"subject": "[DONE] 'space command' 'space file'",
"user-agent": [USER_AGENT],
"content-type": {
"content_type": "text/plain",
"params": {},
},
},
"preamble": None,
"content": (
"Start Time: 2020-03-11 16:22:32.010203-04:00\n"
"End Time: 2020-03-11 16:24:19.102030-04:00\n"
"Exit Status: 0\n"
"\n"
"Output:\n"
"> This is the output.\n"
),
"epilogue": None,
},
),
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"--nonempty",
"--no-stdout",
"--no-stderr",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": None, "stderr": None},
SimpleNamespace(
returncode=1,
stdout=None,
stderr=None,
),
{
"unixfrom": None,
"headers": {
"from": [{"display_name": "Me", "address": "sender@example.nil"}],
"to": [{"display_name": "", "address": "null@test.test"}],
"subject": "[FAILED] not-a-real-command -x foo.txt",
"user-agent": [USER_AGENT],
"content-type": {
"content_type": "text/plain",
"params": {},
},
},
"preamble": None,
"content": (
"Start Time: 2020-03-11 16:22:32.010203-04:00\n"
"End Time: 2020-03-11 16:24:19.102030-04:00\n"
"Exit Status: 1\n"
),
"epilogue": None,
},
),
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"--failure-only",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": subprocess.PIPE, "stderr": subprocess.STDOUT},
SimpleNamespace(
returncode=1,
stdout=b"Something went wrong.\n",
stderr=None,
),
{
"unixfrom": None,
"headers": {
"from": [{"display_name": "Me", "address": "sender@example.nil"}],
"to": [{"display_name": "", "address": "null@test.test"}],
"subject": "[FAILED] not-a-real-command -x foo.txt",
"user-agent": [USER_AGENT],
"content-type": {
"content_type": "text/plain",
"params": {},
},
},
"preamble": None,
"content": (
"Start Time: 2020-03-11 16:22:32.010203-04:00\n"
"End Time: 2020-03-11 16:24:19.102030-04:00\n"
"Exit Status: 1\n"
"\n"
"Output:\n"
"> Something went wrong.\n"
),
"epilogue": None,
},
),
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"--nonempty",
"--split",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": subprocess.PIPE, "stderr": subprocess.PIPE},
SimpleNamespace(
returncode=0,
stdout=b"This is the stdout.\n",
stderr=b"",
),
{
"unixfrom": None,
"headers": {
"from": [{"display_name": "Me", "address": "sender@example.nil"}],
"to": [{"display_name": "", "address": "null@test.test"}],
"subject": "[DONE] not-a-real-command -x foo.txt",
"user-agent": [USER_AGENT],
"content-type": {
"content_type": "text/plain",
"params": {},
},
},
"preamble": None,
"content": (
"Start Time: 2020-03-11 16:22:32.010203-04:00\n"
"End Time: 2020-03-11 16:24:19.102030-04:00\n"
"Exit Status: 0\n"
"\n"
"Output:\n"
"> This is the stdout.\n"
),
"epilogue": None,
},
),
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"--nonempty",
"--split",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": subprocess.PIPE, "stderr": subprocess.PIPE},
SimpleNamespace(
returncode=0,
stdout=b"",
stderr=b"This is the stderr.\n",
),
{
"unixfrom": None,
"headers": {
"from": [{"display_name": "Me", "address": "sender@example.nil"}],
"to": [{"display_name": "", "address": "null@test.test"}],
"subject": "[DONE] not-a-real-command -x foo.txt",
"user-agent": [USER_AGENT],
"content-type": {
"content_type": "text/plain",
"params": {},
},
},
"preamble": None,
"content": (
"Start Time: 2020-03-11 16:22:32.010203-04:00\n"
"End Time: 2020-03-11 16:24:19.102030-04:00\n"
"Exit Status: 0\n"
"\n"
"Output: none\n"
"\n"
"Error Output:\n"
"> This is the stderr.\n"
),
"epilogue": None,
},
),
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"--foreground",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": subprocess.PIPE, "stderr": subprocess.STDOUT},
SimpleNamespace(
returncode=0,
stdout=b"This is the output.\n",
stderr=None,
),
{
"unixfrom": None,
"headers": {
"from": [{"display_name": "Me", "address": "sender@example.nil"}],
"to": [{"display_name": "", "address": "null@test.test"}],
"subject": "[DONE] not-a-real-command -x foo.txt",
"user-agent": [USER_AGENT],
"content-type": {
"content_type": "text/plain",
"params": {},
},
},
"preamble": None,
"content": (
"Start Time: 2020-03-11 16:22:32.010203-04:00\n"
"End Time: 2020-03-11 16:24:19.102030-04:00\n"
"Exit Status: 0\n"
"\n"
"Output:\n"
"> This is the output.\n"
),
"epilogue": None,
},
),
],
)
def test_daemail(
mocker: MockerFixture,
opts: List[str],
argv: List[str],
run_kwargs: Dict[str, Any],
cmdresult: Any,
mailspec: Dict[str, Any],
) -> None:
'''
Run the daemail CLI end to end with daemonization, subprocess.run, and
the clock patched out, delivering to an mbox file, and check both the
mock interactions and the captured message against mailspec.
'''
daemon_mock = mocker.patch("daemon.DaemonContext", autospec=True)
run_mock = mocker.patch("subprocess.run", return_value=cmdresult)
# dtnow is called twice: once before and once after the wrapped command.
dtnow_mock = mocker.patch(
"daemail.util.dtnow",
side_effect=[MOCK_START, MOCK_END],
)
runner = CliRunner()
with runner.isolated_filesystem():
Path("config.toml").write_text(
"[outgoing]\n" 'method = "mbox"\n' 'path = "daemail.mbox"\n'
)
r = runner.invoke(main, [*opts, "--config", "config.toml", *argv])
assert r.exit_code == 0, show_result(r)
# --foreground must skip daemonization entirely; otherwise the daemon
# context is created and entered exactly once.
if "--foreground" in opts:
assert not daemon_mock.called
else:
assert daemon_mock.call_count == 1
assert daemon_mock.return_value.__enter__.call_count == 1
run_mock.assert_called_once_with(argv, **run_kwargs)
assert dtnow_mock.call_count == 2
assert sorted(os.listdir()) == ["config.toml", "daemail.mbox"]
mbox = mailbox.mbox("daemail.mbox")
mbox.lock()
msgs = list(mbox)
mbox.close()
assert len(msgs) == 1
msgdict = email2dict(msgs[0])
# Normalize the mbox "From " line to None so mailspec need not predict it.
msgdict["unixfrom"] = None
assert msgdict == mailspec
# Cases where the command result should suppress the e-mail entirely
# (e.g. --failure-only with a zero exit status, or --nonempty with no
# captured output); no mbox file may be created.
@pytest.mark.parametrize(
"opts,argv,run_kwargs,cmdresult",
[
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"--failure-only",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": subprocess.PIPE, "stderr": subprocess.STDOUT},
SimpleNamespace(
returncode=0,
stdout=b"This is the output.\n",
stderr=None,
),
),
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"--nonempty",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": subprocess.PIPE, "stderr": subprocess.STDOUT},
SimpleNamespace(
returncode=0,
stdout=b"",
stderr=None,
),
),
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"--nonempty",
"--split",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": subprocess.PIPE, "stderr": subprocess.PIPE},
SimpleNamespace(
returncode=0,
stdout=b"",
stderr=b"",
),
),
(
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"--nonempty",
"--no-stdout",
"--no-stderr",
],
["not-a-real-command", "-x", "foo.txt"],
{"stdout": None, "stderr": None},
SimpleNamespace(
returncode=0,
stdout=None,
stderr=None,
),
),
],
)
def test_no_message(
mocker: MockerFixture,
opts: List[str],
argv: List[str],
run_kwargs: Dict[str, Any],
cmdresult: Any,
) -> None:
'''
Run the daemail CLI with mocks as in test_daemail and assert that no
message is written: only config.toml remains in the isolated directory.
'''
daemon_mock = mocker.patch("daemon.DaemonContext", autospec=True)
run_mock = mocker.patch("subprocess.run", return_value=cmdresult)
dtnow_mock = mocker.patch(
"daemail.util.dtnow",
side_effect=[MOCK_START, MOCK_END],
)
runner = CliRunner()
with runner.isolated_filesystem():
Path("config.toml").write_text(
"[outgoing]\n" 'method = "mbox"\n' 'path = "daemail.mbox"\n'
)
r = runner.invoke(main, [*opts, "--config", "config.toml", *argv])
assert r.exit_code == 0, show_result(r)
assert daemon_mock.call_count == 1
assert daemon_mock.return_value.__enter__.call_count == 1
run_mock.assert_called_once_with(argv, **run_kwargs)
assert dtnow_mock.call_count == 2
# No daemail.mbox was created -- the message was suppressed.
assert os.listdir() == ["config.toml"]
def test_sendmail_failure(mocker: MockerFixture) -> None:
'''
When the outgoing sendmail command fails, the composed message plus a
report of the sendmail error must be appended to a dead.letter mbox.
The mocked subprocess.run first returns the wrapped command's result,
then raises CalledProcessError for the sendmail invocation.
'''
daemon_mock = mocker.patch("daemon.DaemonContext", autospec=True)
run_mock = mocker.patch(
"subprocess.run",
side_effect=[
SimpleNamespace(
returncode=0,
stdout=b"This is the output.\n",
stderr=None,
),
subprocess.CalledProcessError(
returncode=1,
cmd=["sendmail", "-i", "-t"],
output=b"All the foos are bar when they should be baz.\n",
stderr=b"",
),
],
)
dtnow_mock = mocker.patch(
"daemail.util.dtnow",
side_effect=[MOCK_START, MOCK_END],
)
runner = CliRunner()
argv = ["not-a-real-command", "-x", "foo.txt"]
with runner.isolated_filesystem():
Path("config.toml").write_text('[outgoing]\nmethod = "command"\n')
r = runner.invoke(
main,
[
"-t",
"null@test.test",
"-f",
"Me <sender@example.nil>",
"-c",
"config.toml",
*argv,
],
)
assert r.exit_code == 0, show_result(r)
assert daemon_mock.call_count == 1
assert daemon_mock.return_value.__enter__.call_count == 1
# First call runs the wrapped command; second pipes the message to sendmail.
assert run_mock.call_args_list == [
mocker.call(argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT),
mocker.call(
["sendmail", "-i", "-t"],
shell=False,
input=mocker.ANY,
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
),
]
# Recover the raw message bytes passed to sendmail and check the content.
sent_msg = email.message_from_bytes(
run_mock.call_args_list[1][1]["input"],
policy=policy.default,
)
assert email2dict(sent_msg) == {
"unixfrom": None,
"headers": {
"from": [{"display_name": "Me", "address": "sender@example.nil"}],
"to": [{"display_name": "", "address": "null@test.test"}],
"subject": "[DONE] not-a-real-command -x foo.txt",
"user-agent": [USER_AGENT],
"content-type": {
"content_type": "text/plain",
"params": {},
},
},
"preamble": None,
"content": (
"Start Time: 2020-03-11 16:22:32.010203-04:00\n"
"End Time: 2020-03-11 16:24:19.102030-04:00\n"
"Exit Status: 0\n"
"\n"
"Output:\n"
"> This is the output.\n"
),
"epilogue": None,
}
assert dtnow_mock.call_count == 2
# The undeliverable message must have been appended to dead.letter.
assert sorted(os.listdir()) == ["config.toml", "dead.letter"]
mbox = mailbox.mbox("dead.letter")
mbox.lock()
dead_msgs = list(mbox)
mbox.close()
assert len(dead_msgs) == 1
msgdict = email2dict(dead_msgs[0])
msgdict["unixfrom"] = None
assert msgdict == {
"unixfrom": None,
"headers": {
"from": [{"display_name": "Me", "address": "sender@example.nil"}],
"to": [{"display_name": "", "address": "null@test.test"}],
"subject": "[DONE] not-a-real-command -x foo.txt",
"user-agent": [USER_AGENT],
"content-type": {
"content_type": "text/plain",
"params": {},
},
},
"preamble": None,
"content": (
"Start Time: 2020-03-11 16:22:32.010203-04:00\n"
"End Time: 2020-03-11 16:24:19.102030-04:00\n"
"Exit Status: 0\n"
"\n"
"Output:\n"
"> This is the output.\n"
"\n"
"Additionally, an error occurred while trying to send this e-mail:\n"
"\n"
"Command: ['sendmail', '-i', '-t']\n"
"Exit Status: 1\n"
"\n"
"Output:\n"
"> All the foos are bar when they should be baz.\n"
),
"epilogue": None,
}
# daemail printf '%s\n' $'foo\nbar'
# daemail printf '%s\n' $'foo\xe2bar'
# daemail printf '%s\n' $'go\xf0\x9f\x90\x90at'
# daemail printf '%s\n' $'baaaad \xed\xa0\xbd\xed\xb0\x90 goat'
| 34.202572 | 86 | 0.407681 | 1,925 | 21,274 | 4.425974 | 0.116364 | 0.021596 | 0.032394 | 0.036972 | 0.822183 | 0.800235 | 0.79061 | 0.7723 | 0.756808 | 0.749296 | 0 | 0.048923 | 0.443687 | 21,274 | 621 | 87 | 34.257649 | 0.670976 | 0.00832 | 0 | 0.72562 | 0 | 0.033058 | 0.274749 | 0.025081 | 0 | 0 | 0 | 0 | 0.041322 | 1 | 0.006612 | false | 0 | 0.026446 | 0 | 0.036364 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1390435aae4a999e7618d60d00701ed75badfe87 | 27,835 | py | Python | backend/tests/test_api.py | dineshsonachalam/pytest-basics | 13c6a936dd8baf5d213e6563eadd71774c6ed384 | [
"MIT"
] | 4 | 2018-06-05T13:54:32.000Z | 2020-08-25T01:59:59.000Z | backend/tests/test_api.py | dineshsonachalam/pytest-basics | 13c6a936dd8baf5d213e6563eadd71774c6ed384 | [
"MIT"
] | null | null | null | backend/tests/test_api.py | dineshsonachalam/pytest-basics | 13c6a936dd8baf5d213e6563eadd71774c6ed384 | [
"MIT"
] | 3 | 2019-10-30T21:59:14.000Z | 2020-08-25T02:00:37.000Z | import requests
def test_gender_stats():
    """GET /gender/stats returns HTTP 200 and the expected gender counts."""
    url = "http://0.0.0.0:8002/gender/stats"
    response = requests.request("GET", url, headers={}, data={})
    assert response.status_code == 200
    expected_response = [
        {
            "gender": "Males",
            "value": 21790
        },
        {
            "gender": "Females",
            "value": 10771
        }
    ]
    # List equality already covers both the length check and the
    # element-wise comparison, so the separate len()/zip() assertions and
    # the leftover debug print (which re-ran the comparison) are removed.
    assert response.json() == expected_response
def test_relationship_stats():
    """GET /relationship/stats returns HTTP 200 and the expected counts."""
    url = "http://0.0.0.0:8002/relationship/stats"
    response = requests.request("GET", url, headers={}, data={})
    assert response.status_code == 200
    expected_response = [
        {
            "relationship": "Wife",
            "value": 1568
        },
        {
            "relationship": "Own-child",
            "value": 5068
        },
        {
            "relationship": "Husband",
            "value": 13193
        },
        {
            "relationship": "Not-in-family",
            "value": 8305
        },
        {
            "relationship": "Other-relative",
            "value": 981
        },
        {
            "relationship": "Unmarried",
            "value": 3446
        }
    ]
    # List equality already covers both the length check and the
    # element-wise comparison, so the separate len()/zip() assertions and
    # the leftover debug print (which re-ran the comparison) are removed.
    assert response.json() == expected_response
def test_adult_data():
    """GET /adult/stats returns the first 100 adult-census records, ordered by id."""
    url = "http://0.0.0.0:8002/adult/stats"
    response = requests.request("GET", url, headers={}, data={})
    assert response.status_code == 200

    # (age, education, marital_status, native_country, salary, sex);
    # ids are sequential 1..100 and generated below via enumerate.
    rows = [
        ("39", "Bachelors", "Never-married", "United-States", "<=50K", "Male"),
        ("50", "Bachelors", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("38", "HS-grad", "Divorced", "United-States", "<=50K", "Male"),
        ("53", "11th", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("28", "Bachelors", "Married-civ-spouse", "Cuba", "<=50K", "Female"),
        ("37", "Masters", "Married-civ-spouse", "United-States", "<=50K", "Female"),
        ("49", "9th", "Married-spouse-absent", "Jamaica", "<=50K", "Female"),
        ("52", "HS-grad", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("31", "Masters", "Never-married", "United-States", ">50K", "Female"),
        ("42", "Bachelors", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("37", "Some-college", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("30", "Bachelors", "Married-civ-spouse", "India", ">50K", "Male"),
        ("23", "Bachelors", "Never-married", "United-States", "<=50K", "Female"),
        ("32", "Assoc-acdm", "Never-married", "United-States", "<=50K", "Male"),
        ("40", "Assoc-voc", "Married-civ-spouse", "?", ">50K", "Male"),
        ("34", "7th-8th", "Married-civ-spouse", "Mexico", "<=50K", "Male"),
        ("25", "HS-grad", "Never-married", "United-States", "<=50K", "Male"),
        ("32", "HS-grad", "Never-married", "United-States", "<=50K", "Male"),
        ("38", "11th", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("43", "Masters", "Divorced", "United-States", ">50K", "Female"),
        ("40", "Doctorate", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("54", "HS-grad", "Separated", "United-States", "<=50K", "Female"),
        ("35", "9th", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("43", "11th", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("59", "HS-grad", "Divorced", "United-States", "<=50K", "Female"),
        ("56", "Bachelors", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("19", "HS-grad", "Never-married", "United-States", "<=50K", "Male"),
        ("54", "Some-college", "Married-civ-spouse", "South", ">50K", "Male"),
        ("39", "HS-grad", "Divorced", "United-States", "<=50K", "Male"),
        ("49", "HS-grad", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("23", "Assoc-acdm", "Never-married", "United-States", "<=50K", "Male"),
        ("20", "Some-college", "Never-married", "United-States", "<=50K", "Male"),
        ("45", "Bachelors", "Divorced", "United-States", "<=50K", "Male"),
        ("30", "Some-college", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("22", "Some-college", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("48", "11th", "Never-married", "Puerto-Rico", "<=50K", "Male"),
        ("21", "Some-college", "Never-married", "United-States", "<=50K", "Male"),
        ("19", "HS-grad", "Married-AF-spouse", "United-States", "<=50K", "Female"),
        ("31", "Some-college", "Married-civ-spouse", "?", ">50K", "Male"),
        ("48", "Assoc-acdm", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("31", "9th", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("53", "Bachelors", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("24", "Bachelors", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("49", "HS-grad", "Separated", "United-States", "<=50K", "Female"),
        ("25", "HS-grad", "Never-married", "United-States", "<=50K", "Male"),
        ("57", "Bachelors", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("53", "HS-grad", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("44", "Masters", "Divorced", "United-States", "<=50K", "Female"),
        ("41", "Assoc-voc", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("29", "Assoc-voc", "Never-married", "United-States", "<=50K", "Male"),
        ("25", "Some-college", "Married-civ-spouse", "United-States", "<=50K", "Female"),
        ("18", "HS-grad", "Never-married", "?", "<=50K", "Female"),
        ("47", "Prof-school", "Married-civ-spouse", "Honduras", ">50K", "Female"),
        ("50", "Bachelors", "Divorced", "United-States", ">50K", "Male"),
        ("47", "HS-grad", "Divorced", "United-States", "<=50K", "Male"),
        ("43", "Some-college", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("46", "5th-6th", "Married-civ-spouse", "Mexico", "<=50K", "Male"),
        ("35", "Assoc-voc", "Married-civ-spouse", "Puerto-Rico", "<=50K", "Male"),
        ("41", "HS-grad", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("30", "HS-grad", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("30", "Bachelors", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("32", "7th-8th", "Married-spouse-absent", "?", "<=50K", "Male"),
        ("48", "HS-grad", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("42", "Doctorate", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("29", "Some-college", "Divorced", "United-States", "<=50K", "Male"),
        ("36", "HS-grad", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("28", "Some-college", "Divorced", "United-States", "<=50K", "Female"),
        ("53", "HS-grad", "Married-civ-spouse", "United-States", ">50K", "Female"),
        ("49", "Some-college", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("25", "Some-college", "Never-married", "United-States", "<=50K", "Male"),
        ("19", "Some-college", "Never-married", "United-States", "<=50K", "Male"),
        ("31", "Bachelors", "Separated", "United-States", "<=50K", "Female"),
        ("29", "Bachelors", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("23", "Some-college", "Never-married", "United-States", "<=50K", "Male"),
        ("79", "Some-college", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("27", "HS-grad", "Never-married", "Mexico", "<=50K", "Male"),
        ("40", "Assoc-acdm", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("67", "10th", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("18", "11th", "Never-married", "United-States", "<=50K", "Female"),
        ("31", "7th-8th", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("18", "HS-grad", "Never-married", "United-States", "<=50K", "Male"),
        ("52", "Bachelors", "Married-civ-spouse", "Cuba", "<=50K", "Male"),
        ("46", "HS-grad", "Married-civ-spouse", "United-States", "<=50K", "Female"),
        ("59", "HS-grad", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("44", "HS-grad", "Divorced", "United-States", ">50K", "Female"),
        ("53", "HS-grad", "Divorced", "United-States", "<=50K", "Female"),
        ("49", "HS-grad", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("33", "Masters", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("30", "9th", "Never-married", "United-States", "<=50K", "Male"),
        ("43", "Doctorate", "Never-married", "United-States", ">50K", "Female"),
        ("57", "Assoc-voc", "Married-civ-spouse", "United-States", "<=50K", "Male"),
        ("37", "Some-college", "Divorced", "United-States", "<=50K", "Female"),
        ("28", "Some-college", "Divorced", "United-States", "<=50K", "Female"),
        ("30", "HS-grad", "Married-civ-spouse", "?", "<=50K", "Female"),
        ("34", "Bachelors", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("29", "Some-college", "Never-married", "United-States", "<=50K", "Male"),
        ("48", "Doctorate", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("37", "Some-college", "Married-civ-spouse", "United-States", ">50K", "Male"),
        ("48", "Assoc-acdm", "Divorced", "England", "<=50K", "Female"),
        ("32", "HS-grad", "Never-married", "United-States", "<=50K", "Male"),
    ]
    expected_response = [
        {
            "age": age,
            "education": education,
            "id": record_id,
            "marital_status": marital_status,
            "native_country": native_country,
            "salary": salary,
            "sex": sex,
        }
        for record_id, (age, education, marital_status, native_country,
                        salary, sex) in enumerate(rows, start=1)
    ]
    # Direct list equality replaces the weaker len + zip + all pattern and
    # the dead print after the assert.
    assert response.json() == expected_response
| 28.784902 | 69 | 0.378696 | 2,102 | 27,835 | 4.90961 | 0.095147 | 0.125969 | 0.116279 | 0.201066 | 0.853876 | 0.839535 | 0.81405 | 0.775 | 0.764341 | 0.752035 | 0 | 0.044033 | 0.442752 | 27,835 | 966 | 70 | 28.8147 | 0.621301 | 0 | 0 | 0.624091 | 0 | 0 | 0.360625 | 0.001509 | 0 | 0 | 0 | 0 | 0.009346 | 1 | 0.003115 | false | 0 | 0.001038 | 0 | 0.004154 | 0.003115 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
13921b98d022f8c3529ef3b1d699fd0e7607afb5 | 1,263 | py | Python | 008_largest_product/solve.py | kopwei/euler | 8b1bdc3f8162819d44f3f40121a5ad8e14a3f5ba | [
"Unlicense"
] | null | null | null | 008_largest_product/solve.py | kopwei/euler | 8b1bdc3f8162819d44f3f40121a5ad8e14a3f5ba | [
"Unlicense"
] | null | null | null | 008_largest_product/solve.py | kopwei/euler | 8b1bdc3f8162819d44f3f40121a5ad8e14a3f5ba | [
"Unlicense"
] | null | null | null | s = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
# Project Euler 008: greatest product of WINDOW adjacent digits of the
# 1000-digit number held in `s` (defined above).
nums = [int(c) for c in s]

WINDOW = 13  # original code used start/end counters with end - start == 12
max_prod = 0
for start in range(len(nums) - WINDOW + 1):
    prod = 1
    for digit in nums[start:start + WINDOW]:
        prod *= digit
    if prod > max_prod:
        max_prod = prod
# max_prod is consumed by the print() that follows this block.
print(max_prod) | 78.9375 | 1,006 | 0.901821 | 48 | 1,263 | 23.645833 | 0.416667 | 0.02467 | 0.019383 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.859455 | 0.070467 | 1,263 | 16 | 1,007 | 78.9375 | 0.107325 | 0 | 0 | 0.133333 | 0 | 0 | 0.791139 | 0.791139 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.066667 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
13c735bf1d3cad4c32060d249e6ecd43e9fd5f3d | 61 | py | Python | robotframework-ls/tests/robotframework_debug_adapter_tests/_dap_resources/mypylib.py | mardukbp/robotframework-lsp | 57b4b2b14b712c9bf90577924a920fb9b9e831c7 | [
"ECL-2.0",
"Apache-2.0"
] | 92 | 2020-01-22T22:15:29.000Z | 2022-03-31T05:19:16.000Z | robotframework-ls/tests/robotframework_debug_adapter_tests/_dap_resources/mypylib.py | mardukbp/robotframework-lsp | 57b4b2b14b712c9bf90577924a920fb9b9e831c7 | [
"ECL-2.0",
"Apache-2.0"
] | 604 | 2020-01-25T17:13:27.000Z | 2022-03-31T18:58:24.000Z | robotframework-ls/tests/robotframework_debug_adapter_tests/_dap_resources/mypylib.py | mardukbp/robotframework-lsp | 57b4b2b14b712c9bf90577924a920fb9b9e831c7 | [
"ECL-2.0",
"Apache-2.0"
] | 39 | 2020-02-06T00:38:06.000Z | 2022-03-15T06:14:19.000Z | def some_call():
return "Some Call return" # break here
| 20.333333 | 43 | 0.672131 | 9 | 61 | 4.444444 | 0.666667 | 0.4 | 0.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.229508 | 61 | 2 | 44 | 30.5 | 0.851064 | 0.163934 | 0 | 0 | 0 | 0 | 0.326531 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 7 |
13e5b7f898a94c3108b504e4b309f13ee423bde2 | 27,554 | py | Python | tests/changes/listeners/test_mail.py | vault-the/changes | 37e23c3141b75e4785cf398d015e3dbca41bdd56 | [
"Apache-2.0"
] | 443 | 2015-01-03T16:28:39.000Z | 2021-04-26T16:39:46.000Z | tests/changes/listeners/test_mail.py | vault-the/changes | 37e23c3141b75e4785cf398d015e3dbca41bdd56 | [
"Apache-2.0"
] | 12 | 2015-07-30T19:07:16.000Z | 2016-11-07T23:11:21.000Z | tests/changes/listeners/test_mail.py | vault-the/changes | 37e23c3141b75e4785cf398d015e3dbca41bdd56 | [
"Apache-2.0"
] | 47 | 2015-01-09T10:04:00.000Z | 2020-11-18T17:58:19.000Z | from datetime import datetime
from flask import current_app
import mock
import uuid
from changes.config import db
from changes.constants import Result, Status
from changes.models.option import ItemOption
from changes.models.project import ProjectOption
from changes.lib import build_context_lib
from changes.listeners.mail import filter_recipients, MailNotificationHandler, build_finished_handler
from changes.testutils.cases import TestCase
class FilterRecipientsTestCase(TestCase):
    """Unit tests for the filter_recipients domain whitelist."""

    def test_simple(self):
        # A single allowed domain keeps only the matching address.
        assert filter_recipients(
            ['foo@example.com', 'bar@localhost'],
            ['example.com']) == ['foo@example.com']

        # Every address whose domain is allowed is kept, in order.
        assert filter_recipients(
            ['foo@example.com', 'bar@localhost'],
            ['example.com', 'localhost']) == ['foo@example.com', 'bar@localhost']

        # Addresses with a display name are matched on the email's domain.
        assert filter_recipients(
            ['Foo Bar <foo@example.com>'],
            ['example.com']) == ['Foo Bar <foo@example.com>']
class GetRecipientsTestCase(TestCase):
    """Unit tests for MailNotificationHandler.get_build_recipients."""

    def test_default_options(self):
        # With no project options set, the author of a failed build is notified.
        project = self.create_project()
        author = self.create_author('foo@example.com')
        failed_build = self.create_build(
            project, result=Result.failed, author=author)

        recipients = MailNotificationHandler().get_build_recipients(failed_build)
        assert recipients == ['{0} <foo@example.com>'.format(author.name)]

    def test_without_author_option(self):
        # Setting mail.notify-author to '0' suppresses the author notification.
        project = self.create_project()
        db.session.add(ProjectOption(
            project=project, name='mail.notify-author', value='0'))
        author = self.create_author('foo@example.com')
        failed_build = self.create_build(
            project, result=Result.failed, author=author)
        db.session.commit()

        assert MailNotificationHandler().get_build_recipients(failed_build) == []

    def test_with_addressees(self):
        # Configured mail.notify-addresses are appended after the author.
        project = self.create_project()
        db.session.add(ProjectOption(
            project=project, name='mail.notify-author', value='1'))
        db.session.add(ProjectOption(
            project=project, name='mail.notify-addresses',
            value='test@example.com, bar@example.com'))
        author = self.create_author('foo@example.com')
        failed_build = self.create_build(
            project, result=Result.failed, author=author)
        db.session.commit()

        recipients = MailNotificationHandler().get_build_recipients(failed_build)
        assert recipients == [
            '{0} <foo@example.com>'.format(author.name),
            'test@example.com',
            'bar@example.com',
        ]

    def test_build_passed(self):
        # A passing build never generates notifications.
        project = self.create_project()
        db.session.add(ProjectOption(
            project=project, name='mail.notify-author', value='1'))
        db.session.add(ProjectOption(
            project=project, name='mail.notify-addresses',
            value='test@example.com, bar@example.com'))
        author = self.create_author('foo@example.com')
        passed_build = self.create_build(
            project, result=Result.passed, author=author)
        db.session.commit()

        assert MailNotificationHandler().get_build_recipients(passed_build) == []

    def test_with_revision_addressees(self):
        # mail.notify-addresses-revisions applies to commit builds only.
        project = self.create_project()
        db.session.add(ProjectOption(
            project=project, name='mail.notify-author', value='1'))
        db.session.add(ProjectOption(
            project=project, name='mail.notify-addresses-revisions',
            value='test@example.com, bar@example.com'))
        author = self.create_author('foo@example.com')
        author_recipient = '{0} <{1}>'.format(author.name, author.email)

        diff_build = self.create_build(
            project=project,
            source=self.create_source(
                project, patch=self.create_patch(repository=project.repository)),
            author=author,
            result=Result.failed,
        )
        db.session.commit()

        # Diff (patch) build: author only.
        assert MailNotificationHandler().get_build_recipients(diff_build) == [
            author_recipient]

        # Snapshot build: author only.
        snapshot_build = self.create_build(
            project=project,
            result=Result.failed,
            author=author,
            tags=['test-snapshot'],
        )
        assert MailNotificationHandler().get_build_recipients(snapshot_build) == [
            author_recipient]

        # Commit build: author plus the configured revision addresses.
        commit_build = self.create_build(
            project=project,
            result=Result.failed,
            author=author,
            tags=['commit'],
        )
        assert MailNotificationHandler().get_build_recipients(commit_build) == [
            author_recipient,
            'test@example.com',
            'bar@example.com',
        ]
class GetCollectionRecipientsTestCase(TestCase):
    """Unit tests for MailNotificationHandler.get_collection_recipients.

    The duplicated two-project setup was extracted into _create_diff_build,
    and locals that were assigned but never used (author_recipient /
    author2_recipient) were removed.
    """

    def _create_diff_build(self, notify_author, revision_addresses,
                           author_email, result):
        """Create a patch (diff) build on a fresh project with the given mail
        options; returns (build, formatted author recipient)."""
        project = self.create_project()
        db.session.add(ProjectOption(
            project=project, name='mail.notify-author', value=notify_author))
        db.session.add(ProjectOption(
            project=project, name='mail.notify-addresses-revisions',
            value=revision_addresses))
        author = self.create_author(author_email)
        build = self.create_build(
            project=project,
            source=self.create_source(
                project, patch=self.create_patch(repository=project.repository)),
            author=author,
            result=result,
        )
        return build, '{0} <{1}>'.format(author.name, author.email)

    def test_diff_passed_and_failed(self):
        # A collection where any build passed notifies nobody.
        passed_build, _ = self._create_diff_build(
            '1', 'test@example.com, bar@example.com', 'foo@example.com',
            Result.passed)
        failed_build, _ = self._create_diff_build(
            '0', 'test2@example.com, bar2@example.com', 'foo2@example.com',
            Result.failed)
        db.session.commit()

        context = mock.Mock(
            builds=[{'build': passed_build}, {'build': failed_build}])
        assert MailNotificationHandler().get_collection_recipients(context) == []

    def test_diff_all_failed(self):
        # When every build failed, authors with notify-author enabled are mailed.
        build1, author1_recipient = self._create_diff_build(
            '1', 'test@example.com, bar@example.com', 'foo@example.com',
            Result.failed)
        build2, _ = self._create_diff_build(
            '0', 'test2@example.com, bar2@example.com', 'foo2@example.com',
            Result.failed)
        db.session.commit()

        context = mock.Mock(builds=[{'build': build1}, {'build': build2}])
        assert MailNotificationHandler().get_collection_recipients(context) == [
            author1_recipient]
class SendTestCase(TestCase):
@mock.patch.object(MailNotificationHandler, 'get_collection_recipients')
def test_simple(self, get_collection_recipients):
project = self.create_project(name='test', slug='test')
build = self.create_build(
project,
label='Test diff',
date_started=datetime.utcnow(),
result=Result.failed,
status=Status.finished
)
job = self.create_job(build=build, result=Result.failed)
phase = self.create_jobphase(job=job)
step = self.create_jobstep(phase=phase)
logsource = self.create_logsource(
step=step,
name='console',
)
self.create_logchunk(
source=logsource,
text='hello world',
)
job_link = 'http://example.com/projects/%s/builds/%s/jobs/%s/' % (
project.slug, build.id.hex, job.id.hex,)
log_link = '%slogs/%s/' % (job_link, logsource.id.hex)
get_collection_recipients.return_value = ['foo@example.com', 'Bob <bob@example.com>']
build_finished_handler(build.id)
assert len(self.outbox) == 1
msg = self.outbox[0]
assert msg.subject == '%s failed - %s' % (
'D1234', job.build.label)
assert msg.recipients == ['foo@example.com', 'Bob <bob@example.com>']
assert msg.extra_headers['Reply-To'] == 'foo@example.com, Bob <bob@example.com>'
assert job_link in msg.html
assert job_link in msg.body
assert log_link in msg.html
assert log_link in msg.body
assert msg.as_string()
@mock.patch.object(MailNotificationHandler, 'get_collection_recipients')
def test_simple_with_message(self, get_collection_recipients):
project = self.create_project(name='test', slug='test')
build = self.create_build(
project,
label='Test diff',
date_started=datetime.utcnow(),
result=Result.failed,
status=Status.finished
)
job = self.create_job(build=build, result=Result.failed)
test_message = "I'm a test message!"
test_case = self.create_test(job, message=test_message, result=Result.failed)
phase = self.create_jobphase(job=job)
step = self.create_jobstep(phase=phase)
logsource = self.create_logsource(
step=step,
name='console',
)
self.create_logchunk(
source=logsource,
text='hello world',
)
job_link = 'http://example.com/projects/%s/builds/%s/jobs/%s/' % (
project.slug, build.id.hex, job.id.hex,)
log_link = '%slogs/%s/' % (job_link, logsource.id.hex)
get_collection_recipients.return_value = ['foo@example.com', 'Bob <bob@example.com>']
build_finished_handler(build.id)
assert len(self.outbox) == 1
msg = self.outbox[0]
assert msg.subject == '%s failed - %s' % (
'D1234', job.build.label)
assert msg.recipients == ['foo@example.com', 'Bob <bob@example.com>']
assert msg.extra_headers['Reply-To'] == 'foo@example.com, Bob <bob@example.com>'
assert job_link in msg.html
assert job_link in msg.body
assert log_link in msg.html
assert log_link in msg.body
assert test_message in msg.html
assert test_message in msg.body
assert msg.as_string()
@mock.patch.object(MailNotificationHandler, 'get_collection_recipients')
def test_simple_null_message(self, get_collection_recipients):
project = self.create_project(name='test', slug='test')
build = self.create_build(
project,
label='Test diff',
date_started=datetime.utcnow(),
result=Result.failed,
status=Status.finished
)
job = self.create_job(build=build, result=Result.failed)
test_case = self.create_test(job, message=None, result=Result.failed)
phase = self.create_jobphase(job=job)
step = self.create_jobstep(phase=phase)
logsource = self.create_logsource(
step=step,
name='console',
)
self.create_logchunk(
source=logsource,
text='hello world',
)
job_link = 'http://example.com/projects/%s/builds/%s/jobs/%s/' % (
project.slug, build.id.hex, job.id.hex,)
log_link = '%slogs/%s/' % (job_link, logsource.id.hex)
get_collection_recipients.return_value = ['foo@example.com', 'Bob <bob@example.com>']
build_finished_handler(build.id)
assert len(self.outbox) == 1
msg = self.outbox[0]
assert msg.subject == '%s failed - %s' % (
'D1234', job.build.label)
assert msg.recipients == ['foo@example.com', 'Bob <bob@example.com>']
assert msg.extra_headers['Reply-To'] == 'foo@example.com, Bob <bob@example.com>'
assert job_link in msg.html
assert job_link in msg.body
assert log_link in msg.html
assert log_link in msg.body
assert msg.as_string()
@mock.patch.object(MailNotificationHandler, 'get_collection_recipients')
def test_subject_branch(self, get_collection_recipients):
project = self.create_project(name='test', slug='test')
repo = project.repository
branches = ['master', 'branch1']
revision = self.create_revision(repository=repo, branches=branches)
source = self.create_source(
project=project,
revision=revision,
)
build = self.create_build(
project=project,
source=source,
label='Test diff',
date_started=datetime.utcnow(),
result=Result.failed,
status=Status.finished
)
job = self.create_job(build=build, result=Result.failed)
phase = self.create_jobphase(job=job)
step = self.create_jobstep(phase=phase)
logsource = self.create_logsource(
step=step,
name='console',
)
self.create_logchunk(
source=logsource,
text='hello world',
)
job_link = 'http://example.com/projects/%s/builds/%s/jobs/%s/' % (
project.slug, build.id.hex, job.id.hex,)
log_link = '%slogs/%s/' % (job_link, logsource.id.hex)
get_collection_recipients.return_value = ['foo@example.com', 'Bob <bob@example.com>']
build_finished_handler(build.id)
assert len(self.outbox) == 1
msg = self.outbox[0]
assert msg.subject == '%s failed - %s' % (
'D1234', job.build.label)
assert msg.recipients == ['foo@example.com', 'Bob <bob@example.com>']
assert msg.extra_headers['Reply-To'] == 'foo@example.com, Bob <bob@example.com>'
assert job_link in msg.html
assert job_link in msg.body
assert log_link in msg.html
assert log_link in msg.body
assert msg.as_string()
@mock.patch.object(MailNotificationHandler, 'get_collection_recipients')
def test_multiple_sources(self, get_collection_recipients):
project = self.create_project(name='test', slug='test')
build = self.create_build(
project,
date_started=datetime.utcnow(),
result=Result.failed,
status=Status.finished
)
job = self.create_job(build=build, result=Result.failed)
phase = self.create_jobphase(job=job)
step = self.create_jobstep(phase=phase)
logsource = self.create_logsource(
step=step,
name='console',
)
self.create_logchunk(
source=logsource,
text='hello world',
)
phase2 = self.create_jobphase(job=job, label='other')
step2 = self.create_jobstep(phase=phase2)
logsource2 = self.create_logsource(
step=step2,
name='other',
)
self.create_logchunk(
source=logsource2,
text='hello world',
)
job_link = 'http://example.com/projects/%s/builds/%s/jobs/%s/' % (
project.slug, build.id.hex, job.id.hex,)
log_link1 = '%slogs/%s/' % (job_link, logsource.id.hex)
log_link2 = '%slogs/%s/' % (job_link, logsource2.id.hex)
get_collection_recipients.return_value = ['foo@example.com', 'Bob <bob@example.com>']
build_finished_handler(build.id)
assert len(self.outbox) == 1
msg = self.outbox[0]
assert msg.subject == '%s failed - %s' % (
'D1234', job.build.label)
assert msg.recipients == ['foo@example.com', 'Bob <bob@example.com>']
assert msg.extra_headers['Reply-To'] == 'foo@example.com, Bob <bob@example.com>'
assert job_link in msg.html
assert job_link in msg.body
assert log_link1 in msg.html
assert log_link1 in msg.body
assert log_link2 in msg.html
assert log_link2 in msg.body
assert msg.as_string()
@mock.patch.object(MailNotificationHandler, 'get_collection_recipients')
def test_max_shown(self, get_collection_recipients):
project = self.create_project(name='test', slug='test')
build = self.create_build(
project,
label='Test diff',
date_started=datetime.utcnow(),
result=Result.failed,
status=Status.finished
)
job = self.create_job(build=build, result=Result.failed)
phase = self.create_jobphase(job=job)
step = self.create_jobstep(phase=phase)
max_shown = current_app.config.get('MAX_SHOWN_ITEMS_PER_BUILD_MAIL', 3)
total_test_count = max_shown + 1
test_cases = []
for i in range(total_test_count):
test_cases.append(self.create_test(
package='test.group.ClassName',
name='test.group.ClassName.test_foo{}'.format(i),
job=job,
duration=134,
result=Result.failed,
))
get_collection_recipients.return_value = ['foo@example.com', 'Bob <bob@example.com>']
build_finished_handler(build.id)
assert len(self.outbox) == 1
msg = self.outbox[0]
text_content = msg.body
html_content = msg.html
assert text_content
shown_test_count = 0
for test_case in test_cases:
test_link = build_context_lib._get_test_case_uri(test_case)
if test_link in text_content:
shown_test_count += 1
assert shown_test_count == max_shown
assert html_content
assert 'Showing {} out of <strong style="font-weight: bold">{}</strong>'.format(max_shown, total_test_count) in html_content
assert 'See all failing tests (1 remaining)' in html_content
shown_test_count = 0
for test_case in test_cases:
test_link = build_context_lib._get_test_case_uri(test_case)
if test_link in html_content:
shown_test_count += 1
assert shown_test_count == max_shown
@mock.patch.object(MailNotificationHandler, 'get_collection_recipients')
def test_max_shown_multiple_builds(self, get_collection_recipients):
collection_id = uuid.uuid4()
project = self.create_project(name='test', slug='test')
build = self.create_build(
project,
label='Test diff',
date_started=datetime.utcnow(),
result=Result.failed,
status=Status.finished,
collection_id=collection_id,
)
job = self.create_job(build=build, result=Result.failed)
phase = self.create_jobphase(job=job)
step = self.create_jobstep(phase=phase)
max_shown = current_app.config.get('MAX_SHOWN_ITEMS_PER_BUILD_MAIL', 3)
total_test_count = max_shown + 1
test_cases = []
for i in range(total_test_count):
test_cases.append(self.create_test(
package='test.group.ClassName',
name='test.group.ClassName.test_foo{}'.format(i),
job=job,
duration=134,
result=Result.failed,
))
build2 = self.create_build(
project,
label='Test diff 2',
date_started=datetime.utcnow(),
result=Result.failed,
status=Status.finished,
collection_id=collection_id,
)
job2 = self.create_job(build=build2, result=Result.failed)
phase2 = self.create_jobphase(job=job2)
step2 = self.create_jobstep(phase=phase2)
test_case2 = self.create_test(
package='test.group.ClassName',
name='test.group.ClassName.test_bar',
job=job2,
duration=134,
result=Result.failed,
)
get_collection_recipients.return_value = ['foo@example.com', 'Bob <bob@example.com>']
build_finished_handler(build.id)
assert len(self.outbox) == 1
msg = self.outbox[0]
text_content = msg.body
html_content = msg.html
assert 'See all failing tests (1 remaining)' in text_content
assert build_context_lib._get_test_case_uri(test_case2) in text_content
shown_test_count = 0
for test_case in test_cases:
test_link = build_context_lib._get_test_case_uri(test_case)
if test_link in text_content:
shown_test_count += 1
assert shown_test_count == max_shown
assert html_content
assert 'Showing {} out of <strong style="font-weight: bold">{}</strong>'.format(max_shown + 1, total_test_count + 1) in html_content
assert 'See all failing tests (1 remaining)' in html_content
assert build_context_lib._get_test_case_uri(test_case2) in html_content
shown_test_count = 0
for test_case in test_cases:
test_link = build_context_lib._get_test_case_uri(test_case)
if test_link in html_content:
shown_test_count += 1
assert shown_test_count == max_shown
@mock.patch.object(MailNotificationHandler, 'get_collection_recipients')
def test_max_shown_log(self, get_collection_recipients):
project = self.create_project(name='test', slug='test')
build = self.create_build(
project,
label='Test diff',
date_started=datetime.utcnow(),
result=Result.failed,
status=Status.finished
)
job = self.create_job(build=build, result=Result.failed)
phase = self.create_jobphase(job=job)
step = self.create_jobstep(phase=phase)
max_shown = current_app.config.get('MAX_SHOWN_ITEMS_PER_BUILD_MAIL', 3)
total_log_count = max_shown + 1
log_sources = []
for i in range(total_log_count):
log_source = self.create_logsource(
step=step,
name='console' + str(i),
)
self.create_logchunk(
source=log_source,
text='hello world',
)
log_sources.append(log_source)
get_collection_recipients.return_value = ['foo@example.com', 'Bob <bob@example.com>']
build_finished_handler(build.id)
assert len(self.outbox) == 1
msg = self.outbox[0]
text_content = msg.body
html_content = msg.html
job_link = 'http://example.com/projects/%s/builds/%s/jobs/%s/' % (
project.slug, build.id.hex, job.id.hex,)
shown_log_count = 0
for log_source in log_sources:
log_link = '%slogs/%s/' % (job_link, log_source.id.hex)
if log_link in text_content:
shown_log_count += 1
assert shown_log_count == max_shown
shown_log_count = 0
for log_source in log_sources:
log_link = '%slogs/%s/' % (job_link, log_source.id.hex)
if log_link in html_content:
shown_log_count += 1
assert shown_log_count == max_shown
class GetBuildOptionsTestCase(TestCase):
def test_simple(self):
project = self.create_project()
plan = self.create_plan(project)
build = self.create_build(project, result=Result.failed)
job = self.create_job(build, result=Result.failed)
db.session.add(ItemOption(
item_id=plan.id,
name='mail.notify-author',
value='0',
))
db.session.add(ProjectOption(
project_id=project.id,
name='mail.notify-author',
value='1',
))
db.session.add(ProjectOption(
project_id=project.id,
name='mail.notify-addresses',
value='foo@example.com',
))
db.session.flush()
self.create_job_plan(job, plan)
db.session.commit()
handler = MailNotificationHandler()
assert handler.get_build_options(build) == {
'mail.notify-addresses': {'foo@example.com'},
'mail.notify-addresses-revisions': set(),
'mail.notify-author': False,
}
def test_multiple_jobs(self):
project = self.create_project()
build = self.create_build(project, result=Result.failed)
job1 = self.create_job(build, result=Result.failed)
job2 = self.create_job(build, result=Result.failed)
plan1 = self.create_plan(project)
plan2 = self.create_plan(project)
# Plan1 options.
db.session.add(ItemOption(
item_id=plan1.id,
name='mail.notify-addresses',
value='plan1@example.com',
))
db.session.add(ItemOption(
item_id=plan1.id,
name='mail.notify-author',
value='0',
))
# Plan2 options.
db.session.add(ItemOption(
item_id=plan2.id,
name='mail.notify-addresses',
value='plan2@example.com',
))
db.session.add(ItemOption(
item_id=plan2.id,
name='mail.notify-author',
value='1',
))
# Project options (notify-author is set to test that plan options can
# override it).
db.session.add(ProjectOption(
project_id=project.id,
name='mail.notify-author',
value='0',
))
# Set notify addresses to verify that it is not used when all jobs
# override it.
db.session.add(ProjectOption(
project_id=project.id,
name='mail.notify-addresses',
value='foo@example.com',
))
db.session.flush()
for job, plan in [(job1, plan1), (job2, plan2)]:
self.create_job_plan(job, plan)
db.session.commit()
handler = MailNotificationHandler()
assert handler.get_build_options(build) == {
'mail.notify-addresses': {'plan1@example.com', 'plan2@example.com'},
'mail.notify-addresses-revisions': set(),
'mail.notify-author': True,
}
| 36.592297 | 140 | 0.610401 | 3,160 | 27,554 | 5.143038 | 0.064873 | 0.072606 | 0.04098 | 0.029781 | 0.891706 | 0.850418 | 0.835405 | 0.813808 | 0.804762 | 0.798363 | 0 | 0.008519 | 0.275749 | 27,554 | 752 | 141 | 36.640957 | 0.805873 | 0.006859 | 0 | 0.7568 | 0 | 0 | 0.134147 | 0.027999 | 0 | 0 | 0 | 0 | 0.1312 | 1 | 0.0288 | false | 0.0064 | 0.0176 | 0 | 0.0544 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b942d1582bb748d4d7517f084589bec1c6e41a4f | 7,735 | py | Python | etl/parsers/etw/Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task.py | IMULMUL/etl-parser | 76b7c046866ce0469cd129ee3f7bb3799b34e271 | [
"Apache-2.0"
] | 104 | 2020-03-04T14:31:31.000Z | 2022-03-28T02:59:36.000Z | etl/parsers/etw/Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task.py | IMULMUL/etl-parser | 76b7c046866ce0469cd129ee3f7bb3799b34e271 | [
"Apache-2.0"
] | 7 | 2020-04-20T09:18:39.000Z | 2022-03-19T17:06:19.000Z | etl/parsers/etw/Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task.py | IMULMUL/etl-parser | 76b7c046866ce0469cd129ee3f7bb3799b34e271 | [
"Apache-2.0"
] | 16 | 2020-03-05T18:55:59.000Z | 2022-03-01T10:19:28.000Z | # -*- coding: utf-8 -*-
"""
Microsoft-Windows-Mobile-Broadband-Experience-Parser-Task
GUID : 28e25b07-c47f-473d-8b24-2e171cca808a
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1000, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1000_0(Etw):
pattern = Struct(
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1002, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1002_0(Etw):
pattern = Struct(
"id" / WString,
"culture" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1003, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1003_0(Etw):
pattern = Struct(
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1004, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1004_0(Etw):
pattern = Struct(
"id" / WString,
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1005, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1005_0(Etw):
pattern = Struct(
"path" / WString,
"error" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1006, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1006_0(Etw):
pattern = Struct(
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1007, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1007_0(Etw):
pattern = Struct(
"tag" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1008, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1008_0(Etw):
pattern = Struct(
"tag" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1009, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1009_0(Etw):
pattern = Struct(
"tag" / WString,
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1010, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1010_0(Etw):
pattern = Struct(
"path" / WString,
"error" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1011, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1011_0(Etw):
pattern = Struct(
"profile" / WString,
"path" / WString,
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1012, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1012_0(Etw):
pattern = Struct(
"profile" / WString,
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1013, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1013_0(Etw):
pattern = Struct(
"profile" / WString,
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1014, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1014_0(Etw):
pattern = Struct(
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1015, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1015_0(Etw):
pattern = Struct(
"id" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1016, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1016_0(Etw):
pattern = Struct(
"id" / WString,
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1017, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1017_0(Etw):
pattern = Struct(
"profile" / WString,
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1020, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1020_0(Etw):
pattern = Struct(
"providerName" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1021, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1021_0(Etw):
pattern = Struct(
"providerName" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1022, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1022_0(Etw):
pattern = Struct(
"id" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1023, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1023_0(Etw):
pattern = Struct(
"id" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1030, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1030_0(Etw):
pattern = Struct(
"providerName" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1031, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1031_0(Etw):
pattern = Struct(
"providerName" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1032, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1032_0(Etw):
pattern = Struct(
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1033, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1033_0(Etw):
pattern = Struct(
"culture" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=1034, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_1034_0(Etw):
pattern = Struct(
"culture" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=2001, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_2001_0(Etw):
pattern = Struct(
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=3000, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_3000_0(Etw):
pattern = Struct(
"error" / Int32ul,
"hresult" / Int32sl
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=3001, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_3001_0(Etw):
pattern = Struct(
"path" / WString
)
@declare(guid=guid("28e25b07-c47f-473d-8b24-2e171cca808a"), event_id=3002, version=0)
class Microsoft_Windows_Mobile_Broadband_Experience_Parser_Task_3002_0(Etw):
pattern = Struct(
"path" / WString,
"error" / Int32ul,
"hresult" / Int32sl
)
| 31.189516 | 123 | 0.72256 | 947 | 7,735 | 5.616684 | 0.089757 | 0.093251 | 0.12822 | 0.180673 | 0.914458 | 0.914458 | 0.884565 | 0.868772 | 0.868772 | 0.868772 | 0 | 0.1583 | 0.16044 | 7,735 | 247 | 124 | 31.315789 | 0.660764 | 0.016031 | 0 | 0.469613 | 0 | 0 | 0.185214 | 0.142068 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.022099 | 0 | 0.353591 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b94a4914a2dcebdd4398cb723f1fd9c5ecaba6ae | 657 | py | Python | x64/Release/test.py | MuffinSpawn/Leica | afb89ce88e4d8ee8841e4e35e6719b0723cb164a | [
"MIT"
] | 4 | 2021-06-11T06:10:14.000Z | 2021-11-29T06:58:57.000Z | x64/Release/test.py | MuffinSpawn/Leica | afb89ce88e4d8ee8841e4e35e6719b0723cb164a | [
"MIT"
] | 10 | 2018-02-01T16:39:13.000Z | 2018-02-22T23:12:12.000Z | x64/Release/test.py | MuffinSpawn/Leica | afb89ce88e4d8ee8841e4e35e6719b0723cb164a | [
"MIT"
] | 3 | 2018-12-29T09:02:19.000Z | 2020-07-20T06:09:55.000Z | import CESAPI
status = b'\x00\x00\x00\x00'
enc = CESAPI.Encoder()
enc.Initialize()
print(enc.getPacket())
packet = b''.join((b'\x10', enc.getPacket()[1:], status))
dec = CESAPI.Decoder()
print(packet)
message = dec.decode(packet)
ans = message.getCommand()
print(ans.packetHeader.lPacketSize)
print(ans.packetHeader.type)
print(ans.command)
print(ans.status)
enc.GetAT4xxInfo()
print(enc.getPacket())
packet = b''.join((b'\x10', enc.getPacket()[1:], status))
print(packet)
dec = CESAPI.Decoder()
message = dec.decode(packet)
ans = message.getCommand()
print(ans.packetHeader.lPacketSize)
print(ans.packetHeader.type)
print(ans.command)
print(ans.status)
| 22.655172 | 57 | 0.73516 | 91 | 657 | 5.307692 | 0.274725 | 0.132505 | 0.165631 | 0.095238 | 0.732919 | 0.732919 | 0.732919 | 0.732919 | 0.732919 | 0.732919 | 0 | 0.024834 | 0.08067 | 657 | 28 | 58 | 23.464286 | 0.774834 | 0 | 0 | 0.8 | 0 | 0 | 0.03653 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.04 | 0 | 0.04 | 0.48 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
b94e858f7c8fa24d80c59109b79839b0ef5bd93f | 7,836 | py | Python | camera_state_predict/data_integrate.py | qqxx6661/EaML | beddfaea6dda964171ed7b0df9d459e4bc83fb71 | [
"MIT"
] | null | null | null | camera_state_predict/data_integrate.py | qqxx6661/EaML | beddfaea6dda964171ed7b0df9d459e4bc83fb71 | [
"MIT"
] | null | null | null | camera_state_predict/data_integrate.py | qqxx6661/EaML | beddfaea6dda964171ed7b0df9d459e4bc83fb71 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# coding=utf-8
import cv2
import numpy as np
import csv
import time
def (output_list):
if len(output_list) == 2:
if output_list[0] == '0' and output_list[1] == '0':
return '0'
if output_list[0] == '1' and output_list[1] == '0':
return '1'
if output_list[0] == '0' and output_list[1] == '1':
return '2'
if output_list[0] == '1' and output_list[1] == '1':
return '3'
if len(output_list) == 4:
if output_list[0] == '0' and output_list[1] == '0' and output_list[2] == '0' and output_list[3] == '0':
return '0'
if output_list[0] == '1' and output_list[1] == '0' and output_list[2] == '0' and output_list[3] == '0':
return '1'
if output_list[0] == '0' and output_list[1] == '1' and output_list[2] == '0' and output_list[3] == '0':
return '2'
if output_list[0] == '1' and output_list[1] == '1' and output_list[2] == '0' and output_list[3] == '0':
return '3'
if output_list[0] == '0' and output_list[1] == '0' and output_list[2] == '1' and output_list[3] == '0':
return '4'
if output_list[0] == '0' and output_list[1] == '1' and output_list[2] == '1' and output_list[3] == '0':
return '5'
if output_list[0] == '0' and output_list[1] == '0' and output_list[2] == '0' and output_list[3] == '1':
return '6'
if output_list[0] == '0' and output_list[1] == '0' and output_list[2] == '1' and output_list[3] == '1':
return '7'
# 有特殊帧四个全开,人工识别应该去掉
if output_list[0] == '1' and output_list[1] == '1' and output_list[2] == '1' and output_list[3] == '1':
return '8'
def data_integrate(list_file):
input_each = []
inputs = []
outputs = []
outputs_upper = []
if len(list_file) == 2:
for cam_id, file_src in enumerate(list_file):
if cam_id == 0: # 双摄像头0号,只需提取右边
print("读取文件:", file_src)
with open(file_src) as file:
for line in file:
tokens = line.strip().split(',')
input_each.append(tokens[2])
input_each.append(tokens[4])
input_each.append(tokens[5])
input_each.append(int(tokens[3]) * 100) # 若为1则填100
input_each.append(tokens[7])
inputs.append(input_each)
input_each = []
outputs.append([tokens[1]]) # 先创建为数组
outputs_upper.append([tokens[3]]) # 先创建为数组
if cam_id == 1:
print("读取文件:", file_src)
with open(file_src) as file:
for line_number, line in enumerate(file):
tokens = line.strip().split(',')
input_each.append(tokens[2])
input_each.append(tokens[4])
input_each.append(tokens[5])
input_each.append(tokens[6])
input_each.append(int(tokens[3]) * 100) # 若为1则填100
inputs[line_number].extend(input_each)
input_each = []
outputs[line_number].append(tokens[1])
outputs_upper[line_number].append(tokens[3])
if len(list_file) == 4:
for cam_id, file_src in enumerate(list_file):
if cam_id == 0:
print("读取文件:", file_src)
with open(file_src) as file:
for line in file:
tokens = line.strip().split(',')
input_each.append(tokens[2])
input_each.append(tokens[4])
input_each.append(tokens[5])
input_each.append(int(tokens[3]) * 100) # 若为1则填100
input_each.append(tokens[7])
input_each.append(0)
input_each.append(0)
inputs.append(input_each)
input_each = []
outputs.append([tokens[1]]) # 先创建为数组
outputs_upper.append([tokens[3]]) # 先创建为数组
if cam_id == 1:
print("读取文件:", file_src)
with open(file_src) as file:
for line_number, line in enumerate(file):
tokens = line.strip().split(',')
input_each.append(tokens[2])
input_each.append(tokens[4])
input_each.append(tokens[5])
input_each.append(tokens[6])
input_each.append(int(tokens[3]) * 100) # 若为1则填100
input_each.append(tokens[7])
input_each.append(0)
inputs[line_number].extend(input_each)
input_each = []
outputs[line_number].append(tokens[1])
outputs_upper[line_number].append(tokens[3])
if cam_id == 2:
print("读取文件:", file_src)
with open(file_src) as file:
for line_number, line in enumerate(file):
tokens = line.strip().split(',')
input_each.append(tokens[2])
input_each.append(tokens[4])
input_each.append(tokens[5])
input_each.append(0)
input_each.append(tokens[6])
input_each.append(int(tokens[3]) * 100) # 若为1则填100
input_each.append(tokens[7])
inputs[line_number].extend(input_each)
input_each = []
outputs[line_number].append(tokens[1])
outputs_upper[line_number].append(tokens[3])
if cam_id == 3:
print("读取文件:", file_src)
with open(file_src) as file:
for line_number, line in enumerate(file):
tokens = line.strip().split(',')
input_each.append(tokens[2])
input_each.append(tokens[4])
input_each.append(tokens[5])
input_each.append(0)
input_each.append(0)
input_each.append(tokens[6])
input_each.append(int(tokens[3]) * 100) # 若为1则填100
inputs[line_number].extend(input_each)
input_each = []
outputs[line_number].append(tokens[1])
outputs_upper[line_number].append(tokens[3])
# print(inputs)
# print(outputs)
# print(outputs_upper)
row = []
with open('data/train_' + str(len(list_file)) + 'cam.csv', 'a', newline='') as f: # newline不多空行, a是追加模式
f_csv = csv.writer(f)
for i in range(len(outputs)):
row.append(_output(outputs[i]))
row.append(_output(outputs_upper[i]))
for j in range(len(inputs[0])):
row.append(inputs[i][j])
f_csv.writerow(row)
row = []
if __name__ == "__main__":
global_start = time.time()
list_file_name = ["data/4cam_scene1/data_2017-08-08 17-59-16_0.csv",
"data/4cam_scene1/data_2017-08-08 17-59-16_1.csv",
"data/4cam_scene1/data_2017-08-08 17-59-17_0.csv",
"data/4cam_scene1/data_2017-08-08 17-59-17_1.csv"]
data_integrate(list_file_name)
global_end = time.time()
print("global time:", global_end - global_start) | 45.55814 | 111 | 0.479071 | 920 | 7,836 | 3.878261 | 0.094565 | 0.128644 | 0.159753 | 0.153027 | 0.830437 | 0.830157 | 0.826513 | 0.826513 | 0.822029 | 0.822029 | 0 | 0.058203 | 0.394844 | 7,836 | 172 | 112 | 45.55814 | 0.694222 | 0.02782 | 0 | 0.673203 | 0 | 0 | 0.0421 | 0.01684 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.026144 | null | null | 0.045752 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b96343a03e574245b2e8beecf1e9531e8d9ae8f4 | 40,663 | py | Python | huaweicloud-sdk-live/huaweicloudsdklive/v2/live_client.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | 64 | 2020-06-12T07:05:07.000Z | 2022-03-30T03:32:50.000Z | huaweicloud-sdk-live/huaweicloudsdklive/v2/live_client.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | 11 | 2020-07-06T07:56:54.000Z | 2022-01-11T11:14:40.000Z | huaweicloud-sdk-live/huaweicloudsdklive/v2/live_client.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | 24 | 2020-06-08T11:42:13.000Z | 2022-03-04T06:44:08.000Z | # coding: utf-8
from __future__ import absolute_import
import datetime
import re
import importlib
import six
from huaweicloudsdkcore.client import Client, ClientBuilder
from huaweicloudsdkcore.exceptions import exceptions
from huaweicloudsdkcore.utils import http_utils
from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest
class LiveClient(Client):
    """Client for the Huawei Cloud Live (v2) service API.

    :param configuration: .Configuration object for this client
    :param pool_threads: The number of threads to use for async requests
        to the API. More threads means more concurrent API requests.
    """

    # Scalar Python types treated as primitives when (de)serializing payloads.
    PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
    # Maps type-name strings used in generated model specs to native Python
    # types.  NOTE: `long` only exists on Python 2; the `six.PY3` guard keeps
    # the expression from evaluating the bare name on Python 3.
    NATIVE_TYPES_MAPPING = {
        'int': int,
        'long': int if six.PY3 else long,
        'float': float,
        'str': str,
        'bool': bool,
        'date': datetime.date,
        'datetime': datetime.datetime,
        'object': object,
    }
def __init__(self):
super(LiveClient, self).__init__()
self.model_package = importlib.import_module("huaweicloudsdklive.v2.model")
self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}
@classmethod
def new_builder(cls, clazz=None):
if clazz is None:
return ClientBuilder(cls)
if clazz.__name__ != "LiveClient":
raise TypeError("client type error, support client type is LiveClient")
return ClientBuilder(clazz)
def list_bandwidth_detail(self, request):
"""查询播放带宽趋势接口
查询播放域名带宽数据。 最大查询跨度31天,最大查询周期90天。
:param ListBandwidthDetailRequest request
:return: ListBandwidthDetailResponse
"""
return self.list_bandwidth_detail_with_http_info(request)
def list_bandwidth_detail_with_http_info(self, request):
"""查询播放带宽趋势接口
查询播放域名带宽数据。 最大查询跨度31天,最大查询周期90天。
:param ListBandwidthDetailRequest request
:return: ListBandwidthDetailResponse
"""
all_params = ['play_domains', 'app', 'stream', 'region', 'isp', 'interval', 'start_time', 'end_time']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'play_domains' in local_var_params:
query_params.append(('play_domains', local_var_params['play_domains']))
collection_formats['play_domains'] = 'csv'
if 'app' in local_var_params:
query_params.append(('app', local_var_params['app']))
if 'stream' in local_var_params:
query_params.append(('stream', local_var_params['stream']))
if 'region' in local_var_params:
query_params.append(('region', local_var_params['region']))
collection_formats['region'] = 'csv'
if 'isp' in local_var_params:
query_params.append(('isp', local_var_params['isp']))
collection_formats['isp'] = 'csv'
if 'interval' in local_var_params:
query_params.append(('interval', local_var_params['interval']))
if 'start_time' in local_var_params:
query_params.append(('start_time', local_var_params['start_time']))
if 'end_time' in local_var_params:
query_params.append(('end_time', local_var_params['end_time']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = ["X-request-id"]
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/stats/bandwidth/detail',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListBandwidthDetailResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_domain_bandwidth_peak(self, request):
"""查询播放带宽峰值接口
查询指定时间范围内播放带宽峰值。 最大查询跨度31天,最大查询周期90天。
:param ListDomainBandwidthPeakRequest request
:return: ListDomainBandwidthPeakResponse
"""
return self.list_domain_bandwidth_peak_with_http_info(request)
def list_domain_bandwidth_peak_with_http_info(self, request):
    """Query peak playback bandwidth.

    Queries the peak playback bandwidth within the specified time range.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListDomainBandwidthPeakRequest request
    :return: ListDomainBandwidthPeakResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters; 'csv' means a
    # list value is serialized as comma-separated values.
    query_params = []
    if 'play_domains' in local_var_params:
        query_params.append(('play_domains', local_var_params['play_domains']))
        collection_formats['play_domains'] = 'csv'
    if 'app' in local_var_params:
        query_params.append(('app', local_var_params['app']))
    if 'stream' in local_var_params:
        query_params.append(('stream', local_var_params['stream']))
    if 'region' in local_var_params:
        query_params.append(('region', local_var_params['region']))
        collection_formats['region'] = 'csv'
    if 'isp' in local_var_params:
        query_params.append(('isp', local_var_params['isp']))
        collection_formats['isp'] = 'csv'
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/bandwidth/peak',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListDomainBandwidthPeakResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_domain_traffic_detail(self, request):
    """Query playback traffic trend.

    Queries traffic data of playback domains.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListDomainTrafficDetailRequest request
    :return: ListDomainTrafficDetailResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.list_domain_traffic_detail_with_http_info(request)
def list_domain_traffic_detail_with_http_info(self, request):
    """Query playback traffic trend.

    Queries traffic data of playback domains.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListDomainTrafficDetailRequest request
    :return: ListDomainTrafficDetailResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters; 'csv' means a
    # list value is serialized as comma-separated values.
    query_params = []
    if 'play_domains' in local_var_params:
        query_params.append(('play_domains', local_var_params['play_domains']))
        collection_formats['play_domains'] = 'csv'
    if 'app' in local_var_params:
        query_params.append(('app', local_var_params['app']))
    if 'stream' in local_var_params:
        query_params.append(('stream', local_var_params['stream']))
    if 'region' in local_var_params:
        query_params.append(('region', local_var_params['region']))
        collection_formats['region'] = 'csv'
    if 'isp' in local_var_params:
        query_params.append(('isp', local_var_params['isp']))
        collection_formats['isp'] = 'csv'
    if 'interval' in local_var_params:
        query_params.append(('interval', local_var_params['interval']))
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/traffic/detail',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListDomainTrafficDetailResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_domain_traffic_summary(self, request):
    """Query playback traffic summary.

    Queries the total playback traffic within the specified time range.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListDomainTrafficSummaryRequest request
    :return: ListDomainTrafficSummaryResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.list_domain_traffic_summary_with_http_info(request)
def list_domain_traffic_summary_with_http_info(self, request):
    """Query playback traffic summary.

    Queries the total playback traffic within the specified time range.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListDomainTrafficSummaryRequest request
    :return: ListDomainTrafficSummaryResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters; 'csv' means a
    # list value is serialized as comma-separated values.
    query_params = []
    if 'play_domains' in local_var_params:
        query_params.append(('play_domains', local_var_params['play_domains']))
        collection_formats['play_domains'] = 'csv'
    if 'app' in local_var_params:
        query_params.append(('app', local_var_params['app']))
    if 'stream' in local_var_params:
        query_params.append(('stream', local_var_params['stream']))
    if 'region' in local_var_params:
        query_params.append(('region', local_var_params['region']))
        collection_formats['region'] = 'csv'
    if 'isp' in local_var_params:
        query_params.append(('isp', local_var_params['isp']))
        collection_formats['isp'] = 'csv'
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/traffic/summary',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListDomainTrafficSummaryResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_history_streams(self, request):
    """Query history of published streams.

    Queries the list of historically published streams.
    Maximum query span: 1 day; maximum query period: 7 days.

    :param ListHistoryStreamsRequest request
    :return: ListHistoryStreamsResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.list_history_streams_with_http_info(request)
def list_history_streams_with_http_info(self, request):
    """Query history of published streams.

    Queries the list of historically published streams.
    Maximum query span: 1 day; maximum query period: 7 days.

    :param ListHistoryStreamsRequest request
    :return: ListHistoryStreamsResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters.
    query_params = []
    if 'domain' in local_var_params:
        query_params.append(('domain', local_var_params['domain']))
    if 'app' in local_var_params:
        query_params.append(('app', local_var_params['app']))
    if 'offset' in local_var_params:
        query_params.append(('offset', local_var_params['offset']))
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/history/streams',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListHistoryStreamsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_query_http_code(self, request):
    """Query live pull-stream HTTP status codes.

    Retrieves 1-minute-granularity HTTP return codes of accelerated domains.
    Maximum query span: 24 hours; maximum query period: 7 days.

    :param ListQueryHttpCodeRequest request
    :return: ListQueryHttpCodeResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.list_query_http_code_with_http_info(request)
def list_query_http_code_with_http_info(self, request):
    """Query live pull-stream HTTP status codes.

    Retrieves 1-minute-granularity HTTP return codes of accelerated domains.
    Maximum query span: 24 hours; maximum query period: 7 days.

    :param ListQueryHttpCodeRequest request
    :return: ListQueryHttpCodeResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters; 'csv' means a
    # list value is serialized as comma-separated values.
    query_params = []
    if 'play_domains' in local_var_params:
        query_params.append(('play_domains', local_var_params['play_domains']))
        collection_formats['play_domains'] = 'csv'
    if 'code' in local_var_params:
        query_params.append(('code', local_var_params['code']))
        collection_formats['code'] = 'csv'
    if 'region' in local_var_params:
        query_params.append(('region', local_var_params['region']))
        collection_formats['region'] = 'csv'
    if 'isp' in local_var_params:
        query_params.append(('isp', local_var_params['isp']))
        collection_formats['isp'] = 'csv'
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/httpcodes',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListQueryHttpCodeResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_record_data(self, request):
    """Query recording usage.

    Queries the tenant's hourly maximum number of concurrent recordings:
    the per-minute concurrent totals within each hour are computed and the
    maximum is used as the statistic.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListRecordDataRequest request
    :return: ListRecordDataResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.list_record_data_with_http_info(request)
def list_record_data_with_http_info(self, request):
    """Query recording usage.

    Queries the tenant's hourly maximum number of concurrent recordings:
    the per-minute concurrent totals within each hour are computed and the
    maximum is used as the statistic.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListRecordDataRequest request
    :return: ListRecordDataResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters.
    query_params = []
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/record',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListRecordDataResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_snapshot_data(self, request):
    """Query snapshot usage.

    Queries the hourly number of snapshots taken per streaming domain.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListSnapshotDataRequest request
    :return: ListSnapshotDataResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.list_snapshot_data_with_http_info(request)
def list_snapshot_data_with_http_info(self, request):
    """Query snapshot usage.

    Queries the hourly number of snapshots taken per streaming domain.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListSnapshotDataRequest request
    :return: ListSnapshotDataResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters.
    query_params = []
    if 'publish_domain' in local_var_params:
        query_params.append(('publish_domain', local_var_params['publish_domain']))
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/snapshot',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListSnapshotDataResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_transcode_data(self, request):
    """Query transcoding usage.

    Queries hourly transcoding-duration data per streaming domain.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListTranscodeDataRequest request
    :return: ListTranscodeDataResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.list_transcode_data_with_http_info(request)
def list_transcode_data_with_http_info(self, request):
    """Query transcoding usage.

    Queries hourly transcoding-duration data per streaming domain.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ListTranscodeDataRequest request
    :return: ListTranscodeDataResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters.
    query_params = []
    if 'publish_domain' in local_var_params:
        query_params.append(('publish_domain', local_var_params['publish_domain']))
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/transcode',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListTranscodeDataResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_users_of_stream(self, request):
    """Query viewer trend.

    Queries the viewer (audience) trend.
    Maximum query span: 7 days; maximum query period: 90 days.

    :param ListUsersOfStreamRequest request
    :return: ListUsersOfStreamResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.list_users_of_stream_with_http_info(request)
def list_users_of_stream_with_http_info(self, request):
    """Query viewer trend.

    Queries the viewer (audience) trend.
    Maximum query span: 7 days; maximum query period: 90 days.

    :param ListUsersOfStreamRequest request
    :return: ListUsersOfStreamResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters; 'csv' means a
    # list value is serialized as comma-separated values.
    query_params = []
    if 'play_domain' in local_var_params:
        query_params.append(('play_domain', local_var_params['play_domain']))
    if 'app' in local_var_params:
        query_params.append(('app', local_var_params['app']))
    if 'stream' in local_var_params:
        query_params.append(('stream', local_var_params['stream']))
    if 'isp' in local_var_params:
        query_params.append(('isp', local_var_params['isp']))
        collection_formats['isp'] = 'csv'
    if 'region' in local_var_params:
        query_params.append(('region', local_var_params['region']))
        collection_formats['region'] = 'csv'
    if 'interval' in local_var_params:
        query_params.append(('interval', local_var_params['interval']))
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/user',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListUsersOfStreamResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_stream_count(self, request):
    """Query the number of published streams per domain.

    Queries the stream count at the domain level.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ShowStreamCountRequest request
    :return: ShowStreamCountResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.show_stream_count_with_http_info(request)
def show_stream_count_with_http_info(self, request):
    """Query the number of published streams per domain.

    Queries the stream count at the domain level.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ShowStreamCountRequest request
    :return: ShowStreamCountResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters; 'csv' means a
    # list value is serialized as comma-separated values.
    query_params = []
    if 'publish_domains' in local_var_params:
        query_params.append(('publish_domains', local_var_params['publish_domains']))
        collection_formats['publish_domains'] = 'csv'
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/stream-count',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowStreamCountResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_stream_portrait(self, request):
    """Query playback profile (portrait) information.

    Queries playback profile information.
    Maximum query span: 1 day; maximum query period: 31 days.

    :param ShowStreamPortraitRequest request
    :return: ShowStreamPortraitResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.show_stream_portrait_with_http_info(request)
def show_stream_portrait_with_http_info(self, request):
    """Query playback profile (portrait) information.

    Queries playback profile information.
    Maximum query span: 1 day; maximum query period: 31 days.

    :param ShowStreamPortraitRequest request
    :return: ShowStreamPortraitResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters.
    query_params = []
    if 'play_domain' in local_var_params:
        query_params.append(('play_domain', local_var_params['play_domain']))
    if 'stream' in local_var_params:
        query_params.append(('stream', local_var_params['stream']))
    if 'time' in local_var_params:
        query_params.append(('time', local_var_params['time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/stream-portraits',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowStreamPortraitResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_up_bandwidth(self, request):
    """Query upstream bandwidth data.

    Queries upstream (ingest) bandwidth data.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ShowUpBandwidthRequest request
    :return: ShowUpBandwidthResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.show_up_bandwidth_with_http_info(request)
def show_up_bandwidth_with_http_info(self, request):
    """Query upstream bandwidth data.

    Queries upstream (ingest) bandwidth data.
    Maximum query span: 31 days; maximum query period: 90 days.

    :param ShowUpBandwidthRequest request
    :return: ShowUpBandwidthResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters; 'csv' means a
    # list value is serialized as comma-separated values.
    query_params = []
    if 'publish_domains' in local_var_params:
        query_params.append(('publish_domains', local_var_params['publish_domains']))
        collection_formats['publish_domains'] = 'csv'
    if 'app' in local_var_params:
        query_params.append(('app', local_var_params['app']))
    if 'stream' in local_var_params:
        query_params.append(('stream', local_var_params['stream']))
    if 'region' in local_var_params:
        query_params.append(('region', local_var_params['region']))
        collection_formats['region'] = 'csv'
    if 'isp' in local_var_params:
        query_params.append(('isp', local_var_params['isp']))
        collection_formats['isp'] = 'csv'
    if 'interval' in local_var_params:
        query_params.append(('interval', local_var_params['interval']))
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/up-bandwidth/detail',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowUpBandwidthResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_single_stream_bitrate(self, request):
    """Query stream bitrate data.

    Queries monitored publish-stream bitrate data.
    Maximum query span: 6 hours; maximum query period: 7 days.

    :param ListSingleStreamBitrateRequest request
    :return: ListSingleStreamBitrateResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.list_single_stream_bitrate_with_http_info(request)
def list_single_stream_bitrate_with_http_info(self, request):
    """Query stream bitrate data.

    Queries monitored publish-stream bitrate data.
    Maximum query span: 6 hours; maximum query period: 7 days.

    :param ListSingleStreamBitrateRequest request
    :return: ListSingleStreamBitrateResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters.
    query_params = []
    if 'domain' in local_var_params:
        query_params.append(('domain', local_var_params['domain']))
    if 'app' in local_var_params:
        query_params.append(('app', local_var_params['app']))
    if 'stream' in local_var_params:
        query_params.append(('stream', local_var_params['stream']))
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/stream/bitrate',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListSingleStreamBitrateResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_single_stream_framerate(self, request):
    """Query stream frame-rate data.

    Queries publish-stream frame-rate data.
    Maximum query span: 6 hours; maximum query period: 7 days.

    :param ListSingleStreamFramerateRequest request
    :return: ListSingleStreamFramerateResponse
    """
    # Delegate to the variant that builds and issues the HTTP request.
    return self.list_single_stream_framerate_with_http_info(request)
def list_single_stream_framerate_with_http_info(self, request):
    """Query stream frame-rate data.

    Queries publish-stream frame-rate data.
    Maximum query span: 6 hours; maximum query period: 7 days.

    :param ListSingleStreamFramerateRequest request
    :return: ListSingleStreamFramerateResponse
    """
    # Collect only the attributes that were actually set on the request.
    local_var_params = {}
    for attr in request.attribute_map:
        if hasattr(request, attr):
            local_var_params[attr] = getattr(request, attr)

    collection_formats = {}
    path_params = {}

    # Optional request attributes become query parameters.
    query_params = []
    if 'domain' in local_var_params:
        query_params.append(('domain', local_var_params['domain']))
    if 'app' in local_var_params:
        query_params.append(('app', local_var_params['app']))
    if 'stream' in local_var_params:
        query_params.append(('stream', local_var_params['stream']))
    if 'start_time' in local_var_params:
        query_params.append(('start_time', local_var_params['start_time']))
    if 'end_time' in local_var_params:
        query_params.append(('end_time', local_var_params['end_time']))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = ["X-request-id"]

    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])

    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/stats/stream/framerate',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListSingleStreamFramerateResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,
             post_params=None, response_type=None, response_headers=None, auth_settings=None,
             collection_formats=None, request_type=None):
    """Makes the HTTP request and returns deserialized data.

    Pure delegation: request assembly happens in the per-operation
    callers; transport happens in ``do_http_request``.

    :param resource_path: Path to method endpoint.
    :param method: HTTP method to call.
    :param path_params: Path parameters in the url.
    :param query_params: Query parameters in the url.
    :param header_params: Header parameters to be placed in the request header.
    :param body: Request body.
    :param post_params dict: Request post form parameters,
        for `application/x-www-form-urlencoded`, `multipart/form-data`.
    :param auth_settings list: Auth Settings names for the request.
    :param response_type: Response data type.
    :param response_headers: Headers that should be added to the response data.
    :param collection_formats: dict of collection formats for path, query,
        header, and post parameters.
    :param request_type: Request data type.
    :return:
        Return the response of ``do_http_request`` directly.
    """
    return self.do_http_request(
        method=method,
        resource_path=resource_path,
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body,
        post_params=post_params,
        response_type=response_type,
        response_headers=response_headers,
        collection_formats=collection_formats,
        request_type=request_type)
| 34.401861 | 113 | 0.632565 | 4,216 | 40,663 | 5.721774 | 0.058586 | 0.06301 | 0.110268 | 0.053061 | 0.881358 | 0.874974 | 0.859636 | 0.830411 | 0.822493 | 0.822493 | 0 | 0.00418 | 0.270492 | 40,663 | 1,181 | 114 | 34.430991 | 0.809028 | 0.121437 | 0 | 0.816298 | 0 | 0 | 0.125222 | 0.028618 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04558 | false | 0 | 0.013812 | 0 | 0.109116 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b9698d7d6e31baa49df4d53a3c4ab2c6e6f0ef2d | 4,902 | py | Python | process_data.py | alisonsneyd/Stopping-Criteria-Systematic-Reviews | 2231da55935a5ab6b3a124ff95df1acccbf9ec24 | [
"MIT"
] | null | null | null | process_data.py | alisonsneyd/Stopping-Criteria-Systematic-Reviews | 2231da55935a5ab6b3a124ff95df1acccbf9ec24 | [
"MIT"
] | null | null | null | process_data.py | alisonsneyd/Stopping-Criteria-Systematic-Reviews | 2231da55935a5ab6b3a124ff95df1acccbf9ec24 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
@author: Alison Sneyd
This code copies the required files from the tar-master directory (available at https://github.com/CLEF-TAR/tar) to a
new directory.
"""
# IMPORTS
import os
from shutil import copy
import glob
# COPY RELEVANCE FILE
os.mkdir("data/relevance")
copy('data/tar-master/2017-TAR/testing/qrels/qrel_abs_test.txt', 'data/relevance/')

# COPY RUNS
# Map each destination directory (under data/runs2017_table3/) to the run
# files it receives (paths relative to the participant-runs directory).
# Note: "UOS-test" and "Baseline" pull their files from the UOS team folder.
PARTICIPANT_RUNS = {
    "AMC": ["AMC/clef-finals/amc.run.res"],
    "AUTH": ["AUTH/simple-eval/run-1",
             "AUTH/simple-eval/run-2",
             "AUTH/simple-eval/run-3",
             "AUTH/simple-eval/run-4"],
    "CNRS": ["CNRS/trec_abrupt_ALL",
             "CNRS/trec_gradual_ALL",
             "CNRS/trec_no_AF_ALL",
             "CNRS/trec_no_AF_full_ALL"],
    "ECNU": ["ECNU/run1.res.txt"],
    "NTU": ["NTU/test_ranked_run_1.txt",
            "NTU/test_ranked_run_2.txt",
            "NTU/test_ranked_run_3.txt"],
    "Padua": ["Padua/simple/ims_iafa_m10k150f0m10",
              "Padua/simple/ims_iafap_m10p2f0m10",
              "Padua/simple/ims_iafap_m10p5f0m10",
              "Padua/simple/ims_iafas_m10k50f0m10"],
    "QUT": ["QUT/coordinateascent_result_bool_ltr_test.txt",
            "QUT/coordinateascent_result_pico_ltr_test.txt",
            "QUT/randomforest_result_bool_ltr_test.txt",
            "QUT/randomforest_result_pico_ltr_test.txt"],
    "Sheffield": ["Sheffield/Test_Data_Sheffield-run-1",
                  "Sheffield/Test_Data_Sheffield-run-2",
                  "Sheffield/Test_Data_Sheffield-run-3",
                  "Sheffield/Test_Data_Sheffield-run-4"],
    "UCL": ["UCL/run_abstract_test.txt",
            "UCL/run_fulltext_test.txt"],
    "UOS-test": ["UOS/test/sis.TMAL30Q_BM25.res",
                 "UOS/test/sis.TMBEST_BM25.res"],
    "Waterloo": ["Waterloo/A-rank-normal.txt",
                 "Waterloo/B-rank-normal.txt"],
    "Baseline": ["UOS/test/pubmed.random.res",
                 "UOS/test/sis.BM25.res"],
}

SRC_ROOT = "data/tar-master/2017-TAR/participant-runs"
DEST_ROOT = "data/runs2017_table3"

os.mkdir(DEST_ROOT)
for team, run_files in PARTICIPANT_RUNS.items():
    dest_dir = os.path.join(DEST_ROOT, team)
    os.mkdir(dest_dir)
    for run_file in run_files:
        copy(os.path.join(SRC_ROOT, run_file), dest_dir)

# Rename the baseline BM25 run to match the name used by AURC.
os.rename("data/runs2017_table3/Baseline/sis.BM25.res",
          "data/runs2017_table3/Baseline/BM25.res")

# PRINT RESULTS
all_runs = glob.glob('data/runs2017_table3/*/*')
print("Number runs:", len(all_runs))
for run in all_runs:
    print(run)
| 52.148936 | 124 | 0.777642 | 759 | 4,902 | 4.874835 | 0.146245 | 0.158919 | 0.238378 | 0.156216 | 0.825135 | 0.76 | 0.744054 | 0.700811 | 0.67973 | 0.669189 | 0 | 0.093521 | 0.055488 | 4,902 | 93 | 125 | 52.709677 | 0.705616 | 0.050796 | 0 | 0 | 0 | 0.285714 | 0.8223 | 0.808885 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.053571 | 0 | 0.053571 | 0.035714 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
b97226201f7fcb082391351948f4bf0c1ba89342 | 3,391 | py | Python | tests/test_note_interval_addition.py | charlottepierce/music_basics | 097a2bb14373662fcb0f6b585abca2913a8affb0 | [
"MIT"
] | 6 | 2018-03-22T03:09:24.000Z | 2021-08-13T16:19:21.000Z | tests/test_note_interval_addition.py | charlottepierce/music_basics | 097a2bb14373662fcb0f6b585abca2913a8affb0 | [
"MIT"
] | 3 | 2018-06-19T02:09:52.000Z | 2018-06-20T00:09:46.000Z | tests/test_note_interval_addition.py | charlottepierce/music_basics | 097a2bb14373662fcb0f6b585abca2913a8affb0 | [
"MIT"
] | 1 | 2018-06-18T19:07:03.000Z | 2018-06-18T19:07:03.000Z | import pytest
from music_essentials import Note, Interval
# Simple additions
def test_valid_addition_simple_major_second():
    """Adding a major second (M2) to C4 yields D4 with no accidental."""
    n = Note.from_note_string('C4')
    i = Interval.from_interval_string('M2')
    res = n + i
    # `is None` rather than `== None` (PEP 8 E711), consistent with the
    # over-octave tests in this file.
    assert (res.pitch == 'D') and (res.octave == 4) and (res.accidental is None)
def test_valid_addition_simple_major_fifth():
    """Adding a perfect fifth (P5) to C4 yields G4 with no accidental.

    NOTE(review): the name says "major_fifth" but P5 is a perfect fifth;
    kept as-is so test IDs stay stable.
    """
    n = Note.from_note_string('C4')
    i = Interval.from_interval_string('P5')
    res = n + i
    # `is None` rather than `== None` (PEP 8 E711).
    assert (res.pitch == 'G') and (res.octave == 4) and (res.accidental is None)
def test_valid_addition_simple_minor_third():
    """Adding a minor third (m3) to C4 yields E-flat 4."""
    result = Note.from_note_string('C4') + Interval.from_interval_string('m3')
    assert result.pitch == 'E'
    assert result.octave == 4
    assert result.accidental == 'b'
def test_valid_addition_simple_minor_seventh():
    """Adding a minor seventh (m7) to C4 yields B-flat 4."""
    result = Note.from_note_string('C4') + Interval.from_interval_string('m7')
    assert result.pitch == 'B'
    assert result.octave == 4
    assert result.accidental == 'b'
def test_valid_addition_simple_unison():
    """Adding a perfect unison (P1) to C4 leaves the note unchanged."""
    n = Note.from_note_string('C4')
    i = Interval.from_interval_string('P1')
    res = n + i
    # `is None` rather than `== None` (PEP 8 E711).
    assert (res.pitch == 'C') and (res.octave == 4) and (res.accidental is None)
def test_valid_addition_simple_octave():
    """Adding a perfect octave (P8) to C4 yields C5."""
    n = Note.from_note_string('C4')
    i = Interval.from_interval_string('P8')
    res = n + i
    # `is None` rather than `== None` (PEP 8 E711).
    assert (res.pitch == 'C') and (res.octave == 5) and (res.accidental is None)
# Compound additions
def test_valid_addition_compound_major():
    """Adding a compound major tenth (M10) to C4 yields E5."""
    n = Note.from_note_string('C4')
    i = Interval.from_interval_string('M10')
    res = n + i
    # `is None` rather than `== None` (PEP 8 E711).
    assert (res.pitch == 'E') and (res.octave == 5) and (res.accidental is None)
def test_valid_addition_compound_minor():
    """Adding a compound minor fourteenth (m14) to C4 yields B-flat 5."""
    result = Note.from_note_string('C4') + Interval.from_interval_string('m14')
    assert result.pitch == 'B'
    assert result.octave == 5
    assert result.accidental == 'b'
def test_valid_addition_compound_augmented():
    """Adding an augmented thirteenth (aug13) to C4 yields A-sharp 5."""
    result = Note.from_note_string('C4') + Interval.from_interval_string('aug13')
    assert result.pitch == 'A'
    assert result.octave == 5
    assert result.accidental == '#'
def test_valid_addition_compound_diminished():
    """Adding a diminished fourteenth (dim14) to C4 yields B-double-flat 5."""
    result = Note.from_note_string('C4') + Interval.from_interval_string('dim14')
    assert result.pitch == 'B'
    assert result.octave == 5
    assert result.accidental == 'bb'
def test_valid_addition_compound_diminished_octave():
    """Adding a diminished octave (dim8) to C4 yields C-flat 5."""
    result = Note.from_note_string('C4') + Interval.from_interval_string('dim8')
    assert result.pitch == 'C'
    assert result.octave == 5
    assert result.accidental == 'b'
def test_small_addition_over_octave():
    """A minor second from B4 crosses the octave boundary into C5."""
    result = Note.from_note_string('B4') + Interval.from_interval_string('m2')
    assert result.pitch == 'C'
    assert result.octave == 5
    assert result.accidental is None
def test_fifth_addition_over_octave():
    """A perfect fifth from A4 crosses the octave boundary into E5."""
    result = Note.from_note_string('A4') + Interval.from_interval_string('P5')
    assert result.pitch == 'E'
    assert result.octave == 5
    assert result.accidental is None
# Invalid additions
def test_note_int_add_rejection():
    """Adding a plain int to a Note must raise TypeError."""
    note = Note.from_note_string('A4')
    with pytest.raises(TypeError):
        note + 1
def test_note_float_add_rejection():
    """Adding a float to a Note must raise TypeError."""
    note = Note.from_note_string('A4')
    with pytest.raises(TypeError):
        note + 7.3
def test_note_str_add_rejection():
n = Note.from_note_string('A4')
with pytest.raises(TypeError):
n + 'interval' | 34.252525 | 80 | 0.662931 | 504 | 3,391 | 4.206349 | 0.140873 | 0.073585 | 0.067925 | 0.098113 | 0.889623 | 0.860377 | 0.792925 | 0.779245 | 0.725 | 0.71934 | 0 | 0.017831 | 0.18962 | 3,391 | 99 | 81 | 34.252525 | 0.753639 | 0.01563 | 0 | 0.417722 | 0 | 0 | 0.028786 | 0 | 0 | 0 | 0 | 0 | 0.164557 | 1 | 0.202532 | false | 0 | 0.025316 | 0 | 0.227848 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b99d43626716b439e199efdf2530c9d6bdcc28be | 1,411 | py | Python | ProjectEuler_008.py | sperek27/Project-Euler | 9ed9eb4d3b492f646c6c7d80ba84e834c7b3d3e7 | [
"MIT"
] | null | null | null | ProjectEuler_008.py | sperek27/Project-Euler | 9ed9eb4d3b492f646c6c7d80ba84e834c7b3d3e7 | [
"MIT"
] | null | null | null | ProjectEuler_008.py | sperek27/Project-Euler | 9ed9eb4d3b492f646c6c7d80ba84e834c7b3d3e7 | [
"MIT"
] | null | null | null | num = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
def product(s):
    """Return the product of the digits of the digit-string ``s``.

    An empty string yields 1 (the multiplicative identity).
    """
    result = 1
    for digit in s:
        result = result * int(digit)
    return result
# Project Euler 8: greatest product of 13 adjacent digits in `num`.
# Split `num` into maximal runs of non-zero digits; any window containing
# a '0' has product 0 and can never beat `record`, so only runs of at
# least 13 digits (len > 12) are candidate chunks.
arr = []
temp = ""
for c in num:
    if c == "0":
        if len(temp) > 12:
            arr.append(temp)
        temp = ""
        continue
    temp += c
# Bug fix: flush the final run.  Without this, a trailing run of >12
# non-zero digits would be silently dropped whenever `num` does not end
# in '0' (harmless for this particular input, which ends in '0', but
# wrong in general).
if len(temp) > 12:
    arr.append(temp)
# Slide a 13-digit window over each chunk, updating the product
# incrementally: divide out the digit leaving, multiply in the one
# entering.  Safe because chunks contain no zeros by construction.
record = 1
for n in arr:
    p = product(n[:13])
    if p > record:
        record = p
    for i in range(13, len(n)):
        p //= int(n[i - 13])
        p *= int(n[i])
        if p > record:
            record = p
print(record)
| 47.033333 | 1,008 | 0.848335 | 74 | 1,411 | 16.175676 | 0.351351 | 0.010025 | 0.012531 | 0.011696 | 0.026734 | 0 | 0 | 0 | 0 | 0 | 0 | 0.79669 | 0.100638 | 1,411 | 29 | 1,009 | 48.655172 | 0.146572 | 0 | 0 | 0.230769 | 0 | 0 | 0.709426 | 0.708717 | 0 | 1 | 0 | 0 | 0 | 1 | 0.038462 | false | 0 | 0 | 0 | 0.076923 | 0.038462 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b9a1648e61b9aaa80413ecfd3fd82ba028b3cc20 | 8,603 | py | Python | tensorflow/contrib/keras/api/keras/backend/__init__.py | uve/tensorflow | e08079463bf43e5963acc41da1f57e95603f8080 | [
"Apache-2.0"
] | 6 | 2022-02-04T18:12:24.000Z | 2022-03-21T23:57:12.000Z | Lib/site-packages/tensorflow/contrib/keras/api/keras/backend/__init__.py | shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings | 1fa4cd6a566c8745f455fc3d2273208f21f88ced | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/tensorflow/contrib/keras/api/keras/backend/__init__.py | shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings | 1fa4cd6a566c8745f455fc3d2273208f21f88ced | [
"bzip2-1.0.6"
] | 1 | 2022-02-08T03:53:23.000Z | 2022-02-08T03:53:23.000Z | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras backend API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=redefined-builtin
from tensorflow.python.keras.backend import abs
from tensorflow.python.keras.backend import all
from tensorflow.python.keras.backend import any
from tensorflow.python.keras.backend import arange
from tensorflow.python.keras.backend import argmax
from tensorflow.python.keras.backend import argmin
from tensorflow.python.keras.backend import backend
from tensorflow.python.keras.backend import batch_dot
from tensorflow.python.keras.backend import batch_flatten
from tensorflow.python.keras.backend import batch_get_value
from tensorflow.python.keras.backend import batch_normalization
from tensorflow.python.keras.backend import batch_set_value
from tensorflow.python.keras.backend import bias_add
from tensorflow.python.keras.backend import binary_crossentropy
from tensorflow.python.keras.backend import cast
from tensorflow.python.keras.backend import cast_to_floatx
from tensorflow.python.keras.backend import categorical_crossentropy
from tensorflow.python.keras.backend import clear_session
from tensorflow.python.keras.backend import clip
from tensorflow.python.keras.backend import concatenate
from tensorflow.python.keras.backend import constant
from tensorflow.python.keras.backend import conv1d
from tensorflow.python.keras.backend import conv2d
from tensorflow.python.keras.backend import conv2d_transpose
from tensorflow.python.keras.backend import conv3d
from tensorflow.python.keras.backend import cos
from tensorflow.python.keras.backend import count_params
from tensorflow.python.keras.backend import ctc_batch_cost
from tensorflow.python.keras.backend import ctc_decode
from tensorflow.python.keras.backend import ctc_label_dense_to_sparse
from tensorflow.python.keras.backend import dot
from tensorflow.python.keras.backend import dropout
from tensorflow.python.keras.backend import dtype
from tensorflow.python.keras.backend import elu
from tensorflow.python.keras.backend import epsilon
from tensorflow.python.keras.backend import equal
from tensorflow.python.keras.backend import eval
from tensorflow.python.keras.backend import exp
from tensorflow.python.keras.backend import expand_dims
from tensorflow.python.keras.backend import eye
from tensorflow.python.keras.backend import flatten
from tensorflow.python.keras.backend import floatx
from tensorflow.python.keras.backend import foldl
from tensorflow.python.keras.backend import foldr
from tensorflow.python.keras.backend import function
from tensorflow.python.keras.backend import gather
from tensorflow.python.keras.backend import get_session
from tensorflow.python.keras.backend import get_uid
from tensorflow.python.keras.backend import get_value
from tensorflow.python.keras.backend import gradients
from tensorflow.python.keras.backend import greater
from tensorflow.python.keras.backend import greater_equal
from tensorflow.python.keras.backend import hard_sigmoid
from tensorflow.python.keras.backend import image_data_format
from tensorflow.python.keras.backend import in_test_phase
from tensorflow.python.keras.backend import in_top_k
from tensorflow.python.keras.backend import in_train_phase
from tensorflow.python.keras.backend import int_shape
from tensorflow.python.keras.backend import is_sparse
from tensorflow.python.keras.backend import l2_normalize
from tensorflow.python.keras.backend import learning_phase
from tensorflow.python.keras.backend import less
from tensorflow.python.keras.backend import less_equal
from tensorflow.python.keras.backend import log
from tensorflow.python.keras.backend import manual_variable_initialization
from tensorflow.python.keras.backend import map_fn
from tensorflow.python.keras.backend import max
from tensorflow.python.keras.backend import maximum
from tensorflow.python.keras.backend import mean
from tensorflow.python.keras.backend import min
from tensorflow.python.keras.backend import minimum
from tensorflow.python.keras.backend import moving_average_update
from tensorflow.python.keras.backend import name_scope
from tensorflow.python.keras.backend import ndim
from tensorflow.python.keras.backend import normalize_batch_in_training
from tensorflow.python.keras.backend import not_equal
from tensorflow.python.keras.backend import one_hot
from tensorflow.python.keras.backend import ones
from tensorflow.python.keras.backend import ones_like
from tensorflow.python.keras.backend import permute_dimensions
from tensorflow.python.keras.backend import placeholder
from tensorflow.python.keras.backend import pool2d
from tensorflow.python.keras.backend import pool3d
from tensorflow.python.keras.backend import pow
from tensorflow.python.keras.backend import print_tensor
from tensorflow.python.keras.backend import prod
from tensorflow.python.keras.backend import random_binomial
from tensorflow.python.keras.backend import random_normal
from tensorflow.python.keras.backend import random_normal_variable
from tensorflow.python.keras.backend import random_uniform
from tensorflow.python.keras.backend import random_uniform_variable
from tensorflow.python.keras.backend import relu
from tensorflow.python.keras.backend import repeat
from tensorflow.python.keras.backend import repeat_elements
from tensorflow.python.keras.backend import reset_uids
from tensorflow.python.keras.backend import reshape
from tensorflow.python.keras.backend import resize_images
from tensorflow.python.keras.backend import resize_volumes
from tensorflow.python.keras.backend import reverse
from tensorflow.python.keras.backend import rnn
from tensorflow.python.keras.backend import round
from tensorflow.python.keras.backend import separable_conv2d
from tensorflow.python.keras.backend import set_epsilon
from tensorflow.python.keras.backend import set_floatx
from tensorflow.python.keras.backend import set_image_data_format
from tensorflow.python.keras.backend import set_learning_phase
from tensorflow.python.keras.backend import set_session
from tensorflow.python.keras.backend import set_value
from tensorflow.python.keras.backend import shape
from tensorflow.python.keras.backend import sigmoid
from tensorflow.python.keras.backend import sign
from tensorflow.python.keras.backend import sin
from tensorflow.python.keras.backend import softmax
from tensorflow.python.keras.backend import softplus
from tensorflow.python.keras.backend import softsign
from tensorflow.python.keras.backend import sparse_categorical_crossentropy
from tensorflow.python.keras.backend import spatial_2d_padding
from tensorflow.python.keras.backend import spatial_3d_padding
from tensorflow.python.keras.backend import sqrt
from tensorflow.python.keras.backend import square
from tensorflow.python.keras.backend import squeeze
from tensorflow.python.keras.backend import stack
from tensorflow.python.keras.backend import std
from tensorflow.python.keras.backend import stop_gradient
from tensorflow.python.keras.backend import sum
from tensorflow.python.keras.backend import switch
from tensorflow.python.keras.backend import tanh
from tensorflow.python.keras.backend import temporal_padding
from tensorflow.python.keras.backend import to_dense
from tensorflow.python.keras.backend import transpose
from tensorflow.python.keras.backend import truncated_normal
from tensorflow.python.keras.backend import update
from tensorflow.python.keras.backend import update_add
from tensorflow.python.keras.backend import update_sub
from tensorflow.python.keras.backend import var
from tensorflow.python.keras.backend import variable
from tensorflow.python.keras.backend import zeros
from tensorflow.python.keras.backend import zeros_like
# Remove the __future__ names from this module's namespace so they are not
# accidentally re-exported as part of the public keras.backend API surface.
del absolute_import
del division
del print_function
| 52.457317 | 81 | 0.838428 | 1,178 | 8,603 | 6.033956 | 0.187606 | 0.234665 | 0.388295 | 0.485369 | 0.822876 | 0.822876 | 0.480726 | 0.100732 | 0.014913 | 0 | 0 | 0.002321 | 0.098454 | 8,603 | 163 | 82 | 52.779141 | 0.914131 | 0.082994 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.986111 | 0 | 0.986111 | 0.020833 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
b9b340dc15076c312720f0c21e75a707df5b232a | 29,476 | py | Python | tests/test_fanno_relations.py | Rigel09/CompAero | 79a2902880c5bf6030794d585a48fbbf0c7df344 | [
"MIT"
] | 1 | 2022-03-29T23:59:16.000Z | 2022-03-29T23:59:16.000Z | tests/test_fanno_relations.py | Rigel09/CompAero | 79a2902880c5bf6030794d585a48fbbf0c7df344 | [
"MIT"
] | 7 | 2022-01-15T15:38:45.000Z | 2022-01-22T16:32:16.000Z | tests/test_fanno_relations.py | Rigel09/CompAero | 79a2902880c5bf6030794d585a48fbbf0c7df344 | [
"MIT"
] | null | null | null | from pytest import approx
from CompAero.FannoFlowRelations import FannoFlowRelations as ffr
from CompAero.internal import FlowState as FS
class TestFannoClassFuncs:
    """Spot-check the static FannoFlowRelations helpers against tabulated
    values for a subsonic (M = 0.5) and a supersonic (M = 1.5) state.

    Fix: two calls previously hard-coded ``1.4`` where every other test
    uses ``self.gamma`` — made consistent so a future gamma change is
    applied uniformly.
    """

    gamma = 1.4  # ratio of specific heats for air

    # Test the Functions for Subsonic Case
    #######################################################################################
    def test_subsonic_t_tstar(self):
        assert ffr.calc_T_Tstar(0.5, self.gamma) == approx(1.1429, rel=1e-4)

    def test_subsonic_mach_from_t_tstar(self):
        assert ffr.calc_mach_from_T_TStar(1.14285714, self.gamma) == approx(0.5, rel=1e-2)

    def test_subsonic_p_pstar(self):
        assert ffr.calc_P_Pstar(0.5, self.gamma) == approx(2.1381, rel=1e-4)

    def test_subsonic_mach_from_p_pstar(self):
        assert ffr.calc_mach_from_P_PStar(2.13808993, self.gamma) == approx(0.5, rel=1e-2)

    def test_subsonic_rho_rhoStar(self):
        assert ffr.calc_Rho_RhoStar(0.5, self.gamma) == approx(1.871, rel=1e-4)

    def test_subsonic_mach_from_rho_rhoStar(self):
        # Consistency fix: was a hard-coded 1.4; use self.gamma like the
        # rest of the class.
        assert ffr.calc_mach_from_Rho_RhoStar(1.871, self.gamma) == approx(0.5, 1e-3)

    def test_subsonic_p0_p0Star(self):
        assert ffr.calc_Po_PoStar(0.5, self.gamma) == approx(1.3398, rel=1e-4)

    def test_subsonic_mach_from_p0_p0Star(self):
        # Po/Po* is double-valued in Mach, so the flow regime must be given.
        assert ffr.calc_mach_from_Po_PoStar(1.33984375, self.gamma, flowType=FS.SUB_SONIC) == approx(
            0.5, 1e-3
        )

    def test_subsonic_4FLstarD(self):
        assert ffr.calc_4FLSt_D(0.5, self.gamma) == approx(1.0691, rel=1e-4)

    def test_subsonic_mach_from_4FLstarD(self):
        # 4fL*/D is double-valued in Mach, so the flow regime must be given.
        assert ffr.calc_mach_from_4FLSt_D(1.06906031, self.gamma, flowType=FS.SUB_SONIC) == approx(
            0.5, rel=1e-3
        )

    def test_subsonic_u_uStar(self):
        assert ffr.calc_U_UStar(0.5, self.gamma) == approx(0.5345, rel=1e-4)

    def test_subsonic_mach_from_u_uStar(self):
        assert ffr.calc_mach_from_U_USt(0.53452248, self.gamma) == approx(0.5, rel=1e-3)

    # Test the Functions for Supersonic Case
    #######################################################################################
    def test_supersonic_t_tstar(self):
        assert ffr.calc_T_Tstar(1.5, self.gamma) == approx(0.82759, rel=1e-4)

    def test_supersonic_mach_from_t_tstar(self):
        assert ffr.calc_mach_from_T_TStar(0.82758620, self.gamma) == approx(1.5, rel=1e-2)

    def test_supersonic_p_pstar(self):
        assert ffr.calc_P_Pstar(1.5, self.gamma) == approx(0.6065, rel=1e-4)

    def test_supersonic_mach_from_p_pstar(self):
        assert ffr.calc_mach_from_P_PStar(0.60647843, self.gamma) == approx(1.5, rel=1e-2)

    def test_supersonic_rho_rhoStar(self):
        assert ffr.calc_Rho_RhoStar(1.5, self.gamma) == approx(0.7328, rel=1e-4)

    def test_supersonic_mach_from_rho_rhoStar(self):
        # Consistency fix: was a hard-coded 1.4; use self.gamma like the
        # rest of the class.
        assert ffr.calc_mach_from_Rho_RhoStar(0.7328, self.gamma) == approx(1.5, 1e-3)

    def test_supersonic_p0_p0Star(self):
        assert ffr.calc_Po_PoStar(1.5, self.gamma) == approx(1.1762, rel=1e-4)

    def test_supersonic_mach_from_p0_p0Star(self):
        assert ffr.calc_mach_from_Po_PoStar(1.17616705, self.gamma, flowType=FS.SUPER_SONIC) == approx(
            1.5, 1e-3
        )

    def test_supersonic_4FLstarD(self):
        assert ffr.calc_4FLSt_D(1.5, self.gamma) == approx(0.13605, rel=1e-4)

    def test_supersonic_mach_from_4FLstarD(self):
        assert ffr.calc_mach_from_4FLSt_D(0.13605021, self.gamma, flowType=FS.SUPER_SONIC) == approx(
            1.5, rel=1e-3
        )

    def test_supersonic_u_uStar(self):
        assert ffr.calc_U_UStar(1.5, self.gamma) == approx(1.3646, rel=1e-4)

    def test_supersonic_mach_from_u_uStar(self):
        assert ffr.calc_mach_from_U_USt(1.36457647, self.gamma) == approx(1.5, rel=1e-3)
class TestFannoClassSubsonic:
    """Fanno flow relations for a subsonic (M = 0.5) upstream state.

    Each test constructs the *same* physical state from a different known
    input ratio, then verifies every derived quantity.  The originally
    copy-pasted assertion lists (~27 identical asserts per test) are
    factored into private ``_assert_*`` helpers so the expected values
    live in exactly one place.
    """

    gamma = 1.4  # ratio of specific heats for air

    def _assert_upstream_state(self, inst):
        """Upstream (station 1) conditions for M = 0.5, gamma = 1.4."""
        assert inst.gamma == approx(self.gamma, rel=1e-3)
        assert inst.mach == approx(0.5, rel=1e-3)
        assert inst.t_tSt == approx(1.1429, rel=1e-4)
        assert inst.p_pSt == approx(2.1381, rel=1e-4)
        assert inst.rho_rhoSt == approx(1.871, rel=1e-4)
        assert inst.po_poSt == approx(1.3398, rel=1e-4)
        assert inst.f4LSt_D == approx(1.0691, rel=1e-4)
        assert inst.u_uSt == approx(0.5345, rel=1e-4)
        assert inst.flowType == FS.SUB_SONIC

    def _assert_downstream_state(self, inst):
        """Downstream (station 2) conditions after 11 m of 0.4 m pipe."""
        assert inst.dwnStrmMach == approx(0.593, 1e-3)
        assert inst.dwnStrm_t_tSt == approx(1.1211, 1e-3)
        assert inst.dwnStrm_p_pSt == approx(1.7855, rel=1e-4)
        assert inst.dwnStrm_po_poSt == approx(1.1966, rel=1e-4)
        assert inst.dwnStrm_rho_rhoSt == approx(1.5926, rel=1e-4)
        assert inst.dwnStrm_f4LSt_D == approx(0.5191, rel=1e-4)
        assert inst.dwnStrm_u_uSt == approx(0.6279, rel=1e-4)

    def _assert_station_ratios(self, inst):
        """Station 2 / station 1 ratios must follow from the starred ratios."""
        assert inst.p2_p1 == approx(inst.dwnStrm_p_pSt / inst.p_pSt, rel=1e-5)
        assert inst.rho2_rho1 == approx(inst.dwnStrm_rho_rhoSt / inst.rho_rhoSt, rel=1e-5)
        assert inst.t2_t1 == approx(inst.dwnStrm_t_tSt / inst.t_tSt, rel=1e-5)
        assert inst.po2_po1 == approx(inst.dwnStrm_po_poSt / inst.po_poSt, rel=1e-5)
        assert inst.f4LD2_f4LD1 == approx(inst.dwnStrm_f4LSt_D / inst.f4LSt_D, rel=1e-5)
        assert inst.u2_u1 == approx(inst.dwnStrm_u_uSt / inst.u_uSt, rel=1e-5)

    def _check_unchoked(self, inst):
        """Apply the standard (non-choking) pipe and run the full check set."""
        inst.apply_pipe_parameters(0.4, 11, 0.005)
        self._assert_upstream_state(inst)
        assert not inst.chockedFlow
        self._assert_downstream_state(inst)
        self._assert_station_ratios(inst)

    def test_fanno_from_mach(self):
        self._check_unchoked(ffr(self.gamma, mach=0.5))

    def test_fanno_from_t_tStar(self):
        self._check_unchoked(ffr(self.gamma, t_tSt=1.1428571428571428))

    def test_fanno_from_p_pStar(self):
        self._check_unchoked(ffr(self.gamma, p_pSt=2.1381))

    def test_fanno_from_rho_rhoStar(self):
        self._check_unchoked(ffr(self.gamma, rho_rhoSt=1.8708286933869707))

    def test_fanno_from_po_poStar(self):
        # Po/Po* is double-valued in Mach, so the flow regime must be given.
        self._check_unchoked(ffr(self.gamma, po_poSt=1.33984375, flowType=FS.SUB_SONIC))

    def test_fanno_from_f4LStar_D(self):
        # 4fL*/D is double-valued in Mach, so the flow regime must be given.
        self._check_unchoked(ffr(self.gamma, f4LSt_D=1.0690603127182559, flowType=FS.SUB_SONIC))

    def test_fanno_from_u_uStar(self):
        self._check_unchoked(ffr(self.gamma, u_uSt=0.5345224838248488))

    def test_fanno_choked_flow(self):
        # A 22 m pipe exceeds the maximum (choking) length for M = 0.5, so
        # the exit must reach sonic (starred) conditions: all downstream
        # starred ratios go to 1 and the remaining 4fL*/D goes to 0.
        inst = ffr(self.gamma, mach=0.5)
        inst.apply_pipe_parameters(0.4, 22, 0.005)
        self._assert_upstream_state(inst)
        assert inst.chockedFlow
        assert inst.dwnStrmMach == approx(1.0, 1e-3)
        assert inst.dwnStrm_t_tSt == approx(1.0, 1e-3)
        assert inst.dwnStrm_p_pSt == approx(1.0, rel=1e-4)
        assert inst.dwnStrm_po_poSt == approx(1.0, rel=1e-4)
        assert inst.dwnStrm_rho_rhoSt == approx(1.0, rel=1e-4)
        assert inst.dwnStrm_f4LSt_D == approx(0.0, rel=1e-4)
        assert inst.dwnStrm_u_uSt == approx(1.0, rel=1e-4)
        self._assert_station_ratios(inst)
class TestFannoClassSupersonic:
    """Fanno-flow tests for a supersonic (M = 1.5, gamma = 1.4) entrance state.

    Every test constructs the same flow state from a different defining
    ratio, applies identical pipe parameters, and therefore expects the
    identical upstream/downstream conditions.  The original file repeated
    the full 24-assert suite verbatim in each test; the shared expectations
    now live in private helper methods so a changed expected value is edited
    in exactly one place.
    """

    gamma = 1.4

    # Expected downstream state for the (0.4, 1.5, 0.005) pipe:
    # (mach, t/t*, p/p*, po/po*, rho/rho*, f4L*/D, u/u*).
    _DOWNSTREAM = (1.2887, 0.9008, 0.7365, 1.0616, 0.8176, 0.06105, 1.2231)
    # Expected downstream state when the pipe is long enough to choke (M = 1).
    _DOWNSTREAM_CHOKED = (1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0)

    def _assert_upstream(self, inst):
        """Check the entrance (upstream) ratios shared by every test case."""
        assert inst.gamma == approx(self.gamma, rel=1e-3)
        assert inst.mach == approx(1.5, rel=1e-3)
        assert inst.t_tSt == approx(0.8276, rel=1e-4)
        assert inst.p_pSt == approx(0.6065, rel=1e-4)
        assert inst.rho_rhoSt == approx(0.7328, rel=1e-4)
        assert inst.po_poSt == approx(1.1762, rel=1e-4)
        assert inst.f4LSt_D == approx(0.13605, rel=1e-4)
        assert inst.u_uSt == approx(1.3646, rel=1e-4)
        assert inst.flowType == FS.SUPER_SONIC

    def _assert_downstream(self, inst, expected):
        """Check the downstream state against an expected ratio tuple."""
        mach, t_tSt, p_pSt, po_poSt, rho_rhoSt, f4LSt_D, u_uSt = expected
        assert inst.dwnStrmMach == approx(mach, rel=1e-3)
        assert inst.dwnStrm_t_tSt == approx(t_tSt, rel=1e-3)
        assert inst.dwnStrm_p_pSt == approx(p_pSt, rel=1e-4)
        assert inst.dwnStrm_po_poSt == approx(po_poSt, rel=1e-4)
        assert inst.dwnStrm_rho_rhoSt == approx(rho_rhoSt, rel=1e-4)
        assert inst.dwnStrm_f4LSt_D == approx(f4LSt_D, rel=1e-4)
        assert inst.dwnStrm_u_uSt == approx(u_uSt, rel=1e-4)

    def _assert_ratios(self, inst):
        """Check each station-2/station-1 ratio equals downstream/upstream."""
        assert inst.p2_p1 == approx(inst.dwnStrm_p_pSt / inst.p_pSt, rel=1e-5)
        assert inst.rho2_rho1 == approx(inst.dwnStrm_rho_rhoSt / inst.rho_rhoSt, rel=1e-5)
        assert inst.t2_t1 == approx(inst.dwnStrm_t_tSt / inst.t_tSt, rel=1e-5)
        assert inst.po2_po1 == approx(inst.dwnStrm_po_poSt / inst.po_poSt, rel=1e-5)
        assert inst.f4LD2_f4LD1 == approx(inst.dwnStrm_f4LSt_D / inst.f4LSt_D, rel=1e-5)
        assert inst.u2_u1 == approx(inst.dwnStrm_u_uSt / inst.u_uSt, rel=1e-5)

    def _assert_unchoked_flow(self, inst):
        """Full expectation set for the short (non-choking) pipe."""
        self._assert_upstream(inst)
        assert not inst.chockedFlow
        self._assert_downstream(inst, self._DOWNSTREAM)
        self._assert_ratios(inst)

    def test_fanno_from_mach(self):
        inst = ffr(self.gamma, mach=1.5)
        inst.apply_pipe_parameters(0.4, 1.5, 0.005)
        self._assert_unchoked_flow(inst)

    def test_fanno_from_t_tStar(self):
        inst = ffr(self.gamma, t_tSt=0.8275862068965517)
        inst.apply_pipe_parameters(0.4, 1.5, 0.005)
        self._assert_unchoked_flow(inst)

    def test_fanno_from_p_pStar(self):
        inst = ffr(self.gamma, p_pSt=0.6064784348631227)
        inst.apply_pipe_parameters(0.4, 1.5, 0.005)
        self._assert_unchoked_flow(inst)

    def test_fanno_from_rho_rhoStar(self):
        inst = ffr(self.gamma, rho_rhoSt=0.7328281087929399)
        inst.apply_pipe_parameters(0.4, 1.5, 0.005)
        self._assert_unchoked_flow(inst)

    def test_fanno_from_po_poStar(self):
        # flowType is passed explicitly here (as in the original test);
        # presumably po/po* alone does not determine the regime -- confirm.
        inst = ffr(self.gamma, po_poSt=1.1761670524691357, flowType=FS.SUPER_SONIC)
        inst.apply_pipe_parameters(0.4, 1.5, 0.005)
        self._assert_unchoked_flow(inst)

    def test_fanno_from_f4LStar_D(self):
        # flowType is passed explicitly here (as in the original test);
        # presumably f4L*/D alone does not determine the regime -- confirm.
        inst = ffr(self.gamma, f4LSt_D=0.13605021738414635, flowType=FS.SUPER_SONIC)
        inst.apply_pipe_parameters(0.4, 1.5, 0.005)
        self._assert_unchoked_flow(inst)

    def test_fanno_from_u_uStar(self):
        inst = ffr(self.gamma, u_uSt=1.364576478442026)
        inst.apply_pipe_parameters(0.4, 1.5, 0.005)
        self._assert_unchoked_flow(inst)

    def test_fanno_choked_flow(self):
        # The 22-length pipe is long enough that the exit reaches M = 1.
        inst = ffr(self.gamma, mach=1.5)
        inst.apply_pipe_parameters(0.4, 22, 0.005)
        self._assert_upstream(inst)
        assert inst.chockedFlow
        self._assert_downstream(inst, self._DOWNSTREAM_CHOKED)
        self._assert_ratios(inst)
| 55.406015 | 103 | 0.652429 | 5,018 | 29,476 | 3.640295 | 0.026903 | 0.193792 | 0.061751 | 0.115618 | 0.967647 | 0.965457 | 0.950895 | 0.945092 | 0.908414 | 0.900531 | 0 | 0.110992 | 0.206507 | 29,476 | 531 | 104 | 55.510358 | 0.670016 | 0.002544 | 0 | 0.846154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.814969 | 1 | 0.08316 | false | 0 | 0.006237 | 0 | 0.101871 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
b9b83f4dd0fec2e9ccceca41d1f6d2840cba2b09 | 68,663 | py | Python | ibm_platform_services/iam_policy_management_v1.py | swcolley/platform-services-python-sdk | c146d25a1e1800da23cde872ff38c39e951516a6 | [
"Apache-2.0"
] | null | null | null | ibm_platform_services/iam_policy_management_v1.py | swcolley/platform-services-python-sdk | c146d25a1e1800da23cde872ff38c39e951516a6 | [
"Apache-2.0"
] | null | null | null | ibm_platform_services/iam_policy_management_v1.py | swcolley/platform-services-python-sdk | c146d25a1e1800da23cde872ff38c39e951516a6 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
# (C) Copyright IBM Corp. 2021.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# IBM OpenAPI SDK Code Generator Version: 3.29.1-b338fb38-20210313-010605
"""
IAM Policy Management API
"""
from datetime import datetime
from typing import Dict, List
import json
from ibm_cloud_sdk_core import BaseService, DetailedResponse
from ibm_cloud_sdk_core.authenticators.authenticator import Authenticator
from ibm_cloud_sdk_core.get_authenticator import get_authenticator_from_environment
from ibm_cloud_sdk_core.utils import convert_model, datetime_to_string, string_to_datetime
from .common import get_sdk_headers
##############################################################################
# Service
##############################################################################
class IamPolicyManagementV1(BaseService):
"""The iam_policy_management V1 service."""
DEFAULT_SERVICE_URL = 'https://iam.cloud.ibm.com'
DEFAULT_SERVICE_NAME = 'iam_policy_management'
@classmethod
def new_instance(cls,
service_name: str = DEFAULT_SERVICE_NAME,
) -> 'IamPolicyManagementV1':
"""
Return a new client for the iam_policy_management service using the
specified parameters and external configuration.
"""
authenticator = get_authenticator_from_environment(service_name)
service = cls(
authenticator
)
service.configure_service(service_name)
return service
def __init__(self,
authenticator: Authenticator = None,
) -> None:
"""
Construct a new client for the iam_policy_management service.
:param Authenticator authenticator: The authenticator specifies the authentication mechanism.
Get up to date information from https://github.com/IBM/python-sdk-core/blob/master/README.md
about initializing the authenticator of your choice.
"""
BaseService.__init__(self,
service_url=self.DEFAULT_SERVICE_URL,
authenticator=authenticator)
#########################
# Policies
#########################
def list_policies(self,
account_id: str,
*,
accept_language: str = None,
iam_id: str = None,
access_group_id: str = None,
type: str = None,
service_type: str = None,
tag_name: str = None,
tag_value: str = None,
sort: str = None,
format: str = None,
state: str = None,
**kwargs
) -> DetailedResponse:
"""
Get policies by attributes.
Get policies and filter by attributes. While managing policies, you may want to
retrieve policies in the account and filter by attribute values. This can be done
through query parameters. Currently, only the following attributes are supported:
account_id, iam_id, access_group_id, type, service_type, sort, format and state.
account_id is a required query parameter. Only policies that have the specified
attributes and that the caller has read access to are returned. If the caller does
not have read access to any policies an empty array is returned.
:param str account_id: The account GUID in which the policies belong to.
:param str accept_language: (optional) Translation language code.
:param str iam_id: (optional) The IAM ID used to identify the subject.
:param str access_group_id: (optional) The access group id.
:param str type: (optional) The type of policy (access or authorization).
:param str service_type: (optional) The type of service.
:param str tag_name: (optional) The name of the access management tag in
the policy.
:param str tag_value: (optional) The value of the access management tag in
the policy.
:param str sort: (optional) Sort the results by any of the top level policy
fields (id, created_at, created_by_id, last_modified_at, etc).
:param str format: (optional) Include additional data per policy returned
[include_last_permit, display].
:param str state: (optional) The state of the policy, 'active' or
'deleted'.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `PolicyList` object
"""
if account_id is None:
raise ValueError('account_id must be provided')
headers = {
'Accept-Language': accept_language
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='list_policies')
headers.update(sdk_headers)
params = {
'account_id': account_id,
'iam_id': iam_id,
'access_group_id': access_group_id,
'type': type,
'service_type': service_type,
'tag_name': tag_name,
'tag_value': tag_value,
'sort': sort,
'format': format,
'state': state
}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
url = '/v1/policies'
request = self.prepare_request(method='GET',
url=url,
headers=headers,
params=params)
response = self.send(request)
return response
def create_policy(self,
type: str,
subjects: List['PolicySubject'],
roles: List['PolicyRole'],
resources: List['PolicyResource'],
*,
description: str = None,
accept_language: str = None,
**kwargs
) -> DetailedResponse:
"""
Create a policy.
Creates a policy to grant access between a subject and a resource. There are two
types of policies: **access** and **authorization**. A policy administrator might
want to create an access policy which grants access to a user, service-id, or an
access group. They might also want to create an authorization policy and setup
access between services.
### Access
To create an access policy, use **`"type": "access"`** in the body. The possible
subject attributes are **`iam_id`** and **`access_group_id`**. Use the
**`iam_id`** subject attribute for assigning access for a user or service-id. Use
the **`access_group_id`** subject attribute for assigning access for an access
group. The roles must be a subset of a service's or the platform's supported
roles. The resource attributes must be a subset of a service's or the platform's
supported attributes. The policy resource must include either the
**`serviceType`**, **`serviceName`**, or **`resourceGroupId`** attribute and the
**`accountId`** attribute.` If the subject is a locked service-id, the request
will fail.
### Authorization
Authorization policies are supported by services on a case by case basis. Refer to
service documentation to verify their support of authorization policies. To create
an authorization policy, use **`"type": "authorization"`** in the body. The
subject attributes must match the supported authorization subjects of the
resource. Multiple subject attributes might be provided. The following attributes
are supported:
serviceName, serviceInstance, region, resourceType, resource, accountId The
policy roles must be a subset of the supported authorization roles supported by
the target service. The user must also have the same level of access or greater to
the target resource in order to grant the role. The resource attributes must be a
subset of a service's or the platform's supported attributes. Both the policy
subject and the policy resource must include the **`serviceName`** and
**`accountId`** attributes.
:param str type: The policy type; either 'access' or 'authorization'.
:param List[PolicySubject] subjects: The subjects associated with a policy.
:param List[PolicyRole] roles: A set of role cloud resource names (CRNs)
granted by the policy.
:param List[PolicyResource] resources: The resources associated with a
policy.
:param str description: (optional) Customer-defined description.
:param str accept_language: (optional) Translation language code.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `Policy` object
"""
if type is None:
raise ValueError('type must be provided')
if subjects is None:
raise ValueError('subjects must be provided')
if roles is None:
raise ValueError('roles must be provided')
if resources is None:
raise ValueError('resources must be provided')
subjects = [convert_model(x) for x in subjects]
roles = [convert_model(x) for x in roles]
resources = [convert_model(x) for x in resources]
headers = {
'Accept-Language': accept_language
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='create_policy')
headers.update(sdk_headers)
data = {
'type': type,
'subjects': subjects,
'roles': roles,
'resources': resources,
'description': description
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
url = '/v1/policies'
request = self.prepare_request(method='POST',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def update_policy(self,
policy_id: str,
if_match: str,
type: str,
subjects: List['PolicySubject'],
roles: List['PolicyRole'],
resources: List['PolicyResource'],
*,
description: str = None,
**kwargs
) -> DetailedResponse:
"""
Update a policy.
Update a policy to grant access between a subject and a resource. A policy
administrator might want to update an existing policy. The policy type cannot be
changed (You cannot change an access policy to an authorization policy).
### Access
To update an access policy, use **`"type": "access"`** in the body. The possible
subject attributes are **`iam_id`** and **`access_group_id`**. Use the
**`iam_id`** subject attribute for assigning access for a user or service-id. Use
the **`access_group_id`** subject attribute for assigning access for an access
group. The roles must be a subset of a service's or the platform's supported
roles. The resource attributes must be a subset of a service's or the platform's
supported attributes. The policy resource must include either the
**`serviceType`**, **`serviceName`**, or **`resourceGroupId`** attribute and the
**`accountId`** attribute.` If the subject is a locked service-id, the request
will fail.
### Authorization
To update an authorization policy, use **`"type": "authorization"`** in the body.
The subject attributes must match the supported authorization subjects of the
resource. Multiple subject attributes might be provided. The following attributes
are supported:
serviceName, serviceInstance, region, resourceType, resource, accountId The
policy roles must be a subset of the supported authorization roles supported by
the target service. The user must also have the same level of access or greater to
the target resource in order to grant the role. The resource attributes must be a
subset of a service's or the platform's supported attributes. Both the policy
subject and the policy resource must include the **`serviceName`** and
**`accountId`** attributes.
:param str policy_id: The policy ID.
:param str if_match: The revision number for updating a policy and must
match the ETag value of the existing policy. The Etag can be retrieved
using the GET /v1/policies/{policy_id} API and looking at the ETag response
header.
:param str type: The policy type; either 'access' or 'authorization'.
:param List[PolicySubject] subjects: The subjects associated with a policy.
:param List[PolicyRole] roles: A set of role cloud resource names (CRNs)
granted by the policy.
:param List[PolicyResource] resources: The resources associated with a
policy.
:param str description: (optional) Customer-defined description.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `Policy` object
"""
if policy_id is None:
raise ValueError('policy_id must be provided')
if if_match is None:
raise ValueError('if_match must be provided')
if type is None:
raise ValueError('type must be provided')
if subjects is None:
raise ValueError('subjects must be provided')
if roles is None:
raise ValueError('roles must be provided')
if resources is None:
raise ValueError('resources must be provided')
subjects = [convert_model(x) for x in subjects]
roles = [convert_model(x) for x in roles]
resources = [convert_model(x) for x in resources]
headers = {
'If-Match': if_match
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_policy')
headers.update(sdk_headers)
data = {
'type': type,
'subjects': subjects,
'roles': roles,
'resources': resources,
'description': description
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['policy_id']
path_param_values = self.encode_path_vars(policy_id)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/policies/{policy_id}'.format(**path_param_dict)
request = self.prepare_request(method='PUT',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def get_policy(self,
policy_id: str,
**kwargs
) -> DetailedResponse:
"""
Retrieve a policy by ID.
Retrieve a policy by providing a policy ID.
:param str policy_id: The policy ID.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `Policy` object
"""
if policy_id is None:
raise ValueError('policy_id must be provided')
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_policy')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['policy_id']
path_param_values = self.encode_path_vars(policy_id)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/policies/{policy_id}'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def delete_policy(self,
policy_id: str,
**kwargs
) -> DetailedResponse:
"""
Delete a policy by ID.
Delete a policy by providing a policy ID. A policy cannot be deleted if the
subject ID contains a locked service ID. If the subject of the policy is a locked
service-id, the request will fail.
:param str policy_id: The policy ID.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
if policy_id is None:
raise ValueError('policy_id must be provided')
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='delete_policy')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
path_param_keys = ['policy_id']
path_param_values = self.encode_path_vars(policy_id)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/policies/{policy_id}'.format(**path_param_dict)
request = self.prepare_request(method='DELETE',
url=url,
headers=headers)
response = self.send(request)
return response
def patch_policy(self,
policy_id: str,
if_match: str,
*,
state: str = None,
**kwargs
) -> DetailedResponse:
"""
Restore a deleted policy by ID.
Restore a policy that has recently been deleted. A policy administrator might want
to restore a deleted policy. To restore a policy, use **`"state": "active"`** in
the body.
:param str policy_id: The policy ID.
:param str if_match: The revision number for updating a policy and must
match the ETag value of the existing policy. The Etag can be retrieved
using the GET /v1/policies/{policy_id} API and looking at the ETag response
header.
:param str state: (optional) The policy state; either 'active' or
'deleted'.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `Policy` object
"""
if policy_id is None:
raise ValueError('policy_id must be provided')
if if_match is None:
raise ValueError('if_match must be provided')
headers = {
'If-Match': if_match
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='patch_policy')
headers.update(sdk_headers)
data = {
'state': state
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['policy_id']
path_param_values = self.encode_path_vars(policy_id)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v1/policies/{policy_id}'.format(**path_param_dict)
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
#########################
# Roles
#########################
def list_roles(self,
*,
accept_language: str = None,
account_id: str = None,
service_name: str = None,
**kwargs
) -> DetailedResponse:
"""
Get roles by filters.
Get roles based on the filters. While managing roles, you may want to retrieve
roles and filter by usages. This can be done through query parameters. Currently,
we only support the following attributes: account_id, and service_name. Only roles
that match the filter and that the caller has read access to are returned. If the
caller does not have read access to any roles an empty array is returned.
:param str accept_language: (optional) Translation language code.
:param str account_id: (optional) The account GUID in which the roles
belong to.
:param str service_name: (optional) The name of service.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `RoleList` object
"""
headers = {
'Accept-Language': accept_language
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='list_roles')
headers.update(sdk_headers)
params = {
'account_id': account_id,
'service_name': service_name
}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
url = '/v2/roles'
request = self.prepare_request(method='GET',
url=url,
headers=headers,
params=params)
response = self.send(request)
return response
def create_role(self,
display_name: str,
actions: List[str],
name: str,
account_id: str,
service_name: str,
*,
description: str = None,
accept_language: str = None,
**kwargs
) -> DetailedResponse:
"""
Create a role.
Creates a custom role for a specific service within the account. An account owner
or a user assigned the Administrator role on the Role management service can
create a custom role. Any number of actions for a single service can be mapped to
the new role, but there must be at least one service-defined action to
successfully create the new role.
:param str display_name: The display name of the role that is shown in the
console.
:param List[str] actions: The actions of the role.
:param str name: The name of the role that is used in the CRN. Can only be
alphanumeric and has to be capitalized.
:param str account_id: The account GUID.
:param str service_name: The service name.
:param str description: (optional) The description of the role.
:param str accept_language: (optional) Translation language code.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `CustomRole` object
"""
if display_name is None:
raise ValueError('display_name must be provided')
if actions is None:
raise ValueError('actions must be provided')
if name is None:
raise ValueError('name must be provided')
if account_id is None:
raise ValueError('account_id must be provided')
if service_name is None:
raise ValueError('service_name must be provided')
headers = {
'Accept-Language': accept_language
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='create_role')
headers.update(sdk_headers)
data = {
'display_name': display_name,
'actions': actions,
'name': name,
'account_id': account_id,
'service_name': service_name,
'description': description
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
url = '/v2/roles'
request = self.prepare_request(method='POST',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def update_role(self,
                role_id: str,
                if_match: str,
                *,
                display_name: str = None,
                description: str = None,
                actions: List[str] = None,
                **kwargs
               ) -> DetailedResponse:
    """
    Update a role.

    Update a custom role. A role administrator might want to update an existing role
    by updating the display name, description, or the actions that are mapped to the
    role. The name, account_id, and service_name can't be changed.

    :param str role_id: The role ID.
    :param str if_match: The revision number for updating a role and must match
           the ETag value of the existing role. The ETag can be retrieved using the
           GET /v2/roles/{role_id} API and looking at the ETag response header.
    :param str display_name: (optional) The display name of the role that is
           shown in the console.
    :param str description: (optional) The description of the role.
    :param List[str] actions: (optional) The actions of the role.
    :param dict headers: A `dict` containing the request headers
    :return: A `DetailedResponse` containing the result, headers and HTTP status code.
    :rtype: DetailedResponse with `dict` result representing a `CustomRole` object
    """
    # Both path and precondition arguments are mandatory.
    if role_id is None:
        raise ValueError('role_id must be provided')
    if if_match is None:
        raise ValueError('if_match must be provided')

    headers = {'If-Match': if_match}
    headers.update(get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
                                   service_version='V1',
                                   operation_id='update_role'))

    # Only send fields the caller actually supplied.
    body = {
        'display_name': display_name,
        'description': description,
        'actions': actions,
    }
    body = json.dumps({field: value for field, value in body.items()
                       if value is not None})
    headers['content-type'] = 'application/json'

    if 'headers' in kwargs:
        headers.update(kwargs.get('headers'))
    headers['Accept'] = 'application/json'

    path_param_dict = dict(zip(['role_id'], self.encode_path_vars(role_id)))
    request = self.prepare_request(method='PUT',
                                   url='/v2/roles/{role_id}'.format(**path_param_dict),
                                   headers=headers,
                                   data=body)

    return self.send(request)
def get_role(self,
             role_id: str,
             **kwargs
            ) -> DetailedResponse:
    """
    Retrieve a role by ID.

    Retrieve a role by providing a role ID.

    :param str role_id: The role ID.
    :param dict headers: A `dict` containing the request headers
    :return: A `DetailedResponse` containing the result, headers and HTTP status code.
    :rtype: DetailedResponse with `dict` result representing a `CustomRole` object
    """
    if role_id is None:
        raise ValueError('role_id must be provided')

    headers = {}
    headers.update(get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
                                   service_version='V1',
                                   operation_id='get_role'))
    if 'headers' in kwargs:
        headers.update(kwargs.get('headers'))
    headers['Accept'] = 'application/json'

    path_param_dict = dict(zip(['role_id'], self.encode_path_vars(role_id)))
    request = self.prepare_request(method='GET',
                                   url='/v2/roles/{role_id}'.format(**path_param_dict),
                                   headers=headers)

    return self.send(request)
def delete_role(self,
                role_id: str,
                **kwargs
               ) -> DetailedResponse:
    """
    Delete a role by ID.

    Delete a role by providing a role ID.

    :param str role_id: The role ID.
    :param dict headers: A `dict` containing the request headers
    :return: A `DetailedResponse` containing the result, headers and HTTP status code.
    :rtype: DetailedResponse
    """
    if role_id is None:
        raise ValueError('role_id must be provided')

    headers = {}
    headers.update(get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
                                   service_version='V1',
                                   operation_id='delete_role'))
    if 'headers' in kwargs:
        headers.update(kwargs.get('headers'))

    # DELETE returns no body, so no Accept header is set here.
    path_param_dict = dict(zip(['role_id'], self.encode_path_vars(role_id)))
    request = self.prepare_request(method='DELETE',
                                   url='/v2/roles/{role_id}'.format(**path_param_dict),
                                   headers=headers)

    return self.send(request)
##############################################################################
# Models
##############################################################################
class CustomRole():
    """
    An additional set of properties associated with a role.

    :attr str id: (optional) The role ID.
    :attr str display_name: (optional) The display name of the role that is shown in
          the console.
    :attr str description: (optional) The description of the role.
    :attr List[str] actions: (optional) The actions of the role.
    :attr str crn: (optional) The role CRN.
    :attr str name: (optional) The name of the role that is used in the CRN. Can
          only be alphanumeric and has to be capitalized.
    :attr str account_id: (optional) The account GUID.
    :attr str service_name: (optional) The service name.
    :attr datetime created_at: (optional) The UTC timestamp when the role was
          created.
    :attr str created_by_id: (optional) The iam ID of the entity that created the
          role.
    :attr datetime last_modified_at: (optional) The UTC timestamp when the role was
          last modified.
    :attr str last_modified_by_id: (optional) The iam ID of the entity that last
          modified the policy.
    :attr str href: (optional) The href link back to the role.
    """

    def __init__(self,
                 *,
                 id: str = None,
                 display_name: str = None,
                 description: str = None,
                 actions: List[str] = None,
                 crn: str = None,
                 name: str = None,
                 account_id: str = None,
                 service_name: str = None,
                 created_at: datetime = None,
                 created_by_id: str = None,
                 last_modified_at: datetime = None,
                 last_modified_by_id: str = None,
                 href: str = None) -> None:
        """
        Initialize a CustomRole object.

        :param str display_name: (optional) The display name of the role that is
               shown in the console.
        :param str description: (optional) The description of the role.
        :param List[str] actions: (optional) The actions of the role.
        :param str name: (optional) The name of the role that is used in the CRN.
               Can only be alphanumeric and has to be capitalized.
        :param str account_id: (optional) The account GUID.
        :param str service_name: (optional) The service name.
        """
        self.id = id
        self.display_name = display_name
        self.description = description
        self.actions = actions
        self.crn = crn
        self.name = name
        self.account_id = account_id
        self.service_name = service_name
        self.created_at = created_at
        self.created_by_id = created_by_id
        self.last_modified_at = last_modified_at
        self.last_modified_by_id = last_modified_by_id
        self.href = href

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'CustomRole':
        """Initialize a CustomRole object from a json dictionary."""
        args = {}
        # Walk keys in a fixed order; timestamps are parsed, the rest copied as-is.
        for key in ('id', 'display_name', 'description', 'actions', 'crn',
                    'name', 'account_id', 'service_name', 'created_at',
                    'created_by_id', 'last_modified_at', 'last_modified_by_id',
                    'href'):
            if key not in _dict:
                continue
            raw = _dict.get(key)
            if key in ('created_at', 'last_modified_at'):
                raw = string_to_datetime(raw)
            args[key] = raw
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a CustomRole object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        serialized = {}
        # Keys are emitted in the canonical model order; None values are omitted.
        for key in ('id', 'display_name', 'description', 'actions', 'crn',
                    'name', 'account_id', 'service_name', 'created_at',
                    'created_by_id', 'last_modified_at', 'last_modified_by_id',
                    'href'):
            value = getattr(self, key, None)
            if value is None:
                continue
            if key in ('created_at', 'last_modified_at'):
                value = datetime_to_string(value)
            serialized[key] = value
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this CustomRole object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'CustomRole') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'CustomRole') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class Policy():
"""
The core set of properties associated with a policy.
:attr str id: (optional) The policy ID.
:attr str type: (optional) The policy type; either 'access' or 'authorization'.
:attr str description: (optional) Customer-defined description.
:attr List[PolicySubject] subjects: (optional) The subjects associated with a
policy.
:attr List[PolicyRole] roles: (optional) A set of role cloud resource names
(CRNs) granted by the policy.
:attr List[PolicyResource] resources: (optional) The resources associated with a
policy.
:attr str href: (optional) The href link back to the policy.
:attr datetime created_at: (optional) The UTC timestamp when the policy was
created.
:attr str created_by_id: (optional) The iam ID of the entity that created the
policy.
:attr datetime last_modified_at: (optional) The UTC timestamp when the policy
was last modified.
:attr str last_modified_by_id: (optional) The iam ID of the entity that last
modified the policy.
"""
def __init__(self,
*,
id: str = None,
type: str = None,
description: str = None,
subjects: List['PolicySubject'] = None,
roles: List['PolicyRole'] = None,
resources: List['PolicyResource'] = None,
href: str = None,
created_at: datetime = None,
created_by_id: str = None,
last_modified_at: datetime = None,
last_modified_by_id: str = None) -> None:
"""
Initialize a Policy object.
:param str type: (optional) The policy type; either 'access' or
'authorization'.
:param str description: (optional) Customer-defined description.
:param List[PolicySubject] subjects: (optional) The subjects associated
with a policy.
:param List[PolicyRole] roles: (optional) A set of role cloud resource
names (CRNs) granted by the policy.
:param List[PolicyResource] resources: (optional) The resources associated
with a policy.
"""
self.id = id
self.type = type
self.description = description
self.subjects = subjects
self.roles = roles
self.resources = resources
self.href = href
self.created_at = created_at
self.created_by_id = created_by_id
self.last_modified_at = last_modified_at
self.last_modified_by_id = last_modified_by_id
@classmethod
def from_dict(cls, _dict: Dict) -> 'Policy':
"""Initialize a Policy object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
if 'type' in _dict:
args['type'] = _dict.get('type')
if 'description' in _dict:
args['description'] = _dict.get('description')
if 'subjects' in _dict:
args['subjects'] = [PolicySubject.from_dict(x) for x in _dict.get('subjects')]
if 'roles' in _dict:
args['roles'] = [PolicyRole.from_dict(x) for x in _dict.get('roles')]
if 'resources' in _dict:
args['resources'] = [PolicyResource.from_dict(x) for x in _dict.get('resources')]
if 'href' in _dict:
args['href'] = _dict.get('href')
if 'created_at' in _dict:
args['created_at'] = string_to_datetime(_dict.get('created_at'))
if 'created_by_id' in _dict:
args['created_by_id'] = _dict.get('created_by_id')
if 'last_modified_at' in _dict:
args['last_modified_at'] = string_to_datetime(_dict.get('last_modified_at'))
if 'last_modified_by_id' in _dict:
args['last_modified_by_id'] = _dict.get('last_modified_by_id')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Policy object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and getattr(self, 'id') is not None:
_dict['id'] = getattr(self, 'id')
if hasattr(self, 'type') and self.type is not None:
_dict['type'] = self.type
if hasattr(self, 'description') and self.description is not None:
_dict['description'] = self.description
if hasattr(self, 'subjects') and self.subjects is not None:
_dict['subjects'] = [x.to_dict() for x in self.subjects]
if hasattr(self, 'roles') and self.roles is not None:
_dict['roles'] = [x.to_dict() for x in self.roles]
if hasattr(self, 'resources') and self.resources is not None:
_dict['resources'] = [x.to_dict() for x in self.resources]
if hasattr(self, 'href') and getattr(self, 'href') is not None:
_dict['href'] = getattr(self, 'href')
if hasattr(self, 'created_at') and getattr(self, 'created_at') is not None:
_dict['created_at'] = datetime_to_string(getattr(self, 'created_at'))
if hasattr(self, 'created_by_id') and getattr(self, 'created_by_id') is not None:
_dict['created_by_id'] = getattr(self, 'created_by_id')
if hasattr(self, 'last_modified_at') and getattr(self, 'last_modified_at') is not None:
_dict['last_modified_at'] = datetime_to_string(getattr(self, 'last_modified_at'))
if hasattr(self, 'last_modified_by_id') and getattr(self, 'last_modified_by_id') is not None:
_dict['last_modified_by_id'] = getattr(self, 'last_modified_by_id')
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Policy object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Policy') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Policy') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class PolicyList():
    """
    A collection of policies.

    :attr List[Policy] policies: (optional) List of policies.
    """

    def __init__(self,
                 *,
                 policies: List['Policy'] = None) -> None:
        """
        Initialize a PolicyList object.

        :param List[Policy] policies: (optional) List of policies.
        """
        self.policies = policies

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'PolicyList':
        """Initialize a PolicyList object from a json dictionary."""
        args = {}
        raw_policies = _dict.get('policies')
        if 'policies' in _dict:
            args['policies'] = [Policy.from_dict(item) for item in raw_policies]
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a PolicyList object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        serialized = {}
        if getattr(self, 'policies', None) is not None:
            serialized['policies'] = [item.to_dict() for item in self.policies]
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this PolicyList object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'PolicyList') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'PolicyList') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class PolicyResource():
    """
    The attributes of the resource. Note that only one resource is allowed in a policy.

    :attr List[ResourceAttribute] attributes: (optional) List of resource
          attributes.
    :attr List[ResourceTag] tags: (optional) List of access management tags.
    """

    def __init__(self,
                 *,
                 attributes: List['ResourceAttribute'] = None,
                 tags: List['ResourceTag'] = None) -> None:
        """
        Initialize a PolicyResource object.

        :param List[ResourceAttribute] attributes: (optional) List of resource
               attributes.
        :param List[ResourceTag] tags: (optional) List of access management tags.
        """
        self.attributes = attributes
        self.tags = tags

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'PolicyResource':
        """Initialize a PolicyResource object from a json dictionary."""
        args = {}
        if 'attributes' in _dict:
            args['attributes'] = [ResourceAttribute.from_dict(item)
                                  for item in _dict.get('attributes')]
        if 'tags' in _dict:
            args['tags'] = [ResourceTag.from_dict(item)
                            for item in _dict.get('tags')]
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a PolicyResource object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        serialized = {}
        for key in ('attributes', 'tags'):
            value = getattr(self, key, None)
            if value is not None:
                serialized[key] = [item.to_dict() for item in value]
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this PolicyResource object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'PolicyResource') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'PolicyResource') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class PolicyRole():
    """
    A role associated with a policy.

    :attr str role_id: The role cloud resource name granted by the policy.
    :attr str display_name: (optional) The display name of the role.
    :attr str description: (optional) The description of the role.
    """

    def __init__(self,
                 role_id: str,
                 *,
                 display_name: str = None,
                 description: str = None) -> None:
        """
        Initialize a PolicyRole object.

        :param str role_id: The role cloud resource name granted by the policy.
        """
        self.role_id = role_id
        self.display_name = display_name
        self.description = description

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'PolicyRole':
        """Initialize a PolicyRole object from a json dictionary."""
        # role_id is the only required property.
        if 'role_id' not in _dict:
            raise ValueError("Required property 'role_id' not present in PolicyRole JSON")
        args = {'role_id': _dict.get('role_id')}
        for key in ('display_name', 'description'):
            if key in _dict:
                args[key] = _dict.get(key)
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a PolicyRole object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        serialized = {}
        for key in ('role_id', 'display_name', 'description'):
            value = getattr(self, key, None)
            if value is not None:
                serialized[key] = value
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this PolicyRole object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'PolicyRole') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'PolicyRole') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class PolicySubject():
    """
    The subject attribute values that must match in order for this policy to apply in a
    permission decision.

    :attr List[SubjectAttribute] attributes: (optional) List of subject attributes.
    """

    def __init__(self,
                 *,
                 attributes: List['SubjectAttribute'] = None) -> None:
        """
        Initialize a PolicySubject object.

        :param List[SubjectAttribute] attributes: (optional) List of subject
               attributes.
        """
        self.attributes = attributes

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'PolicySubject':
        """Initialize a PolicySubject object from a json dictionary."""
        args = {}
        if 'attributes' in _dict:
            args['attributes'] = [SubjectAttribute.from_dict(item)
                                  for item in _dict.get('attributes')]
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a PolicySubject object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        serialized = {}
        if getattr(self, 'attributes', None) is not None:
            serialized['attributes'] = [item.to_dict() for item in self.attributes]
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this PolicySubject object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'PolicySubject') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'PolicySubject') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class ResourceAttribute():
    """
    An attribute associated with a resource.

    :attr str name: The name of an attribute.
    :attr str value: The value of an attribute.
    :attr str operator: (optional) The operator of an attribute.
    """

    def __init__(self,
                 name: str,
                 value: str,
                 *,
                 operator: str = None) -> None:
        """
        Initialize a ResourceAttribute object.

        :param str name: The name of an attribute.
        :param str value: The value of an attribute.
        :param str operator: (optional) The operator of an attribute.
        """
        self.name = name
        self.value = value
        self.operator = operator

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'ResourceAttribute':
        """Initialize a ResourceAttribute object from a json dictionary."""
        args = {}
        # name and value are required; operator is optional.
        for key in ('name', 'value'):
            if key not in _dict:
                raise ValueError(
                    "Required property '%s' not present in ResourceAttribute JSON" % key)
            args[key] = _dict.get(key)
        if 'operator' in _dict:
            args['operator'] = _dict.get('operator')
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a ResourceAttribute object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        serialized = {}
        for key in ('name', 'value', 'operator'):
            attr = getattr(self, key, None)
            if attr is not None:
                serialized[key] = attr
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this ResourceAttribute object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'ResourceAttribute') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'ResourceAttribute') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class ResourceTag():
    """
    A tag associated with a resource.

    :attr str name: The name of an access management tag.
    :attr str value: The value of an access management tag.
    :attr str operator: (optional) The operator of an access management tag.
    """

    def __init__(self,
                 name: str,
                 value: str,
                 *,
                 operator: str = None) -> None:
        """
        Initialize a ResourceTag object.

        :param str name: The name of an access management tag.
        :param str value: The value of an access management tag.
        :param str operator: (optional) The operator of an access management tag.
        """
        self.name = name
        self.value = value
        self.operator = operator

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'ResourceTag':
        """Initialize a ResourceTag object from a json dictionary."""
        args = {}
        # name and value are required; operator is optional.
        for key in ('name', 'value'):
            if key not in _dict:
                raise ValueError(
                    "Required property '%s' not present in ResourceTag JSON" % key)
            args[key] = _dict.get(key)
        if 'operator' in _dict:
            args['operator'] = _dict.get('operator')
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a ResourceTag object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        serialized = {}
        for key in ('name', 'value', 'operator'):
            attr = getattr(self, key, None)
            if attr is not None:
                serialized[key] = attr
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this ResourceTag object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'ResourceTag') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'ResourceTag') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class Role():
    """
    A role resource.

    :attr str display_name: (optional) The display name of the role that is shown in
          the console.
    :attr str description: (optional) The description of the role.
    :attr List[str] actions: (optional) The actions of the role.
    :attr str crn: (optional) The role CRN.
    """

    def __init__(self,
                 *,
                 display_name: str = None,
                 description: str = None,
                 actions: List[str] = None,
                 crn: str = None) -> None:
        """
        Initialize a Role object.

        :param str display_name: (optional) The display name of the role that is
               shown in the console.
        :param str description: (optional) The description of the role.
        :param List[str] actions: (optional) The actions of the role.
        """
        self.display_name = display_name
        self.description = description
        self.actions = actions
        self.crn = crn

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'Role':
        """Initialize a Role object from a json dictionary."""
        args = {key: _dict.get(key)
                for key in ('display_name', 'description', 'actions', 'crn')
                if key in _dict}
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a Role object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        serialized = {}
        for key in ('display_name', 'description', 'actions', 'crn'):
            value = getattr(self, key, None)
            if value is not None:
                serialized[key] = value
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this Role object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'Role') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'Role') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class RoleList():
"""
A collection of roles returned by the 'list roles' operation.
:attr List[CustomRole] custom_roles: (optional) List of custom roles.
:attr List[Role] service_roles: (optional) List of service roles.
:attr List[Role] system_roles: (optional) List of system roles.
"""
def __init__(self,
*,
custom_roles: List['CustomRole'] = None,
service_roles: List['Role'] = None,
system_roles: List['Role'] = None) -> None:
"""
Initialize a RoleList object.
:param List[CustomRole] custom_roles: (optional) List of custom roles.
:param List[Role] service_roles: (optional) List of service roles.
:param List[Role] system_roles: (optional) List of system roles.
"""
self.custom_roles = custom_roles
self.service_roles = service_roles
self.system_roles = system_roles
@classmethod
def from_dict(cls, _dict: Dict) -> 'RoleList':
"""Initialize a RoleList object from a json dictionary."""
args = {}
if 'custom_roles' in _dict:
args['custom_roles'] = [CustomRole.from_dict(x) for x in _dict.get('custom_roles')]
if 'service_roles' in _dict:
args['service_roles'] = [Role.from_dict(x) for x in _dict.get('service_roles')]
if 'system_roles' in _dict:
args['system_roles'] = [Role.from_dict(x) for x in _dict.get('system_roles')]
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a RoleList object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'custom_roles') and self.custom_roles is not None:
_dict['custom_roles'] = [x.to_dict() for x in self.custom_roles]
if hasattr(self, 'service_roles') and self.service_roles is not None:
_dict['service_roles'] = [x.to_dict() for x in self.service_roles]
if hasattr(self, 'system_roles') and self.system_roles is not None:
_dict['system_roles'] = [x.to_dict() for x in self.system_roles]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this RoleList object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'RoleList') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'RoleList') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class SubjectAttribute():
    """
    An attribute associated with a subject.

    :attr str name: The name of an attribute.
    :attr str value: The value of an attribute.
    """

    def __init__(self,
                 name: str,
                 value: str) -> None:
        """
        Initialize a SubjectAttribute object.

        :param str name: The name of an attribute.
        :param str value: The value of an attribute.
        """
        self.name = name
        self.value = value

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'SubjectAttribute':
        """Initialize a SubjectAttribute object from a json dictionary."""
        args = {}
        # Both properties are required.
        for key in ('name', 'value'):
            if key not in _dict:
                raise ValueError(
                    "Required property '%s' not present in SubjectAttribute JSON" % key)
            args[key] = _dict.get(key)
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a SubjectAttribute object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        serialized = {}
        for key in ('name', 'value'):
            attr = getattr(self, key, None)
            if attr is not None:
                serialized[key] = attr
        return serialized

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this SubjectAttribute object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'SubjectAttribute') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'SubjectAttribute') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
| 40.318849 | 107 | 0.598153 | 8,181 | 68,663 | 4.851607 | 0.047671 | 0.016074 | 0.011564 | 0.015066 | 0.826913 | 0.799274 | 0.784687 | 0.757955 | 0.719987 | 0.691492 | 0 | 0.001482 | 0.302099 | 68,663 | 1,702 | 108 | 40.342538 | 0.826795 | 0.348849 | 0 | 0.762448 | 0 | 0 | 0.125837 | 0.003359 | 0 | 0 | 0 | 0 | 0 | 1 | 0.104772 | false | 0 | 0.008299 | 0 | 0.231328 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b9bfd8bdaec450f64918ab633a5ed82e9064c866 | 25,243 | py | Python | tests/api/endpoints/test_file_view.py | jjzhang166/seahub | 8ced28759fc1e158196a7743eb149882451f9143 | [
"Apache-2.0"
] | null | null | null | tests/api/endpoints/test_file_view.py | jjzhang166/seahub | 8ced28759fc1e158196a7743eb149882451f9143 | [
"Apache-2.0"
] | null | null | null | tests/api/endpoints/test_file_view.py | jjzhang166/seahub | 8ced28759fc1e158196a7743eb149882451f9143 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import json
import posixpath
from seaserv import seafile_api
from django.core.urlresolvers import reverse
from seahub.test_utils import BaseTestCase
from seahub.utils import check_filename_with_rename
from tests.common.utils import randstring
try:
from seahub.settings import LOCAL_PRO_DEV_ENV
except ImportError:
LOCAL_PRO_DEV_ENV = False
class FileViewTest(BaseTestCase):
    """API tests for the ``api-v2.1-file-view`` endpoint.

    Covers GET (file info), POST (create/rename/move/copy/revert),
    PUT (lock/unlock) and DELETE, each exercised both by the owning
    user and by users whose repo/sub-folder permission should yield 403.
    Folder-level permission cases are pro-edition only and are skipped
    unless LOCAL_PRO_DEV_ENV is set.
    """

    def create_new_repo(self):
        """Create a throw-away library owned by the normal test user."""
        new_repo_id = seafile_api.create_repo(name='test-repo-2', desc='',
                username=self.user.username, passwd=None)

        return new_repo_id

    def admin_create_new_repo(self):
        """Create a throw-away library owned by the admin test user."""
        new_repo_id = seafile_api.create_repo(name='test-repo-2', desc='',
                username=self.admin.username, passwd=None)

        return new_repo_id

    def get_lib_file_name(self, repo_id):
        """Return the name of the first file in the library root, or None.

        Used by the tests to assert that a file was created, renamed,
        moved or deleted.
        """
        url = reverse('list_lib_dir', args=[repo_id])
        resp = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        json_resp = json.loads(resp.content)

        if len(json_resp['dirent_list']) > 0:
            for dirent in json_resp['dirent_list']:
                # ``'is_file' in dirent`` replaces ``dirent.has_key('is_file')``,
                # which was removed in Python 3; both spellings are equivalent
                # on Python 2, so this is a safe forward-compatible fix.
                if 'is_file' in dirent and dirent['is_file']:
                    return dirent['obj_name']

        return None

    def setUp(self):
        self.user_name = self.user.username
        self.admin_name = self.admin.username

        self.repo_id = self.repo.id

        self.file_path = self.file
        self.file_name = os.path.basename(self.file_path)

        self.folder_path = self.folder

        self.url = reverse('api-v2.1-file-view', args=[self.repo_id])

    def tearDown(self):
        self.remove_repo()

    # for test http GET request
    def test_can_get_file_info(self):
        self.login_as(self.user)

        resp = self.client.get(self.url + '?p=' + self.file_path)
        self.assertEqual(200, resp.status_code)

        json_resp = json.loads(resp.content)
        assert self.file_name == json_resp['obj_name']

    def test_get_file_info_with_invalid_perm(self):
        # login as admin, then visit user's file
        self.login_as(self.admin)

        resp = self.client.get(self.url + '?p=' + self.file_path)
        self.assertEqual(403, resp.status_code)

    # for test http POST request
    def test_post_operation_invalid(self):
        self.login_as(self.user)

        data = {'operation': 'invalid',}
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(400, resp.status_code)

    def test_can_create_file(self):
        self.login_as(self.user)

        # delete old file
        resp = self.client.delete(self.url + '?p=' + self.file_path,
                {}, 'application/x-www-form-urlencoded')
        assert None == self.get_lib_file_name(self.repo_id)

        new_name = randstring(6)
        new_file_path = '/' + new_name
        data = {'operation': 'create',}

        # create file
        resp = self.client.post(self.url + '?p=' + new_file_path, data)
        self.assertEqual(200, resp.status_code)

        # check new file in repo
        assert new_name == self.get_lib_file_name(self.repo_id)

    def test_can_create_same_name_file(self):
        self.login_as(self.user)

        file_name = os.path.basename(self.file_path.rstrip('/'))
        new_name = check_filename_with_rename(self.repo_id, '/', file_name)

        data = {'operation': 'create',}

        # create file with a name that already exists
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(200, resp.status_code)

        json_resp = json.loads(resp.content)

        # check the file was auto-renamed
        assert new_name == json_resp['obj_name']

    def test_create_file_with_invalid_repo_perm(self):
        # login as admin, then create file in user's repo
        self.login_as(self.admin)

        new_name = randstring(6)
        new_file_path = '/' + new_name
        data = {'operation': 'create',}

        resp = self.client.post(self.url + '?p=' + new_file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_create_file_with_invalid_folder_perm(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        # share user's repo to admin with 'rw' permission
        seafile_api.share_repo(self.repo_id, self.user_name,
                self.admin_name, 'rw')

        # set sub-folder permisson as 'r' for admin
        seafile_api.add_folder_user_perm(self.repo_id,
                self.folder_path, 'r', self.admin_name)

        # admin can visit sub-folder with 'r' permission
        assert seafile_api.check_permission_by_path(self.repo_id,
                self.folder_path, self.admin_name) == 'r'

        # login as admin, then create file in a 'r' permission folder
        self.login_as(self.admin)

        new_name = randstring(6)
        new_file_path = posixpath.join(self.folder_path, new_name)
        data = {'operation': 'create',}

        resp = self.client.post(self.url + '?p=' + new_file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_can_rename_file(self):
        self.login_as(self.user)

        new_name = randstring(6)

        # check old file exist
        assert self.file_name == self.get_lib_file_name(self.repo_id)

        data = {'operation': 'rename', 'newname': new_name}
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(200, resp.status_code)

        # check old file has been renamed to new_name
        assert new_name == self.get_lib_file_name(self.repo_id)

    def test_rename_file_with_invalid_name(self):
        self.login_as(self.user)

        # check old file exist
        assert self.file_name == self.get_lib_file_name(self.repo_id)

        # '/' is not allowed in a file name
        data = {'operation': 'rename', 'newname': '123/456'}
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(400, resp.status_code)

    def test_can_rename_file_with_same_name(self):
        self.login_as(self.user)

        # check old file exist
        assert self.file_name == self.get_lib_file_name(self.repo_id)

        # create a new file
        new_name = randstring(6)
        data = {'operation': 'create',}
        resp = self.client.post(self.url + '?p=/' + new_name, data)
        self.assertEqual(200, resp.status_code)

        # rename new file with the same of the old file
        old_file_name = self.file_name
        checked_name = check_filename_with_rename(self.repo_id,
                '/', old_file_name)
        data = {'operation': 'rename', 'newname': checked_name}
        resp = self.client.post(self.url + '?p=/' + new_name, data)
        self.assertEqual(200, resp.status_code)

        json_resp = json.loads(resp.content)
        assert checked_name == json_resp['obj_name']

    def test_rename_file_with_invalid_repo_perm(self):
        # login as admin, then rename file in user's repo
        self.login_as(self.admin)

        new_name = randstring(6)
        data = {'operation': 'rename', 'newname': new_name}
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_rename_file_with_invalid_folder_perm(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        # create a file as old file in user repo sub-folder
        old_file_name = randstring(6)
        seafile_api.post_empty_file(repo_id=self.repo_id,
                parent_dir=self.folder_path, filename=old_file_name,
                username=self.user_name)

        # share user's repo to admin with 'rw' permission
        seafile_api.share_repo(self.repo_id, self.user_name,
                self.admin_name, 'rw')

        # set sub-folder permisson as 'r' for admin
        seafile_api.add_folder_user_perm(self.repo_id,
                self.folder_path, 'r', self.admin_name)

        # admin can visit old file with 'r' permission
        old_file_path = posixpath.join(self.folder_path, old_file_name)
        assert seafile_api.check_permission_by_path(self.repo_id,
                old_file_path, self.admin_name) == 'r'

        # login as admin, then rename a 'r' permission old file
        self.login_as(self.admin)

        new_name = randstring(6)
        data = {'operation': 'rename', 'newname': new_name}
        resp = self.client.post(self.url + '?p=' + old_file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_can_move_file(self):
        self.login_as(self.user)

        # check old file name exist
        assert self.file_name == self.get_lib_file_name(self.repo_id)

        # move file
        dst_repo_id = self.create_new_repo()
        data = {
            'operation': 'move',
            'dst_repo': dst_repo_id,
            'dst_dir': '/',
        }
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(200, resp.status_code)

        # check old file has been delete
        assert self.get_lib_file_name(self.repo_id) == None

        # check old file has been moved to dst repo
        assert self.file_name == self.get_lib_file_name(dst_repo_id)

        self.remove_repo(dst_repo_id)

    def test_move_file_with_invalid_src_repo_perm(self):
        # login as admin, then move file in user's repo
        self.login_as(self.admin)

        dst_repo_id = self.admin_create_new_repo()
        data = {
            'operation': 'move',
            'dst_repo': dst_repo_id,
            'dst_dir': '/',
        }
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_move_file_with_invalid_src_folder_perm(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        # create a file as old file in user repo sub-folder
        old_file_name = randstring(6)
        seafile_api.post_empty_file(repo_id=self.repo_id,
                parent_dir=self.folder_path, filename=old_file_name,
                username=self.user_name)

        # share user's repo to admin with 'rw' permission
        seafile_api.share_repo(self.repo_id, self.user_name,
                self.admin_name, 'rw')

        # set sub-folder permisson as 'r' for admin
        seafile_api.add_folder_user_perm(self.repo_id,
                self.folder_path, 'r', self.admin_name)

        # admin can visit old file with 'r' permission
        old_file_path = posixpath.join(self.folder_path, old_file_name)
        assert seafile_api.check_permission_by_path(self.repo_id,
                old_file_path, self.admin_name) == 'r'

        # login as admin, then move a 'r' permission file
        self.login_as(self.admin)

        dst_repo_id = self.admin_create_new_repo()
        data = {
            'operation': 'move',
            'dst_repo': dst_repo_id,
            'dst_dir': '/',
        }
        resp = self.client.post(self.url + '?p=' + old_file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_move_file_with_invalid_dst_repo_perm(self):
        # login as user, then move file to admin's repo
        self.login_as(self.user)

        # create new repo for admin
        dst_repo_id = self.admin_create_new_repo()
        data = {
            'operation': 'move',
            'dst_repo': dst_repo_id,
            'dst_dir': '/',
        }
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_move_file_with_invalid_dst_folder_perm(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        # share user's repo to admin with 'rw' permission
        seafile_api.share_repo(self.repo_id, self.user_name,
                self.admin_name, 'rw')

        # set sub-folder permisson as 'r' for admin
        seafile_api.add_folder_user_perm(self.repo_id,
                self.folder_path, 'r', self.admin_name)

        # admin can visit sub-folder with 'r' permission
        assert seafile_api.check_permission_by_path(self.repo_id,
                self.folder_path, self.admin_name) == 'r'

        # create a file for admin repo
        admin_repo_id = self.admin_create_new_repo()
        admin_file_name = randstring(6)
        seafile_api.post_empty_file(repo_id=admin_repo_id,
                parent_dir='/', filename=admin_file_name,
                username=self.admin_name)

        # login as admin, then move file to a 'r' permission folder
        self.login_as(self.admin)

        data = {
            'operation': 'move',
            'dst_repo': self.repo_id,
            'dst_dir': self.folder_path,
        }
        url = reverse('api-v2.1-file-view', args=[admin_repo_id])
        resp = self.client.post(url + '?p=/' + admin_file_name, data)
        self.assertEqual(403, resp.status_code)

    def test_can_copy_file(self):
        self.login_as(self.user)

        # check old file name exist
        assert self.file_name == self.get_lib_file_name(self.repo_id)

        # copy file
        dst_repo_id = self.create_new_repo()
        data = {
            'operation': 'copy',
            'dst_repo': dst_repo_id,
            'dst_dir': '/',
        }
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(200, resp.status_code)

        # check old file still in old repo
        assert self.file_name == self.get_lib_file_name(self.repo_id)

        # check old file has been copied to dst repo
        assert self.file_name == self.get_lib_file_name(dst_repo_id)

        self.remove_repo(dst_repo_id)

    def test_copy_file_with_invalid_src_repo_perm(self):
        # login as admin, then copy file in user's repo
        self.login_as(self.admin)

        # copy file
        dst_repo_id = self.admin_create_new_repo()
        data = {
            'operation': 'copy',
            'dst_repo': dst_repo_id,
            'dst_dir': '/',
        }
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_copy_file_with_invalid_dst_repo_perm(self):
        # login as user, then copy file to admin's repo
        self.login_as(self.user)

        # create new repo for admin
        dst_repo_id = self.admin_create_new_repo()
        data = {
            'operation': 'copy',
            'dst_repo': dst_repo_id,
            'dst_dir': '/',
        }
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_copy_file_with_invalid_dst_folder_perm(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        # share user's repo to admin with 'rw' permission
        seafile_api.share_repo(self.repo_id, self.user_name,
                self.admin_name, 'rw')

        # set sub-folder permisson as 'r' for admin
        seafile_api.add_folder_user_perm(self.repo_id,
                self.folder_path, 'r', self.admin_name)

        # admin can visit sub-folder with 'r' permission
        assert seafile_api.check_permission_by_path(self.repo_id,
                self.folder_path, self.admin_name) == 'r'

        # create a file for admin repo
        admin_repo_id = self.admin_create_new_repo()
        admin_file_name = randstring(6)
        seafile_api.post_empty_file(repo_id=admin_repo_id,
                parent_dir='/', filename=admin_file_name,
                username=self.admin_name)

        # login as admin, then copy file to a 'r' permission folder
        self.login_as(self.admin)

        data = {
            'operation': 'copy',
            'dst_repo': self.repo_id,
            'dst_dir': self.folder_path,
        }
        url = reverse('api-v2.1-file-view', args=[admin_repo_id])
        resp = self.client.post(url + '?p=/' + admin_file_name, data)
        self.assertEqual(403, resp.status_code)

    def test_can_revert_file(self):
        self.login_as(self.user)

        # first rename file
        new_name = randstring(6)
        seafile_api.rename_file(self.repo_id, '/', self.file_name,
                new_name, self.user_name)
        new_file_path = '/' + new_name

        # get file revisions
        commits = seafile_api.get_file_revisions(
                self.repo_id, new_file_path, -1, -1, 100)

        # then revert file
        data = {
            'operation': 'revert',
            'commit_id': commits[0].id
        }
        resp = self.client.post(self.url + '?p=' + new_file_path, data)
        self.assertEqual(200, resp.status_code)

    def test_revert_file_with_invalid_user_permission(self):
        # first rename file
        new_name = randstring(6)
        seafile_api.rename_file(self.repo_id, '/', self.file_name,
                new_name, self.user_name)
        new_file_path = '/' + new_name

        # get file revisions
        commits = seafile_api.get_file_revisions(
                self.repo_id, new_file_path, -1, -1, 100)

        # then revert file without logging in
        data = {
            'operation': 'revert',
            'commit_id': commits[0].id
        }
        resp = self.client.post(self.url + '?p=' + new_file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_revert_file_with_r_permission(self):
        # first rename file
        new_name = randstring(6)
        seafile_api.rename_file(self.repo_id, '/', self.file_name,
                new_name, self.user_name)
        new_file_path = '/' + new_name

        # get file revisions
        commits = seafile_api.get_file_revisions(
                self.repo_id, new_file_path, -1, -1, 100)

        self.share_repo_to_admin_with_r_permission()
        self.login_as(self.admin)

        # then revert file
        data = {
            'operation': 'revert',
            'commit_id': commits[0].id
        }
        resp = self.client.post(self.url + '?p=' + new_file_path, data)
        self.assertEqual(403, resp.status_code)

    def test_revert_file_without_commit_id(self):
        self.login_as(self.user)

        data = {
            'operation': 'revert',
        }
        resp = self.client.post(self.url + '?p=' + self.file_path, data)
        self.assertEqual(400, resp.status_code)

    # for test http PUT request
    def test_can_lock_file(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        self.login_as(self.user)

        # check file NOT locked when init
        return_value = seafile_api.check_file_lock(self.repo_id,
                self.file_path.lstrip('/'), self.user.username)
        assert return_value == 0

        # lock file
        data = 'operation=lock'
        resp = self.client.put(self.url + '?p=' + self.file_path, data, 'application/x-www-form-urlencoded')
        self.assertEqual(200, resp.status_code)

        # check file has been locked
        return_value = seafile_api.check_file_lock(self.repo_id,
                self.file_path.lstrip('/'), self.user.username)
        assert return_value == 2

    def test_lock_file_with_invalid_repo_perm(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        # login as admin, then lock file in user's repo
        self.login_as(self.admin)

        # lock file
        data = 'operation=lock'
        resp = self.client.put(self.url + '?p=' + self.file_path, data, 'application/x-www-form-urlencoded')
        self.assertEqual(403, resp.status_code)

    def test_lock_file_with_invalid_folder_perm(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        # create a file in user repo sub-folder
        file_name = randstring(6)
        seafile_api.post_empty_file(repo_id=self.repo_id,
                parent_dir=self.folder_path, filename=file_name,
                username=self.user_name)

        # share user's repo to admin with 'rw' permission
        seafile_api.share_repo(self.repo_id, self.user_name,
                self.admin_name, 'rw')

        # set sub-folder permisson as 'r' for admin
        seafile_api.add_folder_user_perm(self.repo_id,
                self.folder_path, 'r', self.admin_name)

        # admin can visit file with 'r' permission
        file_path = posixpath.join(self.folder_path, file_name)
        assert seafile_api.check_permission_by_path(self.repo_id,
                file_path, self.admin_name) == 'r'

        # login as admin, then lock a 'r' permission file
        self.login_as(self.admin)

        data = 'operation=lock'
        resp = self.client.put(self.url + '?p=' + file_path,
                data, 'application/x-www-form-urlencoded')
        self.assertEqual(403, resp.status_code)

    def test_can_unlock_file(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        self.login_as(self.user)

        # lock file for test
        seafile_api.lock_file(self.repo_id, self.file_path.lstrip('/'),
                self.user.username, -1)

        # check file has been locked when init
        return_value = seafile_api.check_file_lock(self.repo_id,
                self.file_path.lstrip('/'), self.user.username)
        assert return_value == 2

        # unlock file
        data = 'operation=unlock'
        resp = self.client.put(self.url + '?p=' + self.file_path, data, 'application/x-www-form-urlencoded')
        self.assertEqual(200, resp.status_code)

        # check file has been unlocked
        return_value = seafile_api.check_file_lock(self.repo_id,
                self.file_path.lstrip('/'), self.user.username)
        assert return_value == 0

    def test_unlock_file_with_invalid_repo_perm(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        # login as admin, then unlock file in user's repo
        self.login_as(self.admin)

        # unlock file
        data = 'operation=unlock'
        resp = self.client.put(self.url + '?p=' + self.file_path, data, 'application/x-www-form-urlencoded')
        self.assertEqual(403, resp.status_code)

    def test_unlock_file_with_invalid_folder_perm(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        # create a file in user repo sub-folder
        file_name = randstring(6)
        seafile_api.post_empty_file(repo_id=self.repo_id,
                parent_dir=self.folder_path, filename=file_name,
                username=self.user_name)

        # share user's repo to admin with 'rw' permission
        seafile_api.share_repo(self.repo_id, self.user_name,
                self.admin_name, 'rw')

        # set sub-folder permisson as 'r' for admin
        seafile_api.add_folder_user_perm(self.repo_id,
                self.folder_path, 'r', self.admin_name)

        # admin can visit file with 'r' permission
        file_path = posixpath.join(self.folder_path, file_name)
        assert seafile_api.check_permission_by_path(self.repo_id,
                file_path, self.admin_name) == 'r'

        # login as admin, then unlock a 'r' permission file
        self.login_as(self.admin)

        data = 'operation=unlock'
        resp = self.client.put(self.url + '?p=' + file_path,
                data, 'application/x-www-form-urlencoded')
        self.assertEqual(403, resp.status_code)

    # for test http DELETE request
    def test_can_delete_file(self):
        self.login_as(self.user)

        # check old file name exist
        assert self.file_name == self.get_lib_file_name(self.repo_id)

        # delete file
        resp = self.client.delete(self.url + '?p=' + self.file_path,
                {}, 'application/x-www-form-urlencoded')
        self.assertEqual(200, resp.status_code)

        # check old file has been deleted
        assert None == self.get_lib_file_name(self.repo_id)

    def test_delete_file_with_invalid_repo_perm(self):
        # login as admin, then delete file in user's repo
        self.login_as(self.admin)

        # delete file
        resp = self.client.delete(self.url + '?p=' + self.file_path,
                {}, 'application/x-www-form-urlencoded')
        self.assertEqual(403, resp.status_code)

    def test_delete_file_with_invalid_folder_perm(self):
        if not LOCAL_PRO_DEV_ENV:
            return

        # create a file in user repo sub-folder
        file_name = randstring(6)
        seafile_api.post_empty_file(repo_id=self.repo_id,
                parent_dir=self.folder_path, filename=file_name,
                username=self.user_name)

        # share user's repo to admin with 'rw' permission
        seafile_api.share_repo(self.repo_id, self.user_name,
                self.admin_name, 'rw')

        # set sub-folder permisson as 'r' for admin
        seafile_api.add_folder_user_perm(self.repo_id,
                self.folder_path, 'r', self.admin_name)

        # admin can visit file with 'r' permission
        file_path = posixpath.join(self.folder_path, file_name)
        assert seafile_api.check_permission_by_path(self.repo_id,
                file_path, self.admin_name) == 'r'

        # login as admin, then delete a 'r' permission file
        self.login_as(self.admin)

        resp = self.client.delete(self.url + '?p=' + file_path,
                {}, 'application/x-www-form-urlencoded')
        self.assertEqual(403, resp.status_code)
| 33.837802 | 108 | 0.620528 | 3,466 | 25,243 | 4.249856 | 0.048182 | 0.039104 | 0.040733 | 0.033605 | 0.909097 | 0.901018 | 0.884046 | 0.865852 | 0.839308 | 0.828038 | 0 | 0.008812 | 0.276195 | 25,243 | 745 | 109 | 33.883221 | 0.797384 | 0.134453 | 0 | 0.775056 | 0 | 0 | 0.060052 | 0.015174 | 0 | 0 | 0 | 0 | 0.142539 | 1 | 0.08686 | false | 0.004454 | 0.022272 | 0 | 0.146993 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6a04932fb906059ce666f47af6193155d4a720e9 | 198 | py | Python | src/ais_toy/csp/heuristics/__init__.py | smastelini/ai-search-toy-examples | 491f1c993d659e0694dd5b82d32cacc399b95060 | [
"MIT"
] | null | null | null | src/ais_toy/csp/heuristics/__init__.py | smastelini/ai-search-toy-examples | 491f1c993d659e0694dd5b82d32cacc399b95060 | [
"MIT"
] | null | null | null | src/ais_toy/csp/heuristics/__init__.py | smastelini/ai-search-toy-examples | 491f1c993d659e0694dd5b82d32cacc399b95060 | [
"MIT"
] | null | null | null | from .heuristics import minimum_remaining_values, \
degree_heuristic, least_constraining_value
__all__ = ['minimum_remaining_values', 'degree_heuristic',
'least_constraining_value']
| 33 | 58 | 0.782828 | 20 | 198 | 7.05 | 0.6 | 0.22695 | 0.312057 | 0.397163 | 0.836879 | 0.836879 | 0.836879 | 0.836879 | 0 | 0 | 0 | 0 | 0.141414 | 198 | 5 | 59 | 39.6 | 0.829412 | 0 | 0 | 0 | 0 | 0 | 0.323232 | 0.242424 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
6a31a9032740676e7ceee1bc0306f89b7ff0440e | 11,679 | py | Python | care/facility/views.py | afzalIbnSH/care | d6ce745670f7915912293ec604037438a6ec7750 | [
"MIT"
] | null | null | null | care/facility/views.py | afzalIbnSH/care | d6ce745670f7915912293ec604037438a6ec7750 | [
"MIT"
] | null | null | null | care/facility/views.py | afzalIbnSH/care | d6ce745670f7915912293ec604037438a6ec7750 | [
"MIT"
] | null | null | null | import logging
from django.shortcuts import render, redirect
from django.views import View
from django.http import HttpResponseRedirect
from django.db import IntegrityError
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.views import redirect_to_login
from django.conf import settings
from .forms import (
FacilityCreationForm,
FacilityCapacityCreationForm,
DoctorsCountCreationForm,
)
from .models import Facility, FacilityCapacity, HospitalDoctors
class StaffRequiredMixin:
    """View mixin that only lets staff-type accounts through.

    Any other user is redirected to the login page instead of
    reaching the wrapped view.
    """

    def dispatch(self, request, *args, **kwargs):
        # Guard clause: bounce non-staff accounts to login.
        if request.user.user_type != settings.STAFF_ACCOUNT_TYPE:
            return redirect_to_login(self.request.get_full_path())
        return super().dispatch(request, *args, **kwargs)
class FacilitiesView(LoginRequiredMixin, StaffRequiredMixin, View):
    """List every Facility created by the logged-in staff user."""

    template = "facility/facilities_view.html"

    def get(self, request):
        """Render the facilities owned by the current user.

        Any error is logged and the user is redirected to /500.
        """
        try:
            current_user = request.user
            data = Facility.objects.filter(created_by=current_user)
            return render(request, self.template, {"data": data})
        except Exception as e:
            # NOTE(review): this broad except hides programming errors
            # behind a /500 redirect; consider narrowing it.
            logging.error(e)
            return HttpResponseRedirect("/500")
class FacilityView(LoginRequiredMixin, StaffRequiredMixin, View):
    """Detail page for one Facility: its capacities and doctor counts."""

    template = "facility/facility_view.html"

    def get(self, request, pk):
        """Render facility ``pk`` (owned by the current user) with its
        related FacilityCapacity and HospitalDoctors records.

        Note: a missing/foreign facility raises DoesNotExist, which the
        broad except turns into a /500 redirect rather than a 404.
        """
        try:
            current_user = request.user
            facility_obj = Facility.objects.get(id=pk, created_by=current_user)
            capacities = FacilityCapacity.objects.filter(facility=facility_obj)
            doctor_counts = HospitalDoctors.objects.filter(facility=facility_obj)
            return render(
                request,
                self.template,
                {
                    "capacities": capacities,
                    "doctor_counts": doctor_counts,
                    "facility": facility_obj,
                },
            )
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")
class FacilityCreation(LoginRequiredMixin, StaffRequiredMixin, View):
    """Create a new Facility owned by the logged-in staff user."""

    form_class = FacilityCreationForm
    template = "facility/facility_creation.html"

    def get(self, request):
        """Render an empty facility creation form."""
        try:
            form = self.form_class()
            return render(request, self.template, {"form": form})
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")

    def post(self, request):
        """Validate and save a new facility, then move on to capacity entry.

        On validation failure the form is re-rendered with its errors.
        """
        try:
            data = request.POST
            form = self.form_class(data)
            if form.is_valid():
                facility_obj = form.save(commit=False)
                facility_obj.created_by = request.user
                # NOTE(review): 2 is presumably the "hospital" facility
                # type -- confirm against the model's type choices.
                facility_obj.facility_type = 2
                facility_obj.save()
                return redirect("facility:facility-capacity-create", facility_obj.id)
            return render(request, self.template, {"form": form})
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")
class FacilityUpdation(LoginRequiredMixin, StaffRequiredMixin, View):
    """Edit an existing Facility owned by the logged-in staff user."""

    form_class = FacilityCreationForm
    template = "facility/facility_updation.html"

    def get(self, request, pk):
        """Render the edit form pre-filled with facility ``pk``."""
        try:
            current_user = request.user
            facility_obj = Facility.objects.get(id=pk, created_by=current_user)
            form = self.form_class(instance=facility_obj)
            return render(request, self.template, {"form": form})
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")

    def post(self, request, pk):
        """Validate and save changes to facility ``pk``.

        On validation failure the form is re-rendered with its errors.
        """
        try:
            current_user = request.user
            facility_obj = Facility.objects.get(id=pk, created_by=current_user)
            data = request.POST
            form = self.form_class(data, instance=facility_obj)
            if form.is_valid():
                form.save()
                return redirect("facility:facility-view", facility_obj.id)
            return render(request, self.template, {"form": form})
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")
class FacilityCapacityCreation(LoginRequiredMixin, StaffRequiredMixin, View):
    """Add a bed-capacity record to a facility."""

    form_class = FacilityCapacityCreationForm
    template = "facility/facility_capacity_creation.html"

    def get(self, request, pk):
        """Render an empty capacity form for facility ``pk`` (owned by
        the current user)."""
        try:
            form = self.form_class()
            current_user = request.user
            facility_obj = Facility.objects.get(id=pk, created_by=current_user)
            return render(
                request, self.template, {"form": form, "facility": facility_obj}
            )
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")

    def post(self, request, pk):
        """Validate and save a capacity record for facility ``pk``.

        Re-renders the form with ``validation_error`` set when current
        capacity exceeds total capacity, and with ``duplicate`` set when
        a record for the same room type already exists (IntegrityError).
        "addmore" in the POST keeps the user on this form; otherwise they
        continue to doctor-count entry.
        """
        try:
            data = request.POST
            form = self.form_class(data)
            # Fetch the facility exactly once; it is needed both for
            # saving and for re-rendering on failure.  (It was previously
            # fetched a second time inside the happy path for no reason.)
            # NOTE(review): unlike get(), the owner is not checked here --
            # confirm whether that is intended.
            facility_obj = Facility.objects.get(id=pk)
            validation_error = False
            duplicate = False
            if form.is_valid():
                if form.cleaned_data.get('total_capacity') >= form.cleaned_data.get('current_capacity'):
                    facility_capacity_obj = form.save(commit=False)
                    facility_capacity_obj.facility = facility_obj
                    try:
                        facility_capacity_obj.save()
                        if "addmore" in data:
                            return redirect(
                                "facility:facility-capacity-create", facility_obj.id
                            )
                        else:
                            return redirect(
                                "facility:facility-doctor-count-create", facility_obj.id
                            )
                    except IntegrityError:
                        duplicate = True
                else:
                    validation_error = True
            return render(
                request,
                self.template,
                {"form": form, "facility": facility_obj, "duplicate": duplicate, "validation_error": validation_error},
            )
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")
class FacilityCapacityUpdation(LoginRequiredMixin, StaffRequiredMixin, View):
    """Edit an existing capacity record of a facility."""

    form_class = FacilityCapacityCreationForm
    template = "facility/facility_capacity_updation.html"

    def get(self, request, fpk, cpk):
        """Render the edit form for capacity ``cpk`` of facility ``fpk``."""
        try:
            current_user = request.user
            facility_obj = Facility.objects.get(id=fpk, created_by=current_user)
            capacity_obj = FacilityCapacity.objects.get(id=cpk, facility=facility_obj)
            form = self.form_class(instance=capacity_obj)
            return render(
                request, self.template, {"form": form, "facility": facility_obj}
            )
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")

    def post(self, request, fpk, cpk):
        """Validate and save changes to capacity ``cpk``.

        ``validation_error`` flags current capacity exceeding total;
        ``duplicate`` flags a unique-constraint clash (IntegrityError),
        e.g. changing the room type to one that already exists.
        """
        try:
            data = request.POST
            current_user = request.user
            facility_obj = Facility.objects.get(id=fpk, created_by=current_user)
            capacity_obj = FacilityCapacity.objects.get(id=cpk, facility=facility_obj)
            form = self.form_class(data, instance=capacity_obj)
            duplicate = False
            validation_error = False
            if form.is_valid():
                if form.cleaned_data.get('total_capacity') >= form.cleaned_data.get('current_capacity'):
                    try:
                        form.save()
                        return redirect("facility:facility-view", facility_obj.id)
                    except IntegrityError:
                        duplicate = True
                else:
                    validation_error = True
            return render(
                request, self.template, {"form": form, "duplicate": duplicate,
                    "validation_error": validation_error, "facility": facility_obj}
            )
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")
class DoctorCountCreation(LoginRequiredMixin, StaffRequiredMixin, View):
    """Add a doctors-count record to a facility."""

    form_class = DoctorsCountCreationForm
    template = "facility/facility_doctor_count_creation.html"

    def get(self, request, pk):
        """Render an empty doctors-count form for facility ``pk`` (owned
        by the current user)."""
        try:
            form = self.form_class()
            current_user = request.user
            facility_obj = Facility.objects.get(id=pk, created_by=current_user)
            return render(
                request, self.template, {"form": form, "facility": facility_obj}
            )
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")

    def post(self, request, pk):
        """Validate and save a doctors-count record for facility ``pk``.

        Bug fix: ``facility_obj`` and ``duplicate`` were previously bound
        only inside the ``form.is_valid()`` branch, so an invalid form hit
        a NameError at the final render() and surfaced as a /500 redirect
        instead of re-displaying the form with its errors.
        """
        try:
            data = request.POST
            form = self.form_class(data)
            # Bind these before validation so the final render() works
            # even when the form is invalid.
            facility_obj = Facility.objects.get(id=pk)
            duplicate = False
            if form.is_valid():
                facility_capacity_obj = form.save(commit=False)
                facility_capacity_obj.facility = facility_obj
                try:
                    facility_capacity_obj.save()
                    if "addmore" in data:
                        return redirect(
                            "facility:facility-doctor-count-create", facility_obj.id
                        )
                    else:
                        return redirect("facility:facility-view", facility_obj.id)
                except IntegrityError:
                    # A count for this doctor area already exists.
                    duplicate = True
            return render(
                request,
                self.template,
                {"form": form, "facility": facility_obj, "duplicate": duplicate},
            )
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")
class DoctorCountUpdation(LoginRequiredMixin, StaffRequiredMixin, View):
    """Update an existing doctors-count record of a facility."""

    form_class = DoctorsCountCreationForm
    template = "facility/facility_doctor_count_updation.html"

    def get(self, request, fpk, cpk):
        """Render the update form pre-filled with count ``cpk`` of facility ``fpk``."""
        try:
            current_user = request.user
            # Only the creator of the facility may edit its counts.
            facility_obj = Facility.objects.get(id=fpk, created_by=current_user)
            doctor_count_obj = HospitalDoctors.objects.get(
                id=cpk, facility=facility_obj
            )
            form = self.form_class(instance=doctor_count_obj)
            return render(
                request, self.template, {"form": form, "facility": facility_obj}
            )
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")

    def post(self, request, fpk, cpk):
        """Save the submitted update.

        Redirects to the facility view on success; re-renders the form with
        the ``duplicate`` flag set when the save hits an IntegrityError.
        """
        try:
            data = request.POST
            current_user = request.user
            facility_obj = Facility.objects.get(id=fpk, created_by=current_user)
            doctor_count_obj = HospitalDoctors.objects.get(
                id=cpk, facility=facility_obj
            )
            form = self.form_class(data, instance=doctor_count_obj)
            duplicate = False
            if form.is_valid():
                try:
                    form.save()
                    return redirect("facility:facility-view", facility_obj.id)
                except IntegrityError:
                    duplicate = True
            # Consistency fix: get() renders this same template with
            # "facility" in the context, but the POST re-render omitted it,
            # leaving the template without the facility after a failed save.
            return render(
                request, self.template,
                {"form": form, "duplicate": duplicate, "facility": facility_obj}
            )
        except Exception as e:
            logging.error(e)
            return HttpResponseRedirect("/500")
| 38.042345 | 119 | 0.578474 | 1,111 | 11,679 | 5.933393 | 0.090909 | 0.070085 | 0.029126 | 0.048847 | 0.841171 | 0.801274 | 0.774575 | 0.756978 | 0.751062 | 0.703125 | 0 | 0.005558 | 0.337529 | 11,679 | 306 | 120 | 38.166667 | 0.846452 | 0 | 0 | 0.716418 | 0 | 0 | 0.072866 | 0.044011 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05597 | false | 0 | 0.037313 | 0 | 0.320896 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6a425d0a1a22d8e608c61b644b56bbbbe94d2fa8 | 21,099 | py | Python | solvers/inpainter.py | axium/GlowIP | 214415a7fb1d60a368d399a7a1594c37f4b29338 | [
"MIT"
] | 2 | 2019-08-30T12:10:55.000Z | 2021-07-08T20:50:44.000Z | solvers/inpainter.py | axium/GlowIP | 214415a7fb1d60a368d399a7a1594c37f4b29338 | [
"MIT"
] | null | null | null | solvers/inpainter.py | axium/GlowIP | 214415a7fb1d60a368d399a7a1594c37f4b29338 | [
"MIT"
] | 1 | 2021-05-12T17:15:31.000Z | 2021-05-12T17:15:31.000Z | import numpy as np
import torch
from torchvision import datasets
import torchvision.transforms as transforms
import matplotlib.pyplot as plt
from skimage.measure import compare_psnr, compare_ssim
import skimage.io as sio
from glow.glow import Glow
from dcgan.dcgan import Generator
import json
import os
import warnings
warnings.filterwarnings("ignore")
def solveInpainting(args):
if args.prior == 'glow':
GlowInpaint(args)
elif args.prior == 'dcgan':
GANInpaint(args)
else:
raise "prior not defined correctly"
def GlowInpaint(args):
loopOver = zip(args.gamma)
for gamma in loopOver:
skip_to_next = False # flag to skip to next loop if recovery is fails due to instability
n = args.size*args.size*3
modeldir = "./trained_models/%s/glow"%args.model
test_folder = "./test_images/%s"%args.dataset
save_path = "./results/%s/%s"%(args.dataset,args.experiment)
# loading dataset
trans = transforms.Compose([transforms.Resize((args.size,args.size)),transforms.ToTensor()])
test_dataset = datasets.ImageFolder(test_folder, transform=trans)
test_dataloader = torch.utils.data.DataLoader(test_dataset,batch_size=args.batchsize,drop_last=False,shuffle=False)
# loading glow configurations
config_path = modeldir+"/configs.json"
with open(config_path, 'r') as f:
configs = json.load(f)
# regularizor
gamma = torch.tensor(gamma, requires_grad=True, dtype=torch.float, device=args.device)
# getting test images
Original = []
Recovered = []
Masked = []
Mask = []
Residual_Curve = []
for i, data in enumerate(test_dataloader):
# getting batch of data
x_test = data[0]
x_test = x_test.clone().to(device=args.device)
n_test = x_test.size()[0]
assert n_test == args.batchsize, "please make sure that no. of images are evenly divided by batchsize"
# generate mask
mask = gen_mask(args.inpaint_method,args.size,args.mask_size)
mask = np.array([mask for i in range(n_test)])
mask = mask.reshape([n_test,1,args.size,args.size])
mask = torch.tensor(mask, dtype=torch.float, requires_grad=False, device=args.device)
# loading glow model
glow = Glow((3,args.size,args.size),
K=configs["K"],L=configs["L"],
coupling=configs["coupling"],
n_bits_x=configs["n_bits_x"],
nn_init_last_zeros=configs["last_zeros"],
device=args.device)
glow.load_state_dict(torch.load(modeldir+"/glowmodel.pt"))
glow.eval()
# making a forward to record shapes of z's for reverse pass
_ = glow(glow.preprocess(torch.zeros_like(x_test)))
# initializing z from Gaussian
if args.init_strategy == "random":
z_sampled = np.random.normal(0,args.init_std,[n_test,n])
z_sampled = torch.tensor(z_sampled,requires_grad=True,dtype=torch.float,device=args.device)
# initializing z from image with noise filled only in masked region
elif args.init_strategy == "noisy_filled":
x_noisy_filled = x_test.clone().detach()
noise = np.random.normal(0,0.2, x_noisy_filled.size())
noise = torch.tensor(noise,dtype=torch.float,device=args.device)
noise = noise * (1-mask)
x_noisy_filled = x_noisy_filled + noise
x_noisy_filled = torch.clamp(x_noisy_filled, 0, 1)
z, _, _ = glow(x_noisy_filled - 0.5)
z = glow.flatten_z(z).clone().detach()
z_sampled = z.clone().detach().requires_grad_(True)
# initializing z from image with masked region inverted
elif args.init_strategy == "inverted_filled":
x_inverted_filled = x_test.clone().detach()
missing_x = x_inverted_filled.clone()
missing_x = missing_x.data.cpu().numpy()
missing_x = missing_x[:,:,::-1,::-1]
missing_x = torch.tensor(missing_x.copy(),dtype=torch.float,device=args.device)
missing_x = (1-mask)*missing_x
x_inverted_filled = x_inverted_filled * mask
x_inverted_filled = x_inverted_filled + missing_x
z, _, _ = glow(x_inverted_filled - 0.5)
z = glow.flatten_z(z).clone().detach()
z_sampled = z.clone().detach().requires_grad_(True)
# initializing z from masked image ( masked region as zeros )
elif args.init_strategy == "black_filled":
x_black_filled = x_test.clone().detach()
x_black_filled = mask * x_black_filled
x_black_filled = x_black_filled * mask
z, _, _ = glow(x_black_filled - 0.5)
z = glow.flatten_z(z).clone().detach()
z_sampled = z.clone().detach().requires_grad_(True)
# initializing z from noisy complete image
elif args.init_strategy == "noisy":
x_noisy = x_test.clone().detach()
noise = np.random.normal(0,0.05, x_noisy.size())
noise = torch.tensor(noise,dtype=torch.float,device=args.device)
x_noisy = x_noisy + noise
x_noisy = torch.clamp(x_noisy, 0, 1)
z, _, _ = glow(x_noisy - 0.5)
z = glow.flatten_z(z).clone().detach()
z_sampled = z.clone().detach().requires_grad_(True)
# initializing z from image with only noise in masked region
elif args.init_strategy == "only_noise_filled":
x_noisy_filled = x_test.clone().detach()
noise = np.random.normal(0,0.2, x_noisy_filled.size())
noise = torch.tensor(noise,dtype=torch.float,device=args.device)
noise = noise * (1-mask)
x_noisy_filled = mask * x_noisy_filled + noise
x_noisy_filled = torch.clamp(x_noisy_filled, 0, 1)
z, _, _ = glow(x_noisy_filled - 0.5)
z = glow.flatten_z(z).clone().detach()
z_sampled = z.clone().detach().requires_grad_(True)
else:
raise "Initialization strategy not defined"
# selecting optimizer
if args.optim == "adam":
optimizer = torch.optim.Adam([z_sampled], lr=args.lr,)
elif args.optim == "lbfgs":
optimizer = torch.optim.LBFGS([z_sampled], lr=args.lr,)
# metrics to record over training
psnr_t = torch.nn.MSELoss().to(device=args.device)
residual = []
# running optimizer steps
for t in range(args.steps):
def closure():
optimizer.zero_grad()
z_unflat = glow.unflatten_z(z_sampled, clone=False)
x_gen = glow(z_unflat, reverse=True, reverse_clone=False)
x_gen = glow.postprocess(x_gen,floor_clamp=False)
x_masked_test = x_test * mask
x_masked_gen = x_gen * mask
global residual_t
residual_t = ((x_masked_gen - x_masked_test)**2).view(len(x_masked_test),-1).sum(dim=1).mean()
z_reg_loss_t= gamma*z_sampled.norm(dim=1).mean()
loss_t = residual_t + z_reg_loss_t
psnr = psnr_t(x_test, x_gen)
psnr = 10 * np.log10(1 / psnr.item())
print("\rAt step=%0.3d|loss=%0.4f|residual=%0.4f|z_reg=%0.5f|psnr=%0.3f"%(t,loss_t.item(),residual_t.item(),z_reg_loss_t.item(), psnr),end="\r")
loss_t.backward()
return loss_t
try:
optimizer.step(closure)
residual.append(residual_t.item())
except:
skip_to_next = True
break
if skip_to_next:
break
# getting recovered and true images
x_test_np = x_test.data.cpu().numpy().transpose(0,2,3,1)
z_unflat = glow.unflatten_z(z_sampled, clone=False)
x_gen = glow(z_unflat, reverse=True, reverse_clone=False)
x_gen = glow.postprocess(x_gen,floor_clamp=False)
x_gen_np = x_gen.data.cpu().numpy().transpose(0,2,3,1)
x_gen_np = np.clip(x_gen_np,0,1)
mask_np = mask.data.cpu().numpy()
x_masked_test = x_test * mask
x_masked_test_np = x_masked_test.data.cpu().numpy().transpose(0,2,3,1)
x_masked_test_np = np.clip(x_masked_test_np,0,1)
Original.append(x_test_np)
Recovered.append(x_gen_np)
Masked.append(x_masked_test_np)
Residual_Curve.append(residual)
Mask.append(mask_np)
# freeing up memory for second loop
glow.zero_grad()
optimizer.zero_grad()
del x_test, x_gen, optimizer, psnr_t, z_sampled, glow, mask,
torch.cuda.empty_cache()
print("\nbatch completed")
if skip_to_next:
print("\nskipping current loop due to instability or user triggered quit")
continue
# metric evaluations
Original = np.vstack(Original)
Recovered = np.vstack(Recovered)
Masked = np.vstack(Masked)
Mask = np.vstack(Mask)
psnr = [compare_psnr(x, y) for x,y in zip(Original, Recovered)]
# print performance analysis
printout = "+-"*10 + "%s"%args.dataset + "-+"*10 + "\n"
printout = printout + "\t n_test = %d\n"%len(Recovered)
printout = printout + "\t inpaint_method = %s\n"%args.inpaint_method
printout = printout + "\t mask_size = %0.3f\n"%args.mask_size
printout = printout + "\t gamma = %0.6f\n"%gamma
printout = printout + "\t PSNR = %0.3f\n"%np.mean(psnr)
print(printout)
if args.save_metrics_text:
with open("%s_inpaint_glow_results.txt"%args.dataset,"a") as f:
f.write('\n' + printout)
# saving images
if args.save_results:
gamma = gamma.item()
file_names = [name[0].split("/")[-1].split(".")[0] for name in test_dataset.samples]
if args.init_strategy == 'random':
save_path = save_path + "/inpaint_%s_masksize_%0.4f_gamma_%0.6f_steps_%d_lr_%0.3f_init_std_%0.2f_optim_%s"
save_path = save_path%(args.inpaint_method,args.mask_size,gamma,args.steps,args.lr,args.init_std,args.optim)
else:
save_path = save_path + "/inpaint_%s_masksize_%0.4f_gamma_%0.6f_steps_%d_lr_%0.3f_init_%s_optim_%s"
save_path = save_path%(args.inpaint_method,args.mask_size,gamma,args.steps,args.lr,args.init_strategy,args.optim)
if not os.path.exists(save_path):
os.makedirs(save_path)
else:
save_path_1 = save_path + "_1"
if not os.path.exists(save_path_1):
os.makedirs(save_path_1)
save_path = save_path_1
else:
save_path_2 = save_path + "_2"
if not os.path.exists(save_path_2):
os.makedirs(save_path_2)
save_path = save_path_2
_ = [sio.imsave(save_path+"/"+name+"_recov.jpg", x) for x,name in zip(Recovered,file_names)]
_ = [sio.imsave(save_path+"/"+name+"_masked.jpg", x) for x,name in zip(Masked,file_names)]
Residual_Curve = np.array(Residual_Curve).mean(axis=0)
np.save(save_path+"/"+"residual_curve.npy", Residual_Curve)
np.save(save_path+"/original.npy", Original)
np.save(save_path+"/recovered.npy", Recovered)
np.save(save_path+"/mask.npy", Mask)
np.save(save_path+"/masked.npy", Masked)
def GANInpaint(args):
    """Solve image inpainting with a DCGAN prior via latent-space optimization.

    Mirrors :func:`GlowInpaint` but decodes through a pretrained DCGAN
    generator; only random latent initialization is supported.
    """
    loopOver = zip(args.gamma)
    for gamma in loopOver:
        n = 100  # DCGAN latent-code dimensionality
        modeldir = "./trained_models/%s/dcgan" % args.model
        test_folder = "./test_images/%s" % args.dataset
        save_path = "./results/%s/%s" % (args.dataset, args.experiment)
        # loading dataset
        trans = transforms.Compose([transforms.Resize((args.size, args.size)), transforms.ToTensor()])
        test_dataset = datasets.ImageFolder(test_folder, transform=trans)
        test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=args.batchsize, drop_last=False, shuffle=False)
        # regularization weight as a tensor so it participates in the loss graph
        gamma = torch.tensor(gamma, requires_grad=True, dtype=torch.float, device=args.device)
        # per-gamma accumulators over all batches
        Original = []
        Recovered = []
        Masked = []
        Mask = []
        Residual_Curve = []
        for i, data in enumerate(test_dataloader):
            # getting batch of data
            x_test = data[0]
            x_test = x_test.clone().to(device=args.device)
            n_test = x_test.size()[0]
            assert n_test == args.batchsize, "please make sure that no. of images are evenly divided by batchsize"
            # generate mask (same mask replicated for every image in the batch)
            mask = gen_mask(args.inpaint_method, args.size, args.mask_size)
            mask = np.array([mask for _ in range(n_test)])
            mask = mask.reshape([n_test, 1, args.size, args.size])
            mask = torch.tensor(mask, dtype=torch.float, requires_grad=False, device=args.device)
            # loading dcgan model
            generator = Generator(ngpu=1).to(device=args.device)
            generator.load_state_dict(torch.load(modeldir + '/dcgan_G.pt'))
            generator.eval()
            # initializing latent code z from a Gaussian
            if args.init_strategy == "random":
                z_sampled = np.random.normal(0, args.init_std, [n_test, n, 1, 1])
                z_sampled = torch.tensor(z_sampled, requires_grad=True, dtype=torch.float, device=args.device)
            else:
                # Bugfix: ``raise "..."`` is itself a TypeError in Python 3.
                raise ValueError("only random initialization strategy is supported for inpainting in dcgan")
            # selecting optimizer
            if args.optim == "adam":
                optimizer = torch.optim.Adam([z_sampled], lr=args.lr,)
            elif args.optim == "lbfgs":
                optimizer = torch.optim.LBFGS([z_sampled], lr=args.lr,)
            # metrics to record over training
            psnr_t = torch.nn.MSELoss().to(device=args.device)
            residual = []
            # Bugfix: previously leaked through a module-level ``global residual_t``.
            residual_t = None
            # running optimizer steps
            for t in range(args.steps):
                def closure():
                    nonlocal residual_t
                    optimizer.zero_grad()
                    x_gen = generator(z_sampled)
                    x_gen = (x_gen + 1) / 2  # map tanh output to [0, 1]
                    x_masked_test = x_test * mask
                    x_masked_gen = x_gen * mask
                    # per-image squared error on observed pixels, averaged over the batch
                    residual_t = ((x_masked_gen - x_masked_test) ** 2).view(len(x_masked_test), -1).sum(dim=1).mean()
                    z_reg_loss_t = gamma * z_sampled.norm(dim=1).mean()
                    loss_t = residual_t + z_reg_loss_t
                    psnr = psnr_t(x_test, x_gen)
                    psnr = 10 * np.log10(1 / psnr.item())
                    print("\rAt step=%0.3d|loss=%0.4f|residual=%0.4f|z_reg=%0.5f|psnr=%0.3f" % (t, loss_t.item(), residual_t.item(), z_reg_loss_t.item(), psnr), end="\r")
                    loss_t.backward()
                    return loss_t
                optimizer.step(closure)
                residual.append(residual_t.item())
            # getting recovered and true images
            x_test_np = x_test.data.cpu().numpy().transpose(0, 2, 3, 1)
            x_gen = generator(z_sampled)
            x_gen = (x_gen + 1) / 2
            x_gen_np = x_gen.data.cpu().numpy().transpose(0, 2, 3, 1)
            x_gen_np = np.clip(x_gen_np, 0, 1)
            mask_np = mask.data.cpu().numpy()
            x_masked_test = x_test * mask
            x_masked_test_np = x_masked_test.data.cpu().numpy().transpose(0, 2, 3, 1)
            x_masked_test_np = np.clip(x_masked_test_np, 0, 1)
            Original.append(x_test_np)
            Recovered.append(x_gen_np)
            Masked.append(x_masked_test_np)
            Residual_Curve.append(residual)
            Mask.append(mask_np)
            # freeing up memory for the next batch
            generator.zero_grad()
            optimizer.zero_grad()
            del x_test, x_gen, optimizer, psnr_t, z_sampled, generator, mask
            torch.cuda.empty_cache()
            print("\nbatch completed")
        # metric evaluations
        Original = np.vstack(Original)
        Recovered = np.vstack(Recovered)
        Masked = np.vstack(Masked)
        Mask = np.vstack(Mask)
        psnr = [compare_psnr(x, y) for x, y in zip(Original, Recovered)]
        # print performance analysis
        printout = "+-" * 10 + "%s" % args.dataset + "-+" * 10 + "\n"
        printout = printout + "\t n_test = %d\n" % len(Recovered)
        printout = printout + "\t inpaint_method = %s\n" % args.inpaint_method
        printout = printout + "\t mask_size = %0.3f\n" % args.mask_size
        printout = printout + "\t gamma = %0.6f\n" % gamma
        printout = printout + "\t PSNR = %0.3f\n" % np.mean(psnr)
        print(printout)
        if args.save_metrics_text:
            with open("%s_inpaint_dcgan_results.txt" % args.dataset, "a") as f:
                f.write('\n' + printout)
        # saving images
        if args.save_results:
            gamma = gamma.item()
            file_names = [name[0].split("/")[-1].split(".")[0] for name in test_dataset.samples]
            save_path = save_path + "/inpaint_%s_masksize_%0.4f_gamma_%0.6f_steps_%d_lr_%0.3f_init_std_%0.2f_optim_%s"
            save_path = save_path % (args.inpaint_method, args.mask_size, gamma, args.steps, args.lr, args.init_std, args.optim)
            # fall back to "<path>_1" / "<path>_2" if the directory already exists
            if not os.path.exists(save_path):
                os.makedirs(save_path)
            else:
                save_path_1 = save_path + "_1"
                if not os.path.exists(save_path_1):
                    os.makedirs(save_path_1)
                    save_path = save_path_1
                else:
                    save_path_2 = save_path + "_2"
                    if not os.path.exists(save_path_2):
                        os.makedirs(save_path_2)
                    save_path = save_path_2
            _ = [sio.imsave(save_path + "/" + name + "_recov.jpg", x) for x, name in zip(Recovered, file_names)]
            _ = [sio.imsave(save_path + "/" + name + "_masked.jpg", x) for x, name in zip(Masked, file_names)]
            Residual_Curve = np.array(Residual_Curve).mean(axis=0)
            np.save(save_path + "/" + "residual_curve.npy", Residual_Curve)
            np.save(save_path + "/original.npy", Original)
            np.save(save_path + "/recovered.npy", Recovered)
            np.save(save_path + "/mask.npy", Mask)
# a function to generate masks
def gen_mask(maskType, imgSize, masksize=0.25):
    """Build a binary (imgSize, imgSize) inpainting mask.

    A value of 1 keeps a pixel, 0 hides it. The larger ``masksize``, the
    bigger the hidden region.

    Args:
        maskType: one of 'random', 'center', 'left', 'bottom'.
        imgSize: side length of the square mask.
        masksize: fraction of the image to hide (interpretation depends
            on maskType).

    Returns:
        A float numpy array of shape (imgSize, imgSize).

    Raises:
        ValueError: for an unknown ``maskType``.
    """
    image_shape = [imgSize, imgSize]
    if maskType == 'random':
        # hide each pixel independently with probability masksize
        mask = np.ones(image_shape)
        mask[np.random.random(image_shape[:2]) < masksize] = 0.0
    elif maskType == 'center':
        # hide a centered square whose side is masksize * imgSize
        center_scale = -(masksize - 1) / 2  # == (1 - masksize) / 2
        assert center_scale <= 0.5
        mask = np.ones(image_shape)
        l = int(imgSize * center_scale)
        u = int(imgSize * (1.0 - center_scale))
        mask[l:u, l:u] = 0.0
    elif maskType == 'left':
        # hide the rightmost masksize fraction of the columns
        mask = np.ones(image_shape)
        c = int(imgSize * (1 - masksize))
        mask[:, c:] = 0.0
    elif maskType == 'bottom':
        # hide the bottom masksize fraction of the rows
        mask = np.ones(image_shape)
        c = int(imgSize * (1 - masksize))
        mask[c:, :] = 0.0
    else:
        # Bugfix: was ``assert(False)``, which is stripped under ``python -O``
        # and carries no message; raise a proper exception instead.
        raise ValueError("unknown maskType: %s" % maskType)
    return mask
6a457813a8e1a193b8be3a2e8dd63bf96d50b0dd | 181 | py | Python | src/pretty_number/__init__.py | szsdk/pretty_number | 640367d8e055190a4e11590dc663b14150adf34b | [
"MIT"
] | 1 | 2020-01-01T08:54:37.000Z | 2020-01-01T08:54:37.000Z | src/pretty_number/__init__.py | szsdk/pretty_number | 640367d8e055190a4e11590dc663b14150adf34b | [
"MIT"
] | null | null | null | src/pretty_number/__init__.py | szsdk/pretty_number | 640367d8e055190a4e11590dc663b14150adf34b | [
"MIT"
] | 1 | 2018-11-29T08:50:57.000Z | 2018-11-29T08:50:57.000Z | from ._pretty_number import pretty_float
from ._pretty_number import pretty_int
from ._pretty_number import pretty_number
__all__ = ['pretty_int', 'pretty_float', 'pretty_number']
| 30.166667 | 57 | 0.823204 | 25 | 181 | 5.32 | 0.28 | 0.451128 | 0.360902 | 0.496241 | 0.631579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.099448 | 181 | 5 | 58 | 36.2 | 0.815951 | 0 | 0 | 0 | 0 | 0 | 0.19337 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
dbf8db70e8c170153ca4f81a6f0b06c09c823b86 | 1,927 | py | Python | sdk/python/pulumi_azure_nextgen/documentdb/v20200601preview/__init__.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_nextgen/documentdb/v20200601preview/__init__.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_nextgen/documentdb/v20200601preview/__init__.py | test-wiz-sec/pulumi-azure-nextgen | 20a695af0d020b34b0f1c336e1b69702755174cc | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from .cassandra_resource_cassandra_keyspace import *
from .cassandra_resource_cassandra_table import *
from .database_account import *
from .get_cassandra_resource_cassandra_keyspace import *
from .get_cassandra_resource_cassandra_table import *
from .get_database_account import *
from .get_gremlin_resource_gremlin_database import *
from .get_gremlin_resource_gremlin_graph import *
from .get_mongo_db_resource_mongo_db_collection import *
from .get_mongo_db_resource_mongo_db_database import *
from .get_notebook_workspace import *
from .get_sql_resource_sql_container import *
from .get_sql_resource_sql_database import *
from .get_sql_resource_sql_role_assignment import *
from .get_sql_resource_sql_role_definition import *
from .get_sql_resource_sql_stored_procedure import *
from .get_sql_resource_sql_trigger import *
from .get_sql_resource_sql_user_defined_function import *
from .get_table_resource_table import *
from .gremlin_resource_gremlin_database import *
from .gremlin_resource_gremlin_graph import *
from .list_database_account_connection_strings import *
from .list_database_account_keys import *
from .list_notebook_workspace_connection_info import *
from .mongo_db_resource_mongo_db_collection import *
from .mongo_db_resource_mongo_db_database import *
from .notebook_workspace import *
from .sql_resource_sql_container import *
from .sql_resource_sql_database import *
from .sql_resource_sql_role_assignment import *
from .sql_resource_sql_role_definition import *
from .sql_resource_sql_stored_procedure import *
from .sql_resource_sql_trigger import *
from .sql_resource_sql_user_defined_function import *
from .table_resource_table import *
from ._inputs import *
from . import outputs
| 44.813953 | 80 | 0.852102 | 277 | 1,927 | 5.451264 | 0.234657 | 0.238411 | 0.137748 | 0.074172 | 0.807285 | 0.711921 | 0.345695 | 0.176159 | 0 | 0 | 0 | 0.000575 | 0.097561 | 1,927 | 42 | 81 | 45.880952 | 0.86774 | 0.105345 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e0063e31bbbdbde8d8cd940d103513986e82bb76 | 38,546 | py | Python | devilry/apps/core/tests/test_relateduser.py | devilry/devilry-django | 9ae28e462dfa4cfee966ebacbca04ade9627e715 | [
"BSD-3-Clause"
] | 29 | 2015-01-18T22:56:23.000Z | 2020-11-10T21:28:27.000Z | devilry/apps/core/tests/test_relateduser.py | devilry/devilry-django | 9ae28e462dfa4cfee966ebacbca04ade9627e715 | [
"BSD-3-Clause"
] | 786 | 2015-01-06T16:10:18.000Z | 2022-03-16T11:10:50.000Z | devilry/apps/core/tests/test_relateduser.py | devilry/devilry-django | 9ae28e462dfa4cfee966ebacbca04ade9627e715 | [
"BSD-3-Clause"
] | 15 | 2015-04-06T06:18:43.000Z | 2021-02-24T12:28:30.000Z | from django.conf import settings
from django.db import IntegrityError
from django.test import TestCase
from model_bakery import baker
from devilry.apps.core.models import RelatedExaminer, RelatedStudent
from devilry.devilry_account.exceptions import IllegalOperationError
from devilry.devilry_dbcache.customsql import AssignmentGroupDbCacheCustomSql
from devilry.project.develop.testhelpers.corebuilder import UserBuilder2
class TestRelatedStudentModel(TestCase):
    """Tests for RelatedStudent: per-period uniqueness and anonymous-name rules."""

    def test_unique_in_period(self):
        """A user may be registered as related student only once per period."""
        period = baker.make('core.Period')
        user = baker.make(settings.AUTH_USER_MODEL)
        baker.make('core.RelatedStudent', period=period, user=user)
        with self.assertRaises(IntegrityError):
            baker.make('core.RelatedStudent', period=period, user=user)

    def test_get_anonymous_name_missing_both_anonymous_id_and_candidate_id(self):
        """A placeholder is returned when neither ID is set."""
        student = baker.make('core.RelatedStudent')
        self.assertEqual('Automatic anonymous ID missing', student.get_anonymous_name())

    def test_get_anonymous_name_has_anonymous_id_but_not_candidate_id(self):
        """The automatic anonymous ID is used when no candidate_id is set."""
        student = baker.make('core.RelatedStudent',
                             automatic_anonymous_id='MyAutomaticID')
        self.assertEqual('MyAutomaticID', student.get_anonymous_name())

    def test_get_anonymous_name_has_anonymous_id_and_candidate_id(self):
        """candidate_id takes precedence over the automatic anonymous ID."""
        student = baker.make('core.RelatedStudent',
                             automatic_anonymous_id='MyAutomaticID',
                             candidate_id='MyCandidateID')
        self.assertEqual('MyCandidateID', student.get_anonymous_name())

    def test_get_anonymous_name_no_anonymous_id_but_has_candidate_id(self):
        """candidate_id is used when it is the only ID set."""
        student = baker.make('core.RelatedStudent', candidate_id='MyCandidateID')
        self.assertEqual('MyCandidateID', student.get_anonymous_name())
class TestRelatedExaminerModel(TestCase):
    """Tests for RelatedExaminer: per-period uniqueness and anonymous-name rules."""

    def test_unique_in_period(self):
        """A user may be registered as related examiner only once per period."""
        period = baker.make('core.Period')
        user = baker.make(settings.AUTH_USER_MODEL)
        baker.make('core.RelatedExaminer', period=period, user=user)
        with self.assertRaises(IntegrityError):
            baker.make('core.RelatedExaminer', period=period, user=user)

    def test_get_anonymous_name_missing_anonymous_id(self):
        """A placeholder is returned when no automatic anonymous ID is set."""
        examiner = baker.make('core.RelatedExaminer')
        self.assertEqual('Automatic anonymous ID missing', examiner.get_anonymous_name())

    def test_get_anonymous_name_has_anonymous_id(self):
        """The automatic anonymous ID is returned when set."""
        examiner = baker.make('core.RelatedExaminer',
                              automatic_anonymous_id='MyAutomaticID')
        self.assertEqual('MyAutomaticID', examiner.get_anonymous_name())
class TestRelatedExaminerManager(TestCase):
    """Tests for RelatedExaminer.objects bulk-creation helpers.

    Covers both the email-based API (only legal with the email auth
    backend) and the username-based API (only legal without it), including
    de-duplication against users already related in the same period.
    """

    def test_bulk_create_from_emails_not_allowed_with_username_auth_backend(self):
        """bulk_create_from_emails is illegal with the username auth backend."""
        testperiod = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            with self.assertRaises(IllegalOperationError):
                RelatedExaminer.objects.bulk_create_from_emails(testperiod, [])

    def test_bulk_create_from_emails_empty_input_list(self):
        """An empty email list creates nothing and reports nothing existing."""
        testperiod = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            result = RelatedExaminer.objects.bulk_create_from_emails(testperiod, [])
            self.assertEqual(0, result.created_relatedusers_queryset.count())
            self.assertEqual(0, RelatedExaminer.objects.count())
            self.assertEqual(set(), result.existing_relateduser_emails_set)

    def test_bulk_create_from_emails_single_new(self):
        """A single unknown email creates one user + related examiner."""
        testperiod = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            result = RelatedExaminer.objects.bulk_create_from_emails(
                testperiod, ['testuser@example.com'])
            self.assertEqual(1, result.created_relatedusers_queryset.count())
            self.assertEqual(1, RelatedExaminer.objects.count())
            self.assertEqual('testuser@example.com',
                             RelatedExaminer.objects.first().user.shortname)
            self.assertEqual(set(), result.existing_relateduser_emails_set)

    def test_bulk_create_from_emails_multiple_new(self):
        """Multiple unknown emails are all created."""
        testperiod = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            result = RelatedExaminer.objects.bulk_create_from_emails(
                testperiod, ['testuser1@example.com', 'testuser2@example.com', 'testuser3@example.com'])
            self.assertEqual(3, result.created_relatedusers_queryset.count())
            self.assertEqual(3, RelatedExaminer.objects.count())
            self.assertEqual({'testuser1@example.com', 'testuser2@example.com', 'testuser3@example.com'},
                             {relatedexaminer.user.shortname for relatedexaminer in RelatedExaminer.objects.all()})
            self.assertEqual(set(), result.existing_relateduser_emails_set)

    def test_bulk_create_from_emails_exclude_existing(self):
        """Emails already related in this period are skipped and reported."""
        testperiod = baker.make('core.Period')
        baker.make('core.RelatedExaminer',
                   period=testperiod,
                   user=UserBuilder2().add_emails('testuser1@example.com').user)
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            result = RelatedExaminer.objects.bulk_create_from_emails(
                period=testperiod,
                emails=['testuser1@example.com', 'testuser2@example.com'])
            self.assertEqual(2, RelatedExaminer.objects.count())
            self.assertEqual(1, result.created_relatedusers_queryset.count())
            self.assertEqual('testuser2@example.com',
                             result.created_relatedusers_queryset.first().user.shortname)
            self.assertEqual({'testuser1@example.com'},
                             result.existing_relateduser_emails_set)

    def test_bulk_create_from_emails_exclude_existing_in_other_period(self):
        """Being related in ANOTHER period does not block creation here."""
        testperiod = baker.make('core.Period')
        otherperiod = baker.make('core.Period')
        baker.make('core.RelatedExaminer',
                   period=otherperiod,
                   user=UserBuilder2(shortname='testuser1@example.com').add_emails('testuser1@example.com').user)
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            result = RelatedExaminer.objects.bulk_create_from_emails(
                period=testperiod,
                emails=['testuser1@example.com', 'testuser2@example.com'])
            self.assertEqual(3, RelatedExaminer.objects.count())
            self.assertEqual(2, result.created_relatedusers_queryset.count())
            self.assertEqual({'testuser1@example.com', 'testuser2@example.com'},
                             {relatedexaminer.user.shortname
                              for relatedexaminer in result.created_relatedusers_queryset.all()})
            self.assertEqual(set(), result.existing_relateduser_emails_set)

    def test_bulk_create_from_usernames_not_allowed_with_username_auth_backend(self):
        """bulk_create_from_usernames is illegal with the email auth backend."""
        testperiod = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            with self.assertRaises(IllegalOperationError):
                RelatedExaminer.objects.bulk_create_from_usernames(testperiod, [])

    def test_bulk_create_from_usernames_empty_input_list(self):
        """An empty username list creates nothing and reports nothing existing."""
        testperiod = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            result = RelatedExaminer.objects.bulk_create_from_usernames(testperiod, [])
            self.assertEqual(0, result.created_relatedusers_queryset.count())
            self.assertEqual(0, RelatedExaminer.objects.count())
            self.assertEqual(set(), result.existing_relateduser_usernames_set)

    def test_bulk_create_from_usernames_single_new(self):
        """A single unknown username creates one user + related examiner."""
        testperiod = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            result = RelatedExaminer.objects.bulk_create_from_usernames(
                testperiod, ['testuser'])
            self.assertEqual(1, result.created_relatedusers_queryset.count())
            self.assertEqual(1, RelatedExaminer.objects.count())
            self.assertEqual('testuser',
                             RelatedExaminer.objects.first().user.shortname)
            self.assertEqual(set(), result.existing_relateduser_usernames_set)

    def test_bulk_create_from_usernames_multiple_new(self):
        """Multiple unknown usernames are all created."""
        testperiod = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            result = RelatedExaminer.objects.bulk_create_from_usernames(
                testperiod, ['testuser1', 'testuser2', 'testuser3'])
            self.assertEqual(3, result.created_relatedusers_queryset.count())
            self.assertEqual(3, RelatedExaminer.objects.count())
            self.assertEqual({'testuser1', 'testuser2', 'testuser3'},
                             {relatedexaminer.user.shortname for relatedexaminer in RelatedExaminer.objects.all()})
            self.assertEqual(set(), result.existing_relateduser_usernames_set)

    def test_bulk_create_from_usernames_exclude_existing(self):
        """Usernames already related in this period are skipped and reported."""
        testperiod = baker.make('core.Period')
        baker.make('core.RelatedExaminer',
                   period=testperiod,
                   user=UserBuilder2().add_usernames('testuser1').user)
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            result = RelatedExaminer.objects.bulk_create_from_usernames(
                period=testperiod,
                usernames=['testuser1', 'testuser2'])
            self.assertEqual(2, RelatedExaminer.objects.count())
            self.assertEqual(1, result.created_relatedusers_queryset.count())
            self.assertEqual('testuser2',
                             result.created_relatedusers_queryset.first().user.shortname)
            self.assertEqual({'testuser1'},
                             result.existing_relateduser_usernames_set)

    def test_bulk_create_from_usernames_exclude_existing_in_other_period(self):
        """Being related in ANOTHER period does not block creation here."""
        testperiod = baker.make('core.Period')
        otherperiod = baker.make('core.Period')
        baker.make('core.RelatedExaminer',
                   period=otherperiod,
                   user=UserBuilder2(shortname='testuser1').add_usernames('testuser1').user)
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            result = RelatedExaminer.objects.bulk_create_from_usernames(
                period=testperiod,
                usernames=['testuser1', 'testuser2'])
            self.assertEqual(3, RelatedExaminer.objects.count())
            self.assertEqual(2, result.created_relatedusers_queryset.count())
            self.assertEqual({'testuser1', 'testuser2'},
                             {relatedexaminer.user.shortname
                              for relatedexaminer in result.created_relatedusers_queryset.all()})
            self.assertEqual(set(), result.existing_relateduser_usernames_set)
class TestRelatedStudentManager(TestCase):
    """Tests for the bulk-create helpers on the RelatedStudent manager.

    Covers both the email variant (requires the email auth backend) and the
    username variant (requires the username auth backend), including the
    skip-existing behavior.
    """

    def test_bulk_create_from_emails_not_allowed_with_username_auth_backend(self):
        period = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            with self.assertRaises(IllegalOperationError):
                RelatedStudent.objects.bulk_create_from_emails(period, [])

    def test_bulk_create_from_emails_empty_input_list(self):
        period = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            result = RelatedStudent.objects.bulk_create_from_emails(period, [])
            self.assertEqual(0, result.created_relatedusers_queryset.count())
            self.assertEqual(0, RelatedStudent.objects.count())
            self.assertEqual(set(), result.existing_relateduser_emails_set)

    def test_bulk_create_from_emails_single_new(self):
        period = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            result = RelatedStudent.objects.bulk_create_from_emails(
                period, ['testuser@example.com'])
            self.assertEqual(1, result.created_relatedusers_queryset.count())
            self.assertEqual(1, RelatedStudent.objects.count())
            self.assertEqual('testuser@example.com',
                             RelatedStudent.objects.first().user.shortname)
            self.assertEqual(set(), result.existing_relateduser_emails_set)

    def test_bulk_create_from_emails_multiple_new(self):
        period = baker.make('core.Period')
        emails = ['testuser1@example.com', 'testuser2@example.com',
                  'testuser3@example.com']
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            result = RelatedStudent.objects.bulk_create_from_emails(period, emails)
            self.assertEqual(3, result.created_relatedusers_queryset.count())
            self.assertEqual(3, RelatedStudent.objects.count())
            shortnames = set(relatedstudent.user.shortname
                             for relatedstudent in RelatedStudent.objects.all())
            self.assertEqual(set(emails), shortnames)
            self.assertEqual(set(), result.existing_relateduser_emails_set)

    def test_bulk_create_from_emails_exclude_existing(self):
        period = baker.make('core.Period')
        baker.make('core.RelatedStudent',
                   period=period,
                   user=UserBuilder2().add_emails('testuser1@example.com').user)
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            result = RelatedStudent.objects.bulk_create_from_emails(
                period=period,
                emails=['testuser1@example.com', 'testuser2@example.com'])
            self.assertEqual(2, RelatedStudent.objects.count())
            created_queryset = result.created_relatedusers_queryset
            self.assertEqual(1, created_queryset.count())
            self.assertEqual('testuser2@example.com',
                             created_queryset.first().user.shortname)
            self.assertEqual({'testuser1@example.com'},
                             result.existing_relateduser_emails_set)

    def test_bulk_create_from_emails_exclude_existing_in_other_period(self):
        period = baker.make('core.Period')
        other_period = baker.make('core.Period')
        baker.make(
            'core.RelatedStudent',
            period=other_period,
            user=UserBuilder2(
                shortname='testuser1@example.com').add_emails('testuser1@example.com').user)
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            result = RelatedStudent.objects.bulk_create_from_emails(
                period=period,
                emails=['testuser1@example.com', 'testuser2@example.com'])
            self.assertEqual(3, RelatedStudent.objects.count())
            created_queryset = result.created_relatedusers_queryset
            self.assertEqual(2, created_queryset.count())
            self.assertEqual(
                {'testuser1@example.com', 'testuser2@example.com'},
                set(relatedstudent.user.shortname
                    for relatedstudent in created_queryset.all()))
            self.assertEqual(set(), result.existing_relateduser_emails_set)

    def test_bulk_create_from_usernames_not_allowed_with_username_auth_backend(self):
        period = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=True):
            with self.assertRaises(IllegalOperationError):
                RelatedStudent.objects.bulk_create_from_usernames(period, [])

    def test_bulk_create_from_usernames_empty_input_list(self):
        period = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            result = RelatedStudent.objects.bulk_create_from_usernames(period, [])
            self.assertEqual(0, result.created_relatedusers_queryset.count())
            self.assertEqual(0, RelatedStudent.objects.count())
            self.assertEqual(set(), result.existing_relateduser_usernames_set)

    def test_bulk_create_from_usernames_single_new(self):
        period = baker.make('core.Period')
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            result = RelatedStudent.objects.bulk_create_from_usernames(
                period, ['testuser'])
            self.assertEqual(1, result.created_relatedusers_queryset.count())
            self.assertEqual(1, RelatedStudent.objects.count())
            self.assertEqual('testuser',
                             RelatedStudent.objects.first().user.shortname)
            self.assertEqual(set(), result.existing_relateduser_usernames_set)

    def test_bulk_create_from_usernames_multiple_new(self):
        period = baker.make('core.Period')
        usernames = ['testuser1', 'testuser2', 'testuser3']
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            result = RelatedStudent.objects.bulk_create_from_usernames(
                period, usernames)
            self.assertEqual(3, result.created_relatedusers_queryset.count())
            self.assertEqual(3, RelatedStudent.objects.count())
            shortnames = set(relatedstudent.user.shortname
                             for relatedstudent in RelatedStudent.objects.all())
            self.assertEqual(set(usernames), shortnames)
            self.assertEqual(set(), result.existing_relateduser_usernames_set)

    def test_bulk_create_from_usernames_exclude_existing(self):
        period = baker.make('core.Period')
        baker.make('core.RelatedStudent',
                   period=period,
                   user=UserBuilder2().add_usernames('testuser1').user)
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            result = RelatedStudent.objects.bulk_create_from_usernames(
                period=period,
                usernames=['testuser1', 'testuser2'])
            self.assertEqual(2, RelatedStudent.objects.count())
            created_queryset = result.created_relatedusers_queryset
            self.assertEqual(1, created_queryset.count())
            self.assertEqual('testuser2',
                             created_queryset.first().user.shortname)
            self.assertEqual({'testuser1'},
                             result.existing_relateduser_usernames_set)

    def test_bulk_create_from_usernames_exclude_existing_in_other_period(self):
        period = baker.make('core.Period')
        other_period = baker.make('core.Period')
        baker.make(
            'core.RelatedStudent',
            period=other_period,
            user=UserBuilder2(shortname='testuser1').add_usernames('testuser1').user)
        with self.settings(CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND=False):
            result = RelatedStudent.objects.bulk_create_from_usernames(
                period=period,
                usernames=['testuser1', 'testuser2'])
            self.assertEqual(3, RelatedStudent.objects.count())
            created_queryset = result.created_relatedusers_queryset
            self.assertEqual(2, created_queryset.count())
            self.assertEqual(
                {'testuser1', 'testuser2'},
                set(relatedstudent.user.shortname
                    for relatedstudent in created_queryset.all()))
            self.assertEqual(set(), result.existing_relateduser_usernames_set)
class TestRelatedStudentQuerySet(TestCase):
    """Tests for RelatedStudentQuerySet grading-point annotations and the
    user-id to candidate-id mapping."""

    def setUp(self):
        AssignmentGroupDbCacheCustomSql().initialize()

    def __make_published_feedbackset(self, relatedstudent, assignment, grading_points=0):
        """Put ``relatedstudent`` on a fresh group in ``assignment`` and publish
        a first-attempt feedbackset graded with ``grading_points``."""
        from devilry.devilry_group import devilry_group_baker_factories as group_baker
        group = baker.make('core.AssignmentGroup', parentnode=assignment)
        group_baker.feedbackset_first_attempt_published(
            group=group, grading_points=grading_points)
        baker.make('core.Candidate', assignment_group=group,
                   relatedstudent=relatedstudent)
        return relatedstudent

    def __annotated_queryset(self, *assignments):
        """Return a RelatedStudent queryset annotated with the grading-point
        total over ``assignments``."""
        return RelatedStudent.objects.annotate_with_total_grading_points(
            assignment_ids=[assignment.id for assignment in assignments])

    def test_annotate_with_total_grading_points_assignments_filter_sanity_before_annotation(self):
        assignment1 = baker.make('core.Assignment', max_points=50)
        assignment2 = baker.make('core.Assignment', max_points=50)
        relatedstudent = baker.make('core.RelatedStudent')
        queryset = (
            RelatedStudent.objects
            .filter(candidate__assignment_group__parentnode_id__in=[
                assignment1.id, assignment2.id])
            .annotate_with_total_grading_points(
                assignment_ids=[assignment1.id, assignment2.id]))
        self.assertNotIn(relatedstudent, queryset)

    def test_annotate_with_total_grading_points_sanity(self):
        assignment1 = baker.make('core.Assignment', max_points=50)
        assignment2 = baker.make('core.Assignment', max_points=50)
        relatedstudent = baker.make('core.RelatedStudent')
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent, assignment=assignment1, grading_points=25)
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent, assignment=assignment2, grading_points=25)
        annotated = self.__annotated_queryset(
            assignment1, assignment2).get(id=relatedstudent.id)
        self.assertEqual(annotated.grade_points_total, 50)

    def test_annotated_with_total_grading_points_zero_for_relatedstudent_not_on_assignment(self):
        assignment1 = baker.make('core.Assignment', max_points=50)
        assignment2 = baker.make('core.Assignment', max_points=50)
        relatedstudent = baker.make('core.RelatedStudent')
        annotated = self.__annotated_queryset(
            assignment1, assignment2).get(id=relatedstudent.id)
        self.assertEqual(annotated.grade_points_total, 0)

    def test_annotate_with_total_points_relatedstudent_not_on_one_assignment(self):
        assignment1 = baker.make('core.Assignment', max_points=50)
        assignment2 = baker.make('core.Assignment', max_points=50)
        relatedstudent = baker.make('core.RelatedStudent')
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent, assignment=assignment1, grading_points=25)
        annotated = self.__annotated_queryset(
            assignment1, assignment2).get(id=relatedstudent.id)
        self.assertEqual(annotated.grade_points_total, 25)

    def test_annotate_with_total_points_relatedstudent_not_on_any_assignment(self):
        assignment1 = baker.make('core.Assignment', max_points=50)
        assignment2 = baker.make('core.Assignment', max_points=50)
        relatedstudent = baker.make('core.RelatedStudent')
        annotated = self.__annotated_queryset(
            assignment1, assignment2).get(id=relatedstudent.id)
        self.assertEqual(annotated.grade_points_total, 0)

    def test_annotate_with_total_grading_points_multiple_relatedstudents(self):
        assignment1 = baker.make('core.Assignment', max_points=50)
        assignment2 = baker.make('core.Assignment', max_points=50)
        relatedstudent1 = baker.make('core.RelatedStudent', user__fullname='Test1')
        relatedstudent2 = baker.make('core.RelatedStudent', user__fullname='Test2')
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent1, assignment=assignment1, grading_points=25)
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent1, assignment=assignment2, grading_points=25)
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent2, assignment=assignment1, grading_points=10)
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent2, assignment=assignment2, grading_points=10)
        queryset = self.__annotated_queryset(assignment1, assignment2)
        self.assertEqual(queryset.get(id=relatedstudent1.id).grade_points_total, 50)
        self.assertEqual(queryset.get(id=relatedstudent2.id).grade_points_total, 20)

    def test_annotate_with_total_points_query_count(self):
        assignment1 = baker.make('core.Assignment', max_points=50)
        assignment2 = baker.make('core.Assignment', max_points=50)
        relatedstudent1 = baker.make('core.RelatedStudent', user__fullname='Test1')
        relatedstudent2 = baker.make('core.RelatedStudent', user__fullname='Test2')
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent1, assignment=assignment1, grading_points=25)
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent1, assignment=assignment2, grading_points=25)
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent2, assignment=assignment1, grading_points=10)
        self.__make_published_feedbackset(
            relatedstudent=relatedstudent2, assignment=assignment2, grading_points=10)
        # Evaluating the annotated queryset must cost exactly one query.
        with self.assertNumQueries(1):
            queryset = self.__annotated_queryset(assignment1, assignment2)
            len(queryset)

    def test_get_userid_to_candidateid_map_no_relatedstudents(self):
        period = baker.make('core.Period')
        self.assertEqual(
            {}, period.relatedstudent_set.get_userid_to_candidateid_map())

    def test_get_userid_to_candidateid_map_ignore_candidate_id_none(self):
        period = baker.make('core.Period')
        baker.make('core.RelatedStudent', period=period, candidate_id=None)
        self.assertEqual(
            {}, period.relatedstudent_set.get_userid_to_candidateid_map())

    def test_get_userid_to_candidateid_map_ignore_candidate_id_emptystring(self):
        period = baker.make('core.Period')
        baker.make('core.RelatedStudent', period=period, candidate_id='')
        self.assertEqual(
            {}, period.relatedstudent_set.get_userid_to_candidateid_map())

    def test_get_userid_to_candidateid_map(self):
        period = baker.make('core.Period')
        expected = {}
        for candidate_id in ('a', 'b', 'c'):
            relatedstudent = baker.make('core.RelatedStudent',
                                        period=period,
                                        candidate_id=candidate_id)
            expected[relatedstudent.user_id] = candidate_id
        self.assertEqual(
            expected,
            period.relatedstudent_set.get_userid_to_candidateid_map())
class TestRelatedStudentQuerySetPrefetchSyncsystemtags(TestCase):
    """Tests for prefetch_syncsystemtag_objects() and the
    syncsystemtag_stringlist property."""

    def __get_prefetched(self, relatedstudent):
        """Re-fetch ``relatedstudent`` with syncsystemtag objects prefetched."""
        return (RelatedStudent.objects
                .prefetch_syncsystemtag_objects()
                .get(id=relatedstudent.id))

    def test_none(self):
        relatedstudent = baker.make('core.RelatedStudent')
        self.assertEqual(
            [], self.__get_prefetched(relatedstudent).syncsystemtag_objects)

    def test_ordering(self):
        period = baker.make('core.Period')
        relatedstudent = baker.make('core.RelatedStudent', period=period)
        periodtag_a = baker.make('core.PeriodTag', period=period, tag='a')
        periodtag_b = baker.make('core.PeriodTag', period=period, tag='b')
        periodtag_c = baker.make('core.PeriodTag', period=period, tag='c')
        # Tags are added out of alphabetical order; the assertion expects
        # the prefetched list sorted a, b, c.
        periodtag_b.relatedstudents.add(relatedstudent)
        periodtag_a.relatedstudents.add(relatedstudent)
        periodtag_c.relatedstudents.add(relatedstudent)
        self.assertEqual(
            [periodtag_a, periodtag_b, periodtag_c],
            self.__get_prefetched(relatedstudent).syncsystemtag_objects)

    def test_syncsystemtag_stringlist_not_using_prefetch_syncsystemtag_objects(self):
        relatedstudent = baker.make('core.RelatedStudent')
        with self.assertRaisesMessage(AttributeError,
                                      'The syncsystemtag_stringlist property requires '
                                      'RelatedStudentQuerySet.prefetch_syncsystemtag_objects().'):
            str(relatedstudent.syncsystemtag_stringlist)

    def test_syncsystemtag_stringlist(self):
        period = baker.make('core.Period')
        relatedstudent = baker.make('core.RelatedStudent', period=period)
        periodtag_a = baker.make('core.PeriodTag', period=period, tag='a')
        periodtag_b = baker.make('core.PeriodTag', period=period, tag='b')
        periodtag_b.relatedstudents.add(relatedstudent)
        periodtag_a.relatedstudents.add(relatedstudent)
        self.assertEqual(
            ['a', 'b'],
            self.__get_prefetched(relatedstudent).syncsystemtag_stringlist)
class RelatedExaminerQuerySetAnnotateWithNumberOfGroupsOnAssignment(TestCase):
    """Tests for annotate_with_number_of_groups_on_assignment()."""

    def __get_annotated(self, relatedexaminer, assignment):
        """Re-fetch ``relatedexaminer`` through the annotated queryset."""
        queryset = RelatedExaminer.objects.annotate_with_number_of_groups_on_assignment(
            assignment=assignment)
        return queryset.get(id=relatedexaminer.id)

    def test_no_groups(self):
        relatedexaminer = baker.make('core.RelatedExaminer')
        assignment = baker.make('core.Assignment')
        annotated = self.__get_annotated(relatedexaminer, assignment)
        self.assertEqual(0, annotated.number_of_groups_on_assignment)

    def test_not_within_assignment(self):
        relatedexaminer = baker.make('core.RelatedExaminer')
        assignment = baker.make('core.Assignment')
        # Examiner object on a group outside ``assignment`` must not count.
        baker.make('core.Examiner', relatedexaminer=relatedexaminer)
        annotated = self.__get_annotated(relatedexaminer, assignment)
        self.assertEqual(0, annotated.number_of_groups_on_assignment)

    def test_multiple_groups(self):
        relatedexaminer = baker.make('core.RelatedExaminer')
        assignment = baker.make('core.Assignment')
        for _ in range(2):
            baker.make('core.Examiner',
                       assignmentgroup__parentnode=assignment,
                       relatedexaminer=relatedexaminer)
        annotated = self.__get_annotated(relatedexaminer, assignment)
        self.assertEqual(2, annotated.number_of_groups_on_assignment)

    def test_multiple_relatedexaminers(self):
        relatedexaminer1 = baker.make('core.RelatedExaminer')
        relatedexaminer2 = baker.make('core.RelatedExaminer')
        assignment = baker.make('core.Assignment')
        for _ in range(2):
            baker.make('core.Examiner',
                       assignmentgroup__parentnode=assignment,
                       relatedexaminer=relatedexaminer1)
        baker.make('core.Examiner',
                   assignmentgroup__parentnode=assignment,
                   relatedexaminer=relatedexaminer2)
        annotated1 = self.__get_annotated(relatedexaminer1, assignment)
        self.assertEqual(2, annotated1.number_of_groups_on_assignment)
        annotated2 = self.__get_annotated(relatedexaminer2, assignment)
        self.assertEqual(1, annotated2.number_of_groups_on_assignment)
class RelatedExaminerQuerySetExtraAnnotateWithNumberOfCandidatesOnAssignment(TestCase):
    """Tests for extra_annotate_with_number_of_candidates_on_assignment()."""

    def setUp(self):
        AssignmentGroupDbCacheCustomSql().initialize()

    def __get_annotated(self, relatedexaminer, assignment):
        """Re-fetch ``relatedexaminer`` through the annotated queryset."""
        queryset = (
            RelatedExaminer.objects
            .extra_annotate_with_number_of_candidates_on_assignment(
                assignment=assignment))
        return queryset.get(id=relatedexaminer.id)

    def test_no_groups(self):
        relatedexaminer = baker.make('core.RelatedExaminer')
        assignment = baker.make('core.Assignment')
        annotated = self.__get_annotated(relatedexaminer, assignment)
        self.assertEqual(0, annotated.number_of_candidates_on_assignment)

    def test_only_within_assignment(self):
        relatedexaminer = baker.make('core.RelatedExaminer')
        assignment = baker.make('core.Assignment')
        # Group (and its candidate) lives outside ``assignment``.
        other_group = baker.make('core.AssignmentGroup')
        baker.make('core.Examiner',
                   assignmentgroup=other_group,
                   relatedexaminer=relatedexaminer)
        baker.make('core.Candidate', assignment_group=other_group)
        annotated = self.__get_annotated(relatedexaminer, assignment)
        self.assertEqual(0, annotated.number_of_candidates_on_assignment)

    def test_only_within_assignment_sanity(self):
        relatedexaminer = baker.make('core.RelatedExaminer')
        assignment1 = baker.make('core.Assignment')
        assignment2 = baker.make('core.Assignment')
        group1 = baker.make('core.AssignmentGroup', parentnode=assignment1)
        baker.make('core.Examiner',
                   assignmentgroup=group1,
                   relatedexaminer=relatedexaminer)
        baker.make('core.Candidate', assignment_group=group1)
        group2 = baker.make('core.AssignmentGroup', parentnode=assignment2)
        baker.make('core.Examiner',
                   assignmentgroup=group2,
                   relatedexaminer=relatedexaminer)
        baker.make('core.Candidate', assignment_group=group2)
        baker.make('core.Candidate', assignment_group=group2)
        # Annotating per-assignment must only count that assignment's candidates.
        annotated1 = self.__get_annotated(relatedexaminer, assignment1)
        self.assertEqual(1, annotated1.number_of_candidates_on_assignment)
        annotated2 = self.__get_annotated(relatedexaminer, assignment2)
        self.assertEqual(2, annotated2.number_of_candidates_on_assignment)

    def test_multiple_candidates(self):
        relatedexaminer = baker.make('core.RelatedExaminer')
        assignment = baker.make('core.Assignment')
        group = baker.make('core.AssignmentGroup', parentnode=assignment)
        baker.make('core.Examiner',
                   assignmentgroup=group,
                   relatedexaminer=relatedexaminer)
        baker.make('core.Candidate', assignment_group=group)
        baker.make('core.Candidate', assignment_group=group)
        annotated = self.__get_annotated(relatedexaminer, assignment)
        self.assertEqual(2, annotated.number_of_candidates_on_assignment)

    def test_multiple_examiner_objects(self):
        relatedexaminer = baker.make('core.RelatedExaminer')
        assignment = baker.make('core.Assignment')
        group1 = baker.make('core.AssignmentGroup', parentnode=assignment)
        baker.make('core.Examiner',
                   assignmentgroup=group1,
                   relatedexaminer=relatedexaminer)
        baker.make('core.Candidate', assignment_group=group1)
        baker.make('core.Candidate', assignment_group=group1)
        group2 = baker.make('core.AssignmentGroup', parentnode=assignment)
        baker.make('core.Examiner',
                   assignmentgroup=group2,
                   relatedexaminer=relatedexaminer)
        baker.make('core.Candidate', assignment_group=group2)
        annotated = self.__get_annotated(relatedexaminer, assignment)
        self.assertEqual(3, annotated.number_of_candidates_on_assignment)
| 54.213783 | 115 | 0.691589 | 3,733 | 38,546 | 6.840343 | 0.053844 | 0.050049 | 0.071275 | 0.026787 | 0.908204 | 0.897787 | 0.878715 | 0.857216 | 0.840258 | 0.827374 | 0 | 0.00958 | 0.220049 | 38,546 | 710 | 116 | 54.290141 | 0.839775 | 0.001998 | 0 | 0.813205 | 0 | 0 | 0.093568 | 0.020643 | 0 | 0 | 0 | 0 | 0.185185 | 1 | 0.095008 | false | 0 | 0.014493 | 0 | 0.123994 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e0306b9c2b493ad3e6f8ab8ae67fd98f9999884b | 66,554 | py | Python | code/python/tools/dataset_download_images.py | lingjie0206/ml-hypersim | 2408fbafe580246108585f9c46780dc62f284cfc | [
"AML"
] | 1 | 2021-07-29T15:13:55.000Z | 2021-07-29T15:13:55.000Z | code/python/tools/dataset_download_images.py | lingjie0206/ml-hypersim | 2408fbafe580246108585f9c46780dc62f284cfc | [
"AML"
] | null | null | null | code/python/tools/dataset_download_images.py | lingjie0206/ml-hypersim | 2408fbafe580246108585f9c46780dc62f284cfc | [
"AML"
] | null | null | null | #
# For licensing see accompanying LICENSE.txt file.
# Copyright (C) 2020 Apple Inc. All Rights Reserved.
#
import argparse
import os
import subprocess
# Parse command-line options and make sure the target directories exist.
parser = argparse.ArgumentParser()
parser.add_argument("--downloads_dir", required=True)
parser.add_argument("--decompress_dir")
args = parser.parse_args()

print("[HYPERSIM: DATASET_DOWNLOAD_IMAGES] Begin...")

# os.makedirs(..., exist_ok=True) replaces the check-then-create pattern,
# which is racy and needlessly verbose.
os.makedirs(args.downloads_dir, exist_ok=True)

# decompress_dir stays None when --decompress_dir is not given, which tells
# download() below to skip unzipping.
decompress_dir = args.decompress_dir
if decompress_dir is not None:
    os.makedirs(decompress_dir, exist_ok=True)
def download(url, downloads_dir, decompress_dir):
    """Download *url* into *downloads_dir* and optionally unzip it.

    Parameters
    ----------
    url : str
        Direct link to a .zip archive; the file name is taken from the URL.
    downloads_dir : str
        Directory the archive is saved into (must already exist).
    decompress_dir : str or None
        If not None, the downloaded archive is unzipped into this directory.

    Raises
    ------
    subprocess.CalledProcessError
        If curl or unzip exits with a non-zero status.
    """
    download_name = os.path.basename(url)
    download_file = os.path.join(downloads_dir, download_name)

    # Pass the command as an argument list (no shell) so paths containing
    # spaces or shell metacharacters are handled safely, and use check=True
    # so a failed download raises even under `python -O` (where the previous
    # `assert retval == 0` would have been stripped and failures ignored).
    cmd = ["curl", url, "--output", download_file]
    print("")
    print(" ".join(cmd))
    print("")
    subprocess.run(cmd, check=True)

    if decompress_dir is not None:
        cmd = ["unzip", download_file, "-d", decompress_dir]
        print("")
        print(" ".join(cmd))
        print("")
        subprocess.run(cmd, check=True)
# Scene archives to fetch, listed as (volume, scene numbers) pairs.
# Some scene numbers are absent because those scenes were not released.
_BASE_URL = "https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes"
_SCENES = [
    ("001", list(range(1, 11))),
    ("002", list(range(1, 11))),
    ("003", [1, 2, 4, 5, 6, 7, 8, 9, 10]),
    ("004", list(range(1, 11))),
    ("005", [1, 3, 4, 5, 6, 7, 8, 9, 10]),
    ("006", [1, 2, 3, 4, 6, 7, 8, 9, 10]),
    ("007", [1, 2, 4, 5, 6, 7, 8, 9, 10]),
    ("008", list(range(1, 11))),
    ("009", list(range(1, 10))),
    ("010", list(range(1, 10))),
    ("011", [1, 3, 4, 5, 6, 7, 8, 9, 10]),
    ("012", [1, 4, 5, 7, 9, 10]),
    ("013", [1, 2, 3, 4, 7, 9, 10]),
    ("014", [3, 6, 10]),
    ("015", [1, 3, 4, 5, 6, 7, 8, 9, 10]),
    ("016", [1, 2, 3, 4, 5, 6, 7, 9, 10]),
    ("017", list(range(1, 11))),
    ("018", list(range(1, 11))),
    ("019", [1, 2, 3, 4, 6, 7, 8, 9]),
    ("021", [1, 2, 3, 7, 8, 9, 10]),
    ("022", [1, 2, 3, 4, 5, 6, 7, 9, 10]),
    ("023", list(range(1, 11))),
    ("024", list(range(1, 20))),
    ("026", [1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]),
    ("027", [1, 3, 4, 5, 6, 7]),
]

# Download every listed archive in ascending order (same order as before).
for _volume, _scene_numbers in _SCENES:
    for _scene_number in _scene_numbers:
        download("%s/ai_%s_%03d.zip" % (_BASE_URL, _volume, _scene_number),
                 args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_027_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_028_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_029_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_029_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_029_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_029_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_029_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_030_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_031_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_032_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_033_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_034_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_034_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_034_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_034_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_035_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_036_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_037_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_038_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_039_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_041_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_042_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_042_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_042_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_042_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_042_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_043_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_044_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_045_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_046_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_047_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_048_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_050_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_050_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_050_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_050_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_050_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_051_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_051_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_051_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_051_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_051_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_052_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_012.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_013.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_014.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_016.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_017.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_018.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_019.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_053_020.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_054_010.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_001.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_002.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_003.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_004.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_005.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_006.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_007.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_008.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_009.zip", args.downloads_dir, decompress_dir)
download("https://docs-assets.developer.apple.com/ml-research/datasets/hypersim/v1/scenes/ai_055_010.zip", args.downloads_dir, decompress_dir)
print("[HYPERSIM: DATASET_DOWNLOAD_IMAGES] Finished.")
| 129.734893 | 142 | 0.822625 | 10,209 | 66,554 | 5.181115 | 0.01195 | 0.114777 | 0.138844 | 0.216471 | 0.990301 | 0.98756 | 0.986766 | 0.986104 | 0.986104 | 0.986104 | 0 | 0.049346 | 0.024116 | 66,554 | 512 | 143 | 129.988281 | 0.765046 | 0.001488 | 0 | 0.020534 | 0 | 0.938398 | 0.648653 | 0.000722 | 0 | 0 | 0 | 0 | 0.004107 | 1 | 0.002053 | false | 0 | 0.004107 | 0 | 0.00616 | 0.016427 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
e05f342fc2ae45e5cf5d7151c9d9e26c6dd7d6ba | 282 | py | Python | flow/envs/__init__.py | mark-koren/flow | f3f6d7e9c64f6b641a464a716c7f38ca00388805 | [
"MIT"
] | null | null | null | flow/envs/__init__.py | mark-koren/flow | f3f6d7e9c64f6b641a464a716c7f38ca00388805 | [
"MIT"
] | null | null | null | flow/envs/__init__.py | mark-koren/flow | f3f6d7e9c64f6b641a464a716c7f38ca00388805 | [
"MIT"
] | null | null | null | from flow.envs.two_intersection import *
from flow.envs.loop_accel import *
from flow.envs.lane_changing import *
from flow.envs.loop_merges import *
from flow.envs.two_loops_one_merging import *
from flow.envs.loop_with_perturbation import *
from flow.envs.my_environment import *
| 35.25 | 46 | 0.826241 | 45 | 282 | 4.955556 | 0.4 | 0.251121 | 0.376682 | 0.484305 | 0.295964 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.099291 | 282 | 7 | 47 | 40.285714 | 0.877953 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e06c58e57f44b02adb8759c7d6517ea3a0c695a5 | 9,390 | py | Python | python/ray/serve/tests/test_batching.py | valiantljk-tt/ray | a4d1e322cbe787c433fb6d955a6621b3e082ad24 | [
"Apache-2.0"
] | 1 | 2021-04-25T17:16:29.000Z | 2021-04-25T17:16:29.000Z | python/ray/serve/tests/test_batching.py | tjcommV2X/ray | 3965310f939cfbb0d700174529ee5bc7d4871de8 | [
"Apache-2.0"
] | 69 | 2021-03-27T07:03:21.000Z | 2022-03-05T08:07:51.000Z | python/ray/serve/tests/test_batching.py | tjcommV2X/ray | 3965310f939cfbb0d700174529ee5bc7d4871de8 | [
"Apache-2.0"
] | null | null | null | import asyncio
import pytest
import ray
from ray import serve
from ray.serve.config import BackendConfig
def test_batching(serve_instance):
    """Backend-level batching: concurrent requests are grouped, so a counter
    incremented once per *batch* stays below the number of requests."""

    class BatchingExample:
        def __init__(self):
            self.count = 0

        @serve.accept_batch
        def __call__(self, requests):
            # One increment per executed batch, not per request.
            self.count += 1
            return [self.count] * len(requests)

    # Cap batches at 5 requests; wait up to 1s to fill a batch.
    backend_config = BackendConfig(max_batch_size=5, batch_wait_timeout=1)
    serve.create_backend("counter:v11", BatchingExample, config=backend_config)
    serve.create_endpoint(
        "counter1", backend="counter:v11", route="/increment2")
    endpoint_handle = serve.get_handle("counter1")
    futures = [endpoint_handle.remote(temp=1) for _ in range(20)]
    results = ray.get(futures)
    # The counter only advances once per batch, so if at least one batch
    # held more than one request the maximum observed value is below 20.
    assert max(results) < 20
def test_batching_exception(serve_instance):
    """A batched backend that returns a scalar instead of one result per
    request must surface an error to the caller."""

    class NoListReturned:
        def __init__(self):
            self.count = 0

        @serve.accept_batch
        def __call__(self, requests):
            # Invalid on purpose: batch handlers must return a list with
            # one entry per request, not a bare int.
            return len(requests)

    # Cap batches at 5 requests.
    backend_config = BackendConfig(max_batch_size=5)
    serve.create_backend("exception:v1", NoListReturned, config=backend_config)
    serve.create_endpoint("exception-test", backend="exception:v1")
    endpoint_handle = serve.get_handle("exception-test")
    with pytest.raises(ray.exceptions.RayTaskError):
        assert ray.get(endpoint_handle.remote(temp=1))
def test_app_level_batching(serve_instance):
    """Application-level @serve.batch on a method: concurrent requests are
    grouped, so a counter incremented once per batch stays below the number
    of requests."""

    class BatchingExample:
        def __init__(self):
            self.count = 0

        @serve.batch(max_batch_size=5, batch_wait_timeout_s=1)
        async def handle_batch(self, requests):
            # One increment per executed batch.
            self.count += 1
            return [self.count] * len(requests)

        async def __call__(self, request):
            return await self.handle_batch(request)

    serve.create_backend("counter:v11", BatchingExample)
    serve.create_endpoint(
        "counter1", backend="counter:v11", route="/increment2")
    endpoint_handle = serve.get_handle("counter1")
    futures = [endpoint_handle.remote(temp=1) for _ in range(20)]
    results = ray.get(futures)
    # If at least one batch contained more than one request, the counter
    # (incremented once per batch) never reaches 20.
    assert max(results) < 20
def test_app_level_batching_exception(serve_instance):
    """An @serve.batch handler that returns a scalar instead of one result
    per request must propagate an error back to the caller."""

    class NoListReturned:
        def __init__(self):
            self.count = 0

        @serve.batch(max_batch_size=5)
        async def handle_batch(self, requests):
            # Invalid on purpose: must return one result per request.
            return len(requests)

        async def __call__(self, request):
            return await self.handle_batch(request)

    serve.create_backend("exception:v1", NoListReturned)
    serve.create_endpoint("exception-test", backend="exception:v1")
    endpoint_handle = serve.get_handle("exception-test")
    with pytest.raises(ray.exceptions.RayTaskError):
        assert ray.get(endpoint_handle.remote(temp=1))
@pytest.mark.asyncio
async def test_decorator_validation():
    """Validate argument checking performed by the @serve.batch decorator."""
    # Bare and fully-parameterized forms are both accepted on free async
    # functions ...
    @serve.batch
    async def function():
        pass
    @serve.batch(max_batch_size=10, batch_wait_timeout_s=1.5)
    async def function2():
        pass
    # ... and on async methods.
    class Class():
        @serve.batch
        async def method(self):
            pass
    class Class2():
        @serve.batch(max_batch_size=10, batch_wait_timeout_s=1.5)
        async def method(self):
            pass
    # Non-async callables are rejected with a TypeError mentioning "async def".
    with pytest.raises(TypeError, match="async def"):
        @serve.batch
        def non_async_function():
            pass
    with pytest.raises(TypeError, match="async def"):
        class NotAsync:
            @serve.batch
            def method(self, requests):
                pass
    # max_batch_size must be a positive integer (float-valued integers OK).
    with pytest.raises(ValueError):
        class ZeroBatch:
            @serve.batch(max_batch_size=0)
            async def method(self, requests):
                pass
    with pytest.raises(TypeError):
        class FloatNonIntBatch:
            @serve.batch(max_batch_size=1.1)
            async def method(self, requests):
                pass
    class FloatIntegerBatch:
        @serve.batch(max_batch_size=1.0)
        async def method(self, requests):
            pass
    # batch_wait_timeout_s must be a non-negative number (0 and 0.0 OK).
    with pytest.raises(ValueError):
        class NegativeTimeout:
            @serve.batch(batch_wait_timeout_s=-0.1)
            async def method(self, requests):
                pass
    class FloatZeroTimeout:
        @serve.batch(batch_wait_timeout_s=0.0)
        async def method(self, requests):
            pass
    class IntZeroTimeout:
        @serve.batch(batch_wait_timeout_s=0)
        async def method(self, requests):
            pass
    with pytest.raises(TypeError):
        class NonTimeout:
            @serve.batch(batch_wait_timeout_s="a")
            async def method(self, requests):
                pass
@pytest.mark.asyncio
@pytest.mark.parametrize("use_class", [True, False])
async def test_batch_size_one_long_timeout(use_class):
    """With max_batch_size=1 a request must not wait out the (huge) batch
    timeout: every request forms its own batch and runs immediately."""
    @serve.batch(max_batch_size=1, batch_wait_timeout_s=1000)
    async def long_timeout(requests):
        # The handler sees the whole batch; "raise" forces ZeroDivisionError.
        if "raise" in requests:
            1 / 0
        return requests
    class LongTimeout:
        @serve.batch(max_batch_size=1, batch_wait_timeout_s=1000)
        async def long_timeout(self, requests):
            if "raise" in requests:
                1 / 0
            return requests
    cls = LongTimeout()
    # Dispatch to the free function or the method, per parametrize.
    async def call(arg):
        if use_class:
            return await cls.long_timeout(arg)
        else:
            return await long_timeout(arg)
    assert await call("hi") == "hi"
    with pytest.raises(ZeroDivisionError):
        await call("raise")
@pytest.mark.asyncio
@pytest.mark.parametrize("use_class", [True, False])
async def test_batch_size_multiple_zero_timeout(use_class):
    """With batch_wait_timeout_s=0 a lone request is executed right away,
    while requests arriving during a running batch are grouped together."""
    @serve.batch(max_batch_size=2, batch_wait_timeout_s=0)
    async def zero_timeout(requests):
        # Sleep so later requests can queue up behind a running batch.
        await asyncio.sleep(1)
        if "raise" in requests:
            1 / 0
        return requests
    class ZeroTimeout:
        @serve.batch(max_batch_size=2, batch_wait_timeout_s=0)
        async def zero_timeout(self, requests):
            await asyncio.sleep(1)
            if "raise" in requests:
                1 / 0
            return requests
    cls = ZeroTimeout()
    # Dispatch to the free function or the method, per parametrize.
    async def call(arg):
        if use_class:
            return await cls.zero_timeout(arg)
        else:
            return await zero_timeout(arg)
    assert await call("hi") == "hi"
    with pytest.raises(ZeroDivisionError):
        await call("raise")
    # Check that 2 requests will be executed together if available.
    # The first should cause a size-one batch to be executed, then
    # the next two should be executed together (signaled by both
    # having the exception).
    t1 = asyncio.get_event_loop().create_task(call("hi1"))
    await asyncio.sleep(0.5)
    t2 = asyncio.get_event_loop().create_task(call("hi2"))
    t3 = asyncio.get_event_loop().create_task(call("raise"))
    assert await t1 == "hi1"
    with pytest.raises(ZeroDivisionError):
        await t2
    with pytest.raises(ZeroDivisionError):
        await t3
@pytest.mark.asyncio
@pytest.mark.parametrize("use_class", [True, False])
async def test_batch_size_multiple_long_timeout(use_class):
    """With a long timeout and max_batch_size=3, execution only starts once
    three requests have accumulated, and they run as a single batch."""
    @serve.batch(max_batch_size=3, batch_wait_timeout_s=1000)
    async def long_timeout(requests):
        # "raise" anywhere in the batch fails the whole batch.
        if "raise" in requests:
            1 / 0
        return requests
    class LongTimeout:
        @serve.batch(max_batch_size=3, batch_wait_timeout_s=1000)
        async def long_timeout(self, requests):
            if "raise" in requests:
                1 / 0
            return requests
    cls = LongTimeout()
    # Dispatch to the free function or the method, per parametrize.
    async def call(arg):
        if use_class:
            return await cls.long_timeout(arg)
        else:
            return await long_timeout(arg)
    # Two requests are not enough: nothing completes within 0.1s.
    t1 = asyncio.get_event_loop().create_task(call("hi1"))
    t2 = asyncio.get_event_loop().create_task(call("hi2"))
    done, pending = await asyncio.wait([t1, t2], timeout=0.1)
    assert len(done) == 0
    # A third request fills the batch and all three finish together.
    t3 = asyncio.get_event_loop().create_task(call("hi3"))
    done, pending = await asyncio.wait([t1, t2, t3], timeout=100)
    assert set(done) == {t1, t2, t3}
    assert [t1.result(), t2.result(), t3.result()] == ["hi1", "hi2", "hi3"]
    # One failing request poisons the whole batch: every task in the batch
    # observes the same ZeroDivisionError.
    t1 = asyncio.get_event_loop().create_task(call("hi1"))
    t2 = asyncio.get_event_loop().create_task(call("raise"))
    done, pending = await asyncio.wait([t1, t2], timeout=0.1)
    assert len(done) == 0
    t3 = asyncio.get_event_loop().create_task(call("hi3"))
    done, pending = await asyncio.wait([t1, t2, t3], timeout=100)
    assert set(done) == {t1, t2, t3}
    assert all(isinstance(t.exception(), ZeroDivisionError) for t in done)
    with pytest.raises(ZeroDivisionError):
        t1.result()
    with pytest.raises(ZeroDivisionError):
        t2.result()
    with pytest.raises(ZeroDivisionError):
        t3.result()
if __name__ == "__main__":
    import sys
    # Allow running this test file directly; forward pytest's exit code.
    sys.exit(pytest.main(["-v", "-s", __file__]))
| 29.621451 | 75 | 0.637913 | 1,192 | 9,390 | 4.825503 | 0.136745 | 0.041725 | 0.039638 | 0.040682 | 0.8637 | 0.808762 | 0.758519 | 0.732267 | 0.694715 | 0.672983 | 0 | 0.023888 | 0.259957 | 9,390 | 316 | 76 | 29.71519 | 0.803857 | 0.066028 | 0 | 0.732759 | 0 | 0 | 0.040091 | 0 | 0 | 0 | 0 | 0 | 0.056034 | 1 | 0.051724 | false | 0.056034 | 0.025862 | 0.00431 | 0.228448 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
e073d2cb728b745f6e0d5285e2d78e6a5ff96d21 | 132 | py | Python | packages/grid/backend/app/app/crud/__init__.py | callezenwaka/PySyft | 2545c302441cfe727ec095c4f9aa136bff02be32 | [
"Apache-1.1"
] | 1 | 2021-09-14T10:56:43.000Z | 2021-09-14T10:56:43.000Z | packages/grid/backend/app/app/crud/__init__.py | callezenwaka/PySyft | 2545c302441cfe727ec095c4f9aa136bff02be32 | [
"Apache-1.1"
] | 2 | 2021-04-02T10:12:44.000Z | 2021-04-02T10:12:50.000Z | packages/grid/backend/app/app/crud/__init__.py | callezenwaka/PySyft | 2545c302441cfe727ec095c4f9aa136bff02be32 | [
"Apache-1.1"
] | 1 | 2021-08-19T12:23:01.000Z | 2021-08-19T12:23:01.000Z | # relative
from .base import CRUDBase # noqa: F4
from .crud_item import item # noqa: F4
from .crud_user import user # noqa: F401
| 26.4 | 41 | 0.727273 | 21 | 132 | 4.47619 | 0.52381 | 0.12766 | 0.212766 | 0.297872 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.04717 | 0.19697 | 132 | 4 | 42 | 33 | 0.839623 | 0.280303 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0eda07f16e89434f2ae109ffffae4809e261ed76 | 53 | py | Python | spekulatio/build_file_tree/__init__.py | iwilltry42/spekulatio | 42d678b7d7fcc13284902be5a08fb0407d96ec4d | [
"MIT"
] | 10 | 2019-03-19T23:05:04.000Z | 2022-01-19T14:08:06.000Z | spekulatio/build_file_tree/__init__.py | iwilltry42/spekulatio | 42d678b7d7fcc13284902be5a08fb0407d96ec4d | [
"MIT"
] | 6 | 2019-03-23T08:38:44.000Z | 2020-11-24T20:50:14.000Z | spekulatio/build_file_tree/__init__.py | iwilltry42/spekulatio | 42d678b7d7fcc13284902be5a08fb0407d96ec4d | [
"MIT"
] | 1 | 2019-09-26T12:21:36.000Z | 2019-09-26T12:21:36.000Z | from .build_file_tree import build_file_tree # noqa
| 26.5 | 52 | 0.830189 | 9 | 53 | 4.444444 | 0.666667 | 0.45 | 0.65 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.132075 | 53 | 1 | 53 | 53 | 0.869565 | 0.075472 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
0efed40d16ad4c6ff5382f223138daf13eafb8e5 | 324 | py | Python | torchcv/transform/__init__.py | qqadssp/DSOD-Pytorch | 76d7558bb20e7289ebf6acf50be4cef75d568499 | [
"MIT"
] | 13 | 2018-08-15T08:44:31.000Z | 2021-04-16T01:54:01.000Z | torchcv/transform/__init__.py | qqadssp/DSOD-Pytorch | 76d7558bb20e7289ebf6acf50be4cef75d568499 | [
"MIT"
] | 1 | 2022-03-11T08:55:17.000Z | 2022-03-12T05:42:53.000Z | torchcv/transform/__init__.py | qqadssp/DSOD-Pytorch | 76d7558bb20e7289ebf6acf50be4cef75d568499 | [
"MIT"
] | 5 | 2018-10-13T17:06:51.000Z | 2020-06-20T03:48:57.000Z | from torchcv.transform.resize import resize
from torchcv.transform.random_flip import random_flip
from torchcv.transform.random_crop import random_crop
from torchcv.transform.random_paste import random_paste
from torchcv.transform.scale_jitter import scale_jitter
from torchcv.transform.random_distort import random_distort
| 46.285714 | 59 | 0.888889 | 46 | 324 | 6.043478 | 0.26087 | 0.23741 | 0.431655 | 0.374101 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.074074 | 324 | 6 | 60 | 54 | 0.926667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
16109dac6caa232d99e70a8a0ba5ccca516ed41f | 82,086 | py | Python | scripts/train_lightgbm_model.py | guanjue/imputed_cistrome_2022 | 223efd1e76ce547b480c13f36d0df913ee1f0e5f | [
"MIT"
] | null | null | null | scripts/train_lightgbm_model.py | guanjue/imputed_cistrome_2022 | 223efd1e76ce547b480c13f36d0df913ee1f0e5f | [
"MIT"
] | null | null | null | scripts/train_lightgbm_model.py | guanjue/imputed_cistrome_2022 | 223efd1e76ce547b480c13f36d0df913ee1f0e5f | [
"MIT"
] | 1 | 2022-01-13T23:14:27.000Z | 2022-01-13T23:14:27.000Z | #!/usr/bin/env python3
import gc
import os
import pickle
import fire
import h5py
import matplotlib.pyplot as plt
import seaborn as sns
from hyperopt.fmin import generate_trials_to_calculate
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import precision_recall_curve
from numpy import linalg as LA
import sklearn.metrics as metrics
import json
import lightgbm as lgb
import numpy as np
import pandas as pd
import glob
from sklearn.preprocessing import QuantileTransformer
import yaml
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
from sklearn.metrics import average_precision_score
from early_stopping_avg import early_stopping
from hyperopt import STATUS_OK
from hyperopt import hp
from timeit import default_timer as timer
import numpy as np
from hyperopt import tpe
from hyperopt import Trials
from hyperopt import fmin
def focal_isoform_binary_object(pred, dtrain, alpha=0.5, beta=0.0, gamma=2.0):
    """Custom LightGBM binary objective: a sigmoid-based focal-style loss.

    ``alpha`` re-weights positives relative to negatives ((0, 0.5) gives
    positives less weight, (0.5, 1) more); ``beta`` shifts the loss curve
    (> 0 moves it left, giving less weight to well-classified samples);
    ``gamma`` controls the steepness of the loss curve (> 0).

    Returns:
        (grad, hess): per-sample first and second derivatives for LightGBM.
    """
    y = dtrain.get_label()
    sign = 2.0 * y - 1.0  # +1 for positives, -1 for negatives
    logit = beta + sign * gamma * pred
    prob = 1. / (1. + np.exp(-logit))
    # alpha for positives, (1 - alpha) for negatives
    weight = 1 - y + sign * alpha
    grad = weight * sign * (prob - 1)
    hess = weight * gamma * (1 - prob) * prob
    return grad, hess
def lgb_auprc_score(y_hat, data):
    """LightGBM eval metric: area under the precision-recall curve.

    Raw (unrounded) probabilities are scored directly.  The trailing True
    tells LightGBM that higher is better.
    """
    labels = data.get_label()
    return 'auprc', average_precision_score(labels, y_hat), True
class LightGBMModel(object):
    def __init__(self, config_file, training_tf_name=None,
                 cofactor_motif_set_file=None, quantile_transformer_path=None, dnase_feature_path=None,
                 motif_feature_path=None, selected_motif_feature_path=None, step=120):
        """Load the YAML config and set up paths/labels for one training TF.

        Args:
            config_file: YAML config (chromosome lists, label paths, etc.).
            training_tf_name: TF whose region labels to load; labels stay
                None when omitted.
            cofactor_motif_set_file: JSON list of cofactor motif names used
                as features; None leaves the motif set unset.
            quantile_transformer_path: dir with pickled per-sample quantile
                transformers (defaults to ./train/quantile_transformer).
            dnase_feature_path: dir with DNase feature hdf5s (./hdf5s/DNase).
            motif_feature_path: dir with raw motif score hdf5s (./hdf5s/motif).
            selected_motif_feature_path: dir with per-chromosome selected
                motif feature hdf5s (./hdf5s/motif).
            step: number of motif feature columns per lightGBM binary subset.
        """
        with open(config_file, "r") as infile:
            config = yaml.load(infile, Loader=Loader)
        self.config = config
        self.chrom_all = config['chrom_all']
        self.region_topic_model_h5 = config['region_topic_model_h5']
        self.dic_chrom_length = {}
        self.chrom_sets = config['chrom_sets']
        self.training_tf_name = training_tf_name
        # NOTE(review): duplicate of the assignment above -- harmless.
        self.dic_chrom_length = {}
        # Parse the chrom.sizes file (chrom<TAB>length), keeping configured chroms.
        with open(config['chrom_size_file'], "r") as infile:
            for line in infile:
                line = line.strip().split("\t")
                if line[0] in self.chrom_all:
                    self.dic_chrom_length[line[0]] = int(line[1])
        # if regions_all_file is not None:
        #     self.df_all_regions = pd.read_csv(regions_all_file, sep="\t", header=None)
        #     self.df_all_regions.columns = ['chr', 'start', 'stop']
        # else:
        #     self.df_all_regions = None
        if training_tf_name is not None:
            # Region label table: chr/start/stop plus one label column per cell type.
            self.df_all_regions_label = pd.read_csv(
                "%s/%s.%s" % (
                    config['training_cell_types_regions_label_path'], training_tf_name,
                    config['training_cell_types_regions_label_name']),
                sep="\t", header=0)
        else:
            self.df_all_regions_label = None
        if cofactor_motif_set_file is not None:
            with open(cofactor_motif_set_file, "r") as infile:
                self.cofactor_motif_set = json.load(infile)
        else:
            self.cofactor_motif_set = None
        if quantile_transformer_path is None:
            self.quantile_transformer_path = "./train/quantile_transformer"
        else:
            self.quantile_transformer_path = quantile_transformer_path
        if dnase_feature_path is None:
            self.dnase_feature_path = "./hdf5s/DNase"
        else:
            self.dnase_feature_path = dnase_feature_path
        if motif_feature_path is None:
            self.motif_feature_path = "./hdf5s/motif"
        else:
            self.motif_feature_path = motif_feature_path
        if selected_motif_feature_path is None:
            self.selected_motif_feature_path = "./hdf5s/motif"
        else:
            self.selected_motif_feature_path = selected_motif_feature_path
        self.step = step
    def prepare_motif_h5_data(self, chrom):
        """Write the per-chromosome selected-motif feature hdf5 for lightGBM.

        Reads the top-4 motif scores for the cofactor motif set, samples them
        at several 50bp offsets around each labeled region start, and stores
        feature names / starts / scores in
        ``<selected_motif_feature_path>/<chrom>_motif_features_lightGBM.h5``.
        """
        # Keep only chr/start/stop for this chromosome's labeled regions.
        df_temp = self.df_all_regions_label[self.df_all_regions_label['chr'] == chrom].copy()
        df_temp = df_temp.iloc[:, :3]
        with h5py.File("%s/%s_motifs_top4_scores.h5" % (self.motif_feature_path, chrom), "r") as infile:
            motif_names = infile['motif_names'][...]
            motif_names = list(map(lambda x: x.decode('UTF-8'), motif_names))
            # if selected_tfs is None:
            #     selected_tfs = motif_names
            # selected_tfs=["EGR","KLF","SPI",'ETV','ZNF','GABP']
            # row_indexs = [i for i, v in enumerate(motif_names) if any([tf_name in v for tf_name in selected_tfs])]
            # selected_tfs_names = [v for i, v in enumerate(motif_names) if
            #                       any([tf_name in v for tf_name in selected_tfs])]
            # Restrict to motifs in the cofactor set, preserving file order.
            row_index = [i for i, v in enumerate(motif_names) if v in self.cofactor_motif_set]
            selected_motifs = [motif_names[i] for i in row_index]
            # print(row_index)
            scores = infile["scores"][row_index, :, :]
        # for i in [-13,-11,-9,-7,-5,-3,-1,0,1,3,5,7,9,11,13]:
        # Sample scores at bin offsets around each region (bins look 50bp wide
        # given start/50 indexing -- TODO confirm).
        for i in [-7, -5, -3, -1, 0, 1, 3, 5, 7]:
            # print("%s %d" % (chrom, i))
            region_index = np.array(list(map(lambda x: x / 50 + i, df_temp["start"])))
            # Clamp offsets that fall off the chromosome ends.
            region_index = np.clip(region_index, 0, scores.shape[1] - 1)
            scores_region = scores[:, region_index.astype(int), :]
            for ind, j in enumerate(selected_motifs):
                # for ind, j in enumerate(self.cofactor_motif_set):
                for k in range(4):
                    df_temp["%s_offset_%d_top_%d" % (j, i, k)] = scores_region[ind, :, k]
        with h5py.File('%s/%s_motif_features_lightGBM.h5' % (self.selected_motif_feature_path, chrom), "w") as outfile:
            outfile.create_dataset("feature_names", data=np.array(df_temp.iloc[:, 3:].columns, dtype='S'),
                                   shape=(df_temp.shape[1] - 3,),
                                   dtype='S200', compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
            outfile.create_dataset("starts", data=df_temp['start'].tolist(), shape=(df_temp.shape[0],),
                                   compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
            outfile.create_dataset("scores", data=df_temp.iloc[:, 3:].values, dtype=np.float32,
                                   shape=(df_temp.shape[0], df_temp.shape[1] - 3),
                                   compression='gzip', shuffle=True, fletcher32=True, compression_opts=4)
    def prepare_dnase_autoencoder_h5_data(self, cell_line, chrom, outfile_path):
        """Write per-chromosome autoencoder DNase features for one cell line.

        Samples the 32 autoencoder channels at several bin offsets around
        each labeled region start and stores them in
        ``<outfile_path>/DNASE_autoencoder_lightGBM.<chrom>.<cell_line>.h5``.
        NOTE(review): reads from a hard-coded scratch path -- consider making
        it configurable.
        """
        df_temp = self.df_all_regions_label[self.df_all_regions_label['chr'] == chrom].copy()
        df_temp = df_temp.iloc[:, :3]
        # for cell_line in self.config['cell_types']:
        with h5py.File(
                "%s/DNASE.%s.merge.binSize.1.corrected_sorted_hg19_25bpbin_bwaverage_transformed_%s_scanned_with_autoencoder_v4.hdf5" % (
                        '/n/scratchlfs/xiaoleliu_lab/cchen/Cistrome_imputation/encode/data/DNase_scanning/scan_result',
                        cell_line, chrom), "r") as infile:
            # print(row_index)
            scores = infile["DNase_feature_scanning"][:, :]
        # for i in [-13,-11,-9,-7,-5,-3,-1,0,1,3,5,7,9,11,13]:
        # Sample the 32 autoencoder channels at bin offsets around each region.
        for i in [-12, -8, -4, 0, 4, 8, 12]:
            # print("%s %d" % (chrom, i))
            region_index = np.array(list(map(lambda x: x / 50 + i, df_temp["start"])))
            # Clamp offsets that fall off the chromosome ends.
            region_index = np.clip(region_index, 0, scores.shape[0] - 1)
            scores_region = scores[region_index.astype(int), :]
            for k in range(32):
                df_temp["DNase_autoencoder_offset_%d_%d" % (i, k)] = scores_region[:, k]
        with h5py.File('%s/DNASE_autoencoder_lightGBM.%s.%s.h5' % (outfile_path, chrom, cell_line), "w") as outfile:
            outfile.create_dataset("feature_names", data=np.array(df_temp.iloc[:, 3:].columns, dtype='S'),
                                   shape=(df_temp.shape[1] - 3,),
                                   dtype='S200', compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
            outfile.create_dataset("starts", data=df_temp['start'].tolist(), shape=(df_temp.shape[0],),
                                   compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
            outfile.create_dataset("scores", data=df_temp.iloc[:, 3:].values, dtype=np.float32,
                                   shape=(df_temp.shape[0], df_temp.shape[1] - 3),
                                   compression='gzip', shuffle=True, fletcher32=True, compression_opts=4)
    def get_dnase_features(self, cell_line, chrom, dir_dnase_feature_median, selected_bin_index_file=None):
        """Return the DNase feature matrix (bins x features) for one sample.

        ``dir_dnase_feature_median`` is currently unused -- the median-feature
        merge below is commented out.  ``selected_bin_index_file`` optionally
        restricts rows to a saved numpy index array.
        """
        with h5py.File("%s/DNASE_bam_5_mer_variable_bp_all_samples_lightGBM_%s_all_cell_types.h5" % (
                self.dnase_feature_path, chrom), "r") as infile:
            samples = list(infile['samples'][...])
            cell_line = str(cell_line)
            samples = list(map(lambda x: x.decode('UTF-8'), samples))
            # Locate this sample's slice in the (samples x bins x features) tensor.
            cell_line_index = np.where(np.array(samples) == cell_line)[0][0]
            if selected_bin_index_file is None:
                cell_line_scores = infile[chrom][cell_line_index, :, :]
            else:
                selected_bin_index = np.load(selected_bin_index_file)
                cell_line_scores = infile[chrom][cell_line_index, selected_bin_index, :]
            # with h5py.File("%s/DNASE_bam_5_mer_variable_bp_all_samples_lightGBM_%s_median.h5" % (
            #         dir_dnase_feature_median, chrom), "r") as infile:
            #     if selected_bin_index_file is None:
            #         median_scores = infile[chrom][:, :]
            #     else:
            #         selected_bin_index = np.load(selected_bin_index_file)
            #         median_scores = infile[chrom][selected_bin_index, :]
            # scores = np.hstack((cell_line_scores, median_scores))
            # return scores
            return cell_line_scores
    def get_dnase_features_autoencoder(self, cell_line, chrom, feature_path, selected_bin_index_file=None):
        """Return autoencoder DNase features (bins x 224) for one sample/chrom.

        NOTE(review): both branches below take the same ``[:size, :]`` slice,
        so ``selected_bin_index`` is loaded but never applied -- confirm
        whether bin subsetting was intended here.
        """
        # The raw DNase hdf5 only supplies the bin count for this chromosome.
        with h5py.File("%s/DNASE_bam_5_mer_variable_bp_all_samples_lightGBM_%s_all_cell_types.h5" % (
                self.dnase_feature_path, chrom), "r") as infile:
            size = infile[chrom].shape[1]
        with h5py.File("%s/DNASE_autoencoder_lightGBM.%s.%s.h5" % (feature_path, chrom, cell_line), "r") as infile:
            if selected_bin_index_file is None:
                cell_line_scores = infile['scores'][:size, :]
            else:
                selected_bin_index = np.load(selected_bin_index_file)
                cell_line_scores = infile['scores'][:size, :]
            # with h5py.File("%s/DNASE_bam_5_mer_variable_bp_all_samples_lightGBM_%s_median.h5" % (
            #         dir_dnase_feature_median, chrom), "r") as infile:
            #     if selected_bin_index_file is None:
            #         median_scores = infile[chrom][:, :]
            #     else:
            #         selected_bin_index = np.load(selected_bin_index_file)
            #         median_scores = infile[chrom][selected_bin_index, :]
            # scores = np.hstack((cell_line_scores, median_scores))
            # return scores
            return cell_line_scores
    def prepare_lightgbm_binary_dnase_feature(self, cell_line, chrom_set_name, dir_dnase_feature_median, dir_out,
                                              reference=None, selected_bin_index_file=None, ATAC_long_short=False):
        """Build and save the lightGBM binary dataset of DNase features.

        Stacks per-chromosome DNase features for ``cell_line`` across the
        chromosomes in ``chrom_set_name``, quantile-normalizes them in place
        with this sample's pickled transformer, and saves
        ``<dir_out>/lightGBM.dnase.<cell_line>.<chrom_set_name>.bin``.
        When ``ATAC_long_short`` is set, short- and long-fragment features
        are built separately and concatenated column-wise.
        ``reference`` is an optional lightGBM binary file to align bins with.
        """
        cell_line = str(cell_line)
        # chr19 is only used to read the (chromosome-independent) feature names.
        chrom = "chr19"
        # TODO change to 50bp or 100bp
        with h5py.File("%s/DNASE_bam_5_mer_variable_bp_all_samples_lightGBM_%s_all_cell_types.h5" % (
                self.dnase_feature_path, chrom), "r") as infile:
            needed_feature_names = list(infile['feature_names'][...])
            needed_feature_names = list(map(lambda x: x.decode('UTF-8'), needed_feature_names))
        # with h5py.File("%s/DNASE_bam_5_mer_variable_bp_all_samples_lightGBM_%s_median.h5" % (
        # # dir_dnase_feature_median, chrom), "r") as infile:
        # # median_feature_names = list(infile['feature_names'][...])
        # # median_feature_names = list(map(lambda x: x.decode('UTF-8'), median_feature_names))
        # # needed_feature_names += median_feature_names
        list_scores = []
        chrom_set = self.chrom_sets[chrom_set_name]
        if not ATAC_long_short:
            for chrom in chrom_set:
                scores = self.get_dnase_features(cell_line, chrom, dir_dnase_feature_median, selected_bin_index_file)
                list_scores.append(scores)
                # print(cell_line, chrom, subset_index)
            all_score = np.vstack(list_scores)
            # TODO change to 50bp or 100bp
            # Quantile-normalize in place with this sample's fitted transformer.
            with open("%s/%s_variable_bp_quantile_map.pkl" % (self.quantile_transformer_path, cell_line),
                      'rb') as fin:
                qt = pickle.load(fin, encoding='latin1')
            _ = qt.transform(all_score)
            # _ = qt.transform(all_score[:, :int(all_score.shape[1] / 2)])
            # _ = qt.transform(all_score[:, :cell_line_scores.shape[1]])
            if reference is not None:
                # reference = lgb.Dataset(glob.glob("%s/lightGBM.dnase.*.*.bin" % reference)[0])
                reference = lgb.Dataset(reference)
            train_data = lgb.Dataset(all_score, feature_name=list(needed_feature_names), reference=reference)
        else:
            # ATAC data: build short- and long-fragment features separately,
            # each with its own quantile transformer, then concatenate columns.
            list_scores_short_long = []
            for frag_size in ['short','long']:
                list_scores = []
                for chrom in chrom_set:
                    scores = self.get_dnase_features("%s_%s" % (cell_line, frag_size), chrom, dir_dnase_feature_median,
                                                     selected_bin_index_file)
                    list_scores.append(scores)
                    # print(cell_line, chrom, subset_index)
                all_score = np.vstack(list_scores)
                # TODO change to 50bp or 100bp
                with open("%s/%s_variable_bp_quantile_map.pkl" % (self.quantile_transformer_path, "%s_%s" % (cell_line, frag_size)),
                          'rb') as fin:
                    qt = pickle.load(fin, encoding='latin1')
                _ = qt.transform(all_score)
                # _ = qt.transform(all_score[:, :int(all_score.shape[1] / 2)])
                # _ = qt.transform(all_score[:, :cell_line_scores.shape[1]])
                list_scores_short_long.append(all_score)
            all_score_short_long = np.hstack(list_scores_short_long)
            if reference is not None:
                # reference = lgb.Dataset(glob.glob("%s/lightGBM.dnase.*.*.bin" % reference)[0])
                reference = lgb.Dataset(reference)
            # Suffix feature names with the fragment-size group they came from.
            needed_feature_names_short_long = ['%s_%s' % (feature_name, frag_size) for frag_size in ['short','long']
                                               for feature_name in needed_feature_names
                                               ]
            train_data = lgb.Dataset(all_score_short_long, feature_name=list(needed_feature_names_short_long), reference=reference)
        train_data.save_binary("%s/lightGBM.dnase.%s.%s.bin" % (dir_out, cell_line, chrom_set_name))
    def prepare_lightgbm_binary_dnase_feature_autoencoder(self, cell_line, chrom_set_name, feature_path,
                                                          dir_out,
                                                          reference=None, selected_bin_index_file=None):
        """Build and save the lightGBM binary dataset of autoencoder features.

        Stacks per-chromosome autoencoder DNase features for ``cell_line``
        over the chromosomes in ``chrom_set_name`` and saves
        ``<dir_out>/lightGBM.autoencoder.dnase.<cell_line>.<chrom_set_name>.bin``.
        ``reference`` is a directory whose first matching binary file is used
        to align bin mappings.
        """
        list_scores = []
        chrom_set = self.chrom_sets[chrom_set_name]
        for chrom in chrom_set:
            scores = self.get_dnase_features_autoencoder(cell_line, chrom, feature_path,
                                                         selected_bin_index_file)
            list_scores.append(scores)
            # print(cell_line, chrom, subset_index)
        all_score = np.vstack(list_scores)
        if reference is not None:
            reference = lgb.Dataset(glob.glob("%s/lightGBM.autoencoder.dnase.*.*.bin" % reference)[0])
        # Feature names mirror the construction order in
        # prepare_dnase_autoencoder_h5_data: 7 offsets x 32 channels.
        needed_feature_names = []
        for i in [-12, -8, -4, 0, 4, 8, 12]:
            for k in range(32):
                needed_feature_names.append("DNase_autoencoder_offset_%d_%d" % (i, k))
        train_data = lgb.Dataset(all_score, feature_name=list(needed_feature_names), reference=reference)
        train_data.save_binary("%s/lightGBM.autoencoder.dnase.%s.%s.bin" % (dir_out, cell_line, chrom_set_name))
    def prepare_lightgbm_binary_data_motif_feature_subset(self, chrom_set_name, subset_index, dir_out,
                                                          selected_bin_index_file=None, reference=None):
        """Build one ``step``-column subset of motif features as a lightGBM binary.

        Motif feature columns are split into chunks of ``self.step``;
        ``subset_index`` (1-based) selects the chunk.  The first subset is
        additionally prefixed with region-topic-model features.  Saves
        ``<dir_out>/lightGBM.motif.<chrom_set_name>.<subset_index>.bin``.
        """
        # chr19 is only used to read the (chromosome-independent) feature names.
        chrom = "chr19"
        with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
                       "r") as infile:
            all_feature_names = list(infile['feature_names'][...])
            all_feature_names = list(map(lambda x: x.decode('UTF-8'), all_feature_names))
            # Chunk feature names/columns into blocks of self.step; pick one.
            needed_feature_names = [all_feature_names[i:i + self.step]
                                    for i in range(0, len(all_feature_names), self.step)][subset_index - 1]
            feature_index = [list(range(i, min(i + self.step, len(all_feature_names)))) for i in
                             range(0, len(all_feature_names), self.step)][subset_index - 1]
            # needed_feature_names = list(map(lambda x: x.decode('UTF-8'), needed_feature_names))
        list_scores = []
        chrom_set = self.chrom_sets[chrom_set_name]
        with h5py.File(self.region_topic_model_h5, "r") as region_topic_infile:
            for chrom in chrom_set:
                with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
                               "r") as infile:
                    # feature_names = list(infile['feature_names'][...])
                    # feature_names = list(map(lambda x: x.decode('UTF-8'), feature_names))
                    # feature_index = [i for i, v in enumerate(feature_names) if (v in needed_feature_names)]
                    if selected_bin_index_file is None:
                        scores = infile['scores'][:, feature_index]
                        if subset_index == 1:
                            # Subset 1 carries the region-topic features too.
                            scores = np.hstack([region_topic_infile[chrom][:, :], scores])
                    else:
                        selected_bin_index = np.load(selected_bin_index_file)
                        scores = infile['scores'][selected_bin_index, feature_index]
                        if subset_index == 1:
                            scores = np.hstack([region_topic_infile[chrom][selected_bin_index, :], scores])
                list_scores.append(scores)
                # print(cell_line, chrom, subset_index)
        all_score = np.vstack(list_scores)
        if reference is not None:
            reference = lgb.Dataset(glob.glob("%s/lightGBM.motif.*.%d.bin" % (reference, subset_index))[0])
        if subset_index == 1:
            # needed_feature_names = ["topic_%d" % topic_id for topic_id in range(9)] \
            #                        + needed_feature_names
            # train_data = lgb.Dataset(all_score, categorical_feature=[8],
            #                          feature_name=list(needed_feature_names), reference=reference)
            # NOTE(review): columns 0-7 are dropped and only one topic column
            # is kept (as categorical); the commented code above kept 9 topic
            # columns -- confirm this reduction is intended.
            needed_feature_names = ["topic_%d" % topic_id for topic_id in range(1)] \
                                   + needed_feature_names
            train_data = lgb.Dataset(all_score[:, 8:],
                                     categorical_feature=[0],
                                     feature_name=list(needed_feature_names), reference=reference)
        else:
            train_data = lgb.Dataset(all_score, feature_name=list(needed_feature_names), reference=reference)
        train_data.save_binary("%s/lightGBM.motif.%s.%d.bin" % (dir_out, chrom_set_name, subset_index))
# def merge_lightgbm_binary_data(self, cell_line, chrom_set_name, dir_out):
# all_feature_names = []
# chrom = "chr22"
# # TODO change to 50bp or 100bp
# # with h5py.File("%s/DNASE_bam_5_mer_variable_bp_all_samples_lightGBM_%s_all_cell_types.h5" % (
# # self.dnase_feature_path, chrom), "r") as infile:
# # all_feature_names += list(infile['feature_names'][...])
# # chrom = "chr22"
# with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
# "r") as infile:
# all_feature_names += list(infile['feature_names'][...])
# all_feature_names = list(map(lambda x: x.decode('UTF-8'), all_feature_names))
# # for cell_line in self.df_all_regions_label.columns.tolist()[3:]:
# for cell_line in [cell_line]:
# train_data_all = None
# for subset_index in range(int(np.ceil(len(all_feature_names) / self.step) + 1)):
# train_data = lgb.Dataset("%s/lightGBM.%s.%s.%d.bin" %
# (dir_out, cell_line, chrom_set_name, subset_index)).construct()
# if train_data_all is None:
# train_data_all = train_data
# else:
# # train_data_all=train_data_all.add_features_from(train_data)
# train_data_all.add_features_from(train_data)
# # print(subset_index)
# train_data_all.save_binary("%s/lightGBM_all.%s.%s.bin" % (dir_out, cell_line, chrom_set_name))
# print(cell_line, chrom_set_name)
def merge_lightgbm_binary_data(self, cell_line, chrom_set_name, dir_out=None, lightgbm_dnase_binary_files_path=None,
lightgbm_motif_binary_files_path=None):
if dir_out is None:
dir_out = "./train/%s/binary_files" % self.training_tf_name
if lightgbm_motif_binary_files_path is None:
lightgbm_motif_binary_files_path = "./train/%s/binary_files" % self.training_tf_name
if lightgbm_dnase_binary_files_path is None:
lightgbm_dnase_binary_files_path = "./train/data/dnase_feature_binary_files"
cell_line = str(cell_line)
all_feature_names = []
chrom = "chr19"
# TODO change to 50bp or 100bp
with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
"r") as infile:
all_feature_names += list(infile['feature_names'][...])
all_feature_names = list(map(lambda x: x.decode('UTF-8'), all_feature_names))
train_data_all = lgb.Dataset("%s/lightGBM.dnase.%s.%s.bin" %
(lightgbm_dnase_binary_files_path, cell_line, chrom_set_name)).construct()
for subset_index in range(int(np.ceil(len(all_feature_names) / self.step))):
train_data = lgb.Dataset("%s/lightGBM.motif.%s.%d.bin" %
(lightgbm_motif_binary_files_path, chrom_set_name, subset_index + 1)).construct()
train_data_all.add_features_from(train_data)
temp = []
chrom_set = self.chrom_sets[chrom_set_name]
for chrom in chrom_set:
df_temp = self.df_all_regions_label.loc[self.df_all_regions_label['chr'] == chrom, :]
temp.append(df_temp)
df_all_temp = pd.concat(temp, ignore_index=True)
# selected_index = np.where(df_all_temp[cell_line] != "A")[0]
# ignore_index = np.where(df_all_temp[cell_line] == "A")[0]
# label_b_u = np.delete(np.array(df_all_temp[cell_line]), ignore_index, axis=0)
# labels = list(map(lambda x: 1 if x == "B" else 0, label_b_u))
# train_data_all_subset = train_data_all.subset(selected_index)
# train_data_all_subset.set_label(labels)
# return train_data_all_subset
weight = (np.array(df_all_temp[cell_line]) != "A").astype(int)
train_data_all.set_weight(weight)
labels = (np.array(df_all_temp[cell_line]) == "B").astype(int)
train_data_all.set_label(labels)
# return train_data_all
train_data_all.save_binary("%s/lightGBM.all.%s.%s.bin" % (dir_out, cell_line, chrom_set_name))
def merge_lightgbm_binary_data_autoencoder(self, cell_line, chrom_set_name, dir_out=None,
lightgbm_dnase_binary_files_path=None,
lightgbm_motif_binary_files_path=None):
if dir_out is None:
dir_out = "./train/%s/binary_files" % self.training_tf_name
if lightgbm_motif_binary_files_path is None:
lightgbm_motif_binary_files_path = "./train/%s/binary_files" % self.training_tf_name
if lightgbm_dnase_binary_files_path is None:
lightgbm_dnase_binary_files_path = "./train/data/dnase_feature_binary_files"
all_feature_names = []
chrom = "chr19"
# TODO change to 50bp or 100bp
with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
"r") as infile:
all_feature_names += list(infile['feature_names'][...])
all_feature_names = list(map(lambda x: x.decode('UTF-8'), all_feature_names))
train_data_all = lgb.Dataset("%s/lightGBM.autoencoder.dnase.%s.%s.bin" %
(lightgbm_dnase_binary_files_path, cell_line, chrom_set_name)).construct()
# train_data = lgb.Dataset("%s/lightGBM.dnase.%s.%s.bin" %
# (lightgbm_dnase_binary_files_path, cell_line, chrom_set_name)).construct()
# train_data_all.add_features_from(train_data)
for subset_index in range(int(np.ceil(len(all_feature_names) / self.step))):
train_data = lgb.Dataset("%s/lightGBM.motif.%s.%d.bin" %
(lightgbm_motif_binary_files_path, chrom_set_name, subset_index + 1)).construct()
train_data_all.add_features_from(train_data)
temp = []
chrom_set = self.chrom_sets[chrom_set_name]
for chrom in chrom_set:
df_temp = self.df_all_regions_label.loc[self.df_all_regions_label['chr'] == chrom, :]
temp.append(df_temp)
df_all_temp = pd.concat(temp, ignore_index=True)
# selected_index = np.where(df_all_temp[cell_line] != "A")[0]
# ignore_index = np.where(df_all_temp[cell_line] == "A")[0]
# label_b_u = np.delete(np.array(df_all_temp[cell_line]), ignore_index, axis=0)
# labels = list(map(lambda x: 1 if x == "B" else 0, label_b_u))
# train_data_all_subset = train_data_all.subset(selected_index)
# train_data_all_subset.set_label(labels)
# return train_data_all_subset
weight = (np.array(df_all_temp[cell_line]) != "A").astype(int)
train_data_all.set_weight(weight)
labels = (np.array(df_all_temp[cell_line]) == "B").astype(int)
train_data_all.set_label(labels)
# return train_data_all
train_data_all.save_binary("%s/lightGBM.autoencoder.all.%s.%s.bin" % (dir_out, cell_line, chrom_set_name))
def train_models(self, cell_line, chrom_set_name, lightgbm_dnase_binary_files_path=None,
lightgbm_motif_binary_files_path=None, dir_out=None, num_threads=16):
if lightgbm_motif_binary_files_path is None:
lightgbm_motif_binary_files_path = "./train/%s/binary_files" % self.training_tf_name
if lightgbm_dnase_binary_files_path is None:
lightgbm_dnase_binary_files_path = "./train/data/dnase_feature_binary_files"
if dir_out is None:
dir_out = "./train/%s/models/" % self.training_tf_name
cell_line = str(cell_line)
params = {
'boosting_type': 'gbdt',
# 'boosting_type': 'dart',
# 'drop_rate': 0.3,
# 'max_drop': 50,
# 'skip_drop': 0.5,
# 'drop_seed': 6,
# 'pos_bagging_fraction': 1,
# 'neg_bagging_fraction': 0.01,
# 'bagging_freq': 10000,
# 'bagging_seed': 6,
'objective': 'binary',
# 'objective': focal_binary_object,
# 'metric': ['binary_error', 'binary_logloss', "auc"],
'metric': ["auc"],
# 'is_unbalance': True,
# "scale_pos_weight": 100,
'metric_freq': 10,
'num_leaves': 63,
# 'num_leaves': 7,
# 'max_bin': 255,
'num_threads': num_threads,
'learning_rate': 0.05,
'feature_fraction': 1,
'boost_from_average': False,
'verbose': 1
}
# other_set_type = list(set(self.chrom_sets.keys()) - {chrom_set_name})[0]
if len(self.df_all_regions_label.columns[3:]) > 1:
other_cell_lines = list(set(self.df_all_regions_label.columns[3:]) - {cell_line})
else:
other_cell_lines = [self.df_all_regions_label.columns[3]]
# train_data = self.merge_lightgbm_binary_data(cell_line, chrom_set_name, lightgbm_dnase_binary_files_path,
# lightgbm_motif_binary_files_path)
train_data = lgb.Dataset(
"%s/lightGBM.all.%s.%s.bin" % (lightgbm_motif_binary_files_path, cell_line, chrom_set_name))
list_validation_data = []
for other_cell_line in other_cell_lines:
# validation_data = self.merge_lightgbm_binary_data(other_cell_line, "chrom_set_test",
# lightgbm_dnase_binary_files_path,
# lightgbm_motif_binary_files_path)
validation_data = lgb.Dataset("%s/lightGBM.all.%s.%s.bin" % (
lightgbm_motif_binary_files_path, other_cell_line, "chrom_set_test"), reference=train_data)
list_validation_data.append(validation_data)
evals_result = {}
train_data = train_data.construct()
# see: https://arxiv.org/pdf/1909.04868.pdf
beta = - np.log10(2 * train_data.num_data()/np.where(train_data.get_label() > 0)[0].shape[0] - 1)
gbm = lgb.train(params=params,
train_set=train_data,
fobj=lambda x, y: focal_isoform_binary_object(x, y, alpha=0.5, beta=beta, gamma=1),
# fobj=lambda x,y:logistic_obj(x,y,imbalance_alpha=1.0),
valid_sets=[train_data] + list_validation_data,
valid_names=['train'] + ["%s_%s" % (other_cell_line, "set_test") \
for other_cell_line in other_cell_lines],
feval=lgb_auprc_score,
# early_stopping_rounds=20,
evals_result=evals_result,
num_boost_round=200,
keep_training_booster=False,
callbacks=[early_stopping(20, first_metric_only=False, verbose=True)]
)
with open("%s/%s.%s.%s_model.pkl" % (
dir_out, self.training_tf_name, cell_line, chrom_set_name), 'wb') as fout:
pickle.dump(gbm, fout)
with open("%s/%s.%s.%s_evals_result.pkl" % (
dir_out, self.training_tf_name, cell_line, chrom_set_name),
'wb') as outfile_evals_result:
pickle.dump(evals_result, outfile_evals_result, pickle.HIGHEST_PROTOCOL)
def train_models_hyperopt(self, cell_line, chrom_set_name, lightgbm_dnase_binary_files_path=None,
lightgbm_motif_binary_files_path=None, dir_out=None, num_threads=16):
if lightgbm_motif_binary_files_path is None:
lightgbm_motif_binary_files_path = "./train/%s/binary_files" % self.training_tf_name
if lightgbm_dnase_binary_files_path is None:
lightgbm_dnase_binary_files_path = "./train/data/dnase_feature_binary_files"
if dir_out is None:
dir_out = "./train/%s/models/" % self.training_tf_name
# other_set_type = list(set(self.chrom_sets.keys()) - {chrom_set_name})[0]
if len(self.df_all_regions_label.columns[3:]) > 1:
other_cell_lines = list(set(self.df_all_regions_label.columns[3:]) - {cell_line})
else:
other_cell_lines = [self.df_all_regions_label.columns[3]]
# train_data = self.merge_lightgbm_binary_data(cell_line, chrom_set_name, lightgbm_dnase_binary_files_path,
# lightgbm_motif_binary_files_path)
train_data = lgb.Dataset(
"%s/lightGBM.all.%s.%s.bin" % (lightgbm_motif_binary_files_path, cell_line, chrom_set_name))
list_validation_data = []
for other_cell_line in other_cell_lines:
# validation_data = self.merge_lightgbm_binary_data(other_cell_line, "chrom_set_test",
# lightgbm_dnase_binary_files_path,
# lightgbm_motif_binary_files_path)
validation_data = lgb.Dataset("%s/lightGBM.all.%s.%s.bin" % (
lightgbm_motif_binary_files_path, other_cell_line, "chrom_set_test"), reference=train_data)
list_validation_data.append(validation_data)
def hyperopt_objective(argsDict):
"""Objective function for Gradient Boosting Machine Hyperparameter Optimization"""
# Keep track of evals
global ITERATION
ITERATION += 1
global bayes_trials
with open("%s/%s.%s.%s_bayes_trials.pkl" % (
dir_out, self.training_tf_name, cell_line, chrom_set_name), 'wb') as fout:
pickle.dump(bayes_trials, fout)
# Make sure parameters that need to be integers are integers
for parameter_name in ['num_leaves',
'min_data_in_leaf',
# 'max_depth'
]:
argsDict[parameter_name] = int(argsDict[parameter_name])
start = timer()
params = {
'boosting_type': 'gbdt',
# 'ignore_column': list(range(500)),
# 'boosting_type': 'dart',
# 'drop_rate': 0.3,
# 'max_drop': 50,
# 'skip_drop': 0.5,
# 'drop_seed': 6,
# 'pos_bagging_fraction': 0.001,
# 'neg_bagging_fraction': 0.001,
# 'bagging_freq': 10000,
# 'bagging_seed': 6,
'objective': 'binary',
# 'objective': focal_binary_object,
# 'metric': ['binary_error', 'binary_logloss', "auc"],
'metric': ["auc"],
# 'first_metric_only': True,
# 'is_unbalance': True,
# "scale_pos_weight": 100,
# 'feature_fraction_bynode': True,
'metric_freq': 10,
'num_leaves': argsDict['num_leaves'],
'min_data_in_leaf': argsDict['min_data_in_leaf'],
# 'min_data_in_leaf': 20,
# 'max_depth': argsDict['max_depth'],
# 'min_sum_hessian_in_leaf': argsDict['min_sum_hessian_in_leaf'],
# 'bagging_fraction': argsDict['bagging_fraction'],
# 'feature_fraction': argsDict['feature_fraction'],
# 'lambda_l1': argsDict['lambda_l1'],
# 'lambda_l2': argsDict['lambda_l2'],
# 'max_bin': 255,
'num_threads': num_threads,
# 'learning_rate': argsDict['learning_rate'],
'learning_rate': 0.1,
'bagging_freq': 1,
'boost_from_average': False,
'verbose': 1
}
evals_result = {}
valid_names = ['train'] + ["%s_%s" % (other_cell_line, "set_test") \
for other_cell_line in other_cell_lines]
gbm = lgb.train(params,
train_set=train_data,
fobj=lambda x, y: focal_isoform_binary_object(x, y,
# alpha=float(
# np.clip(argsDict['alpha'], 0.001, 0.999)),
alpha=1. / (1. + np.exp(
-argsDict['alpha_isoform'])),
beta=argsDict['beta'],
gamma=argsDict['gamma']),
valid_sets=[train_data] + list_validation_data,
valid_names=valid_names,
feval=lgb_auprc_score,
num_boost_round=300,
# early_stopping_rounds=20,
evals_result=evals_result,
keep_training_booster=False,
callbacks=[early_stopping(20, first_metric_only=False, verbose=True)],
)
run_time = timer() - start
# Extract the best score
# best_score = np.max(cv_results['auprc-mean'])
auprc_sum = None
n = 0
for valid_name in valid_names:
if valid_name != "train":
if auprc_sum is None:
auprc_sum = np.array(evals_result[valid_name]['auprc'])
else:
auprc_sum += np.array(evals_result[valid_name]['auprc'])
n += 1
best_score = np.max(auprc_sum / n)
# Loss must be minimized
loss = 1 - best_score
# Boosting rounds that returned the highest cv score
# n_estimators = int(np.argmax(cv_results['auprc-mean']) + 1)
n_estimators = int(np.argmax(auprc_sum) + 1)
print('auprc:{} ITERATION:{} n_estimators:{} run_time:{}'.format(best_score, ITERATION, n_estimators,
run_time),
end="\n")
# Dictionary with information for evaluation
return {'loss': loss,
'params': argsDict,
'iteration': ITERATION,
'estimators': n_estimators,
'gbm': gbm,
'evals_result': evals_result,
'train_time': run_time,
'status': STATUS_OK}
# return loss
# Define the search space
space = {
# 'class_weight': hp.choice('class_weight', [None, 'balanced']),
'num_leaves': hp.qloguniform('num_leaves', np.log(15), np.log(1023), 5),
# 'max_depth': hp.quniform('max_depth', 3, 63, 1),
# 'min_sum_hessian_in_leaf': hp.loguniform('min_sum_hessian_in_leaf', np.log(0.001), np.log(1)),
# 'learning_rate': hp.loguniform('learning_rate', np.log(0.001), np.log(0.2)),
'min_data_in_leaf': hp.quniform('min_data_in_leaf', 20, 1000, 5),
# 'lambda_l1': hp.uniform('lambda_l1', 0.0, 1.0),
# 'lambda_l2': hp.uniform('lambda_l2', 0.0, 1.0),
# 'bagging_fraction': hp.uniform('bagging_fraction', 0.4, 1.0),
# 'feature_fraction': hp.uniform('feature_fraction', 0.4, 1.0),
# 'alpha': hp.loguniform('alpha', np.log(1), np.log(100)),
# 'alpha': hp.normal('alpha', 0.5, 0.15),
'alpha_isoform': hp.normal('alpha_isoform', 0, 3),
'beta': hp.uniform('beta', -10, 10),
'gamma': hp.loguniform('gamma', np.log(1), np.log(20)),
}
# Keep track of results
global bayes_trials
# bayes_trials = Trials()
bayes_trials_file_path = "%s/%s.%s.%s_bayes_trials.pkl" % (
dir_out, self.training_tf_name, cell_line, chrom_set_name)
if os.path.exists(bayes_trials_file_path):
with open(bayes_trials_file_path, 'rb') as fin:
bayes_trials = pickle.load(fin, encoding='latin1')
else:
bayes_trials = generate_trials_to_calculate(
[{'num_leaves': 63, 'min_data_in_leaf': 20, 'alpha_isoform': 0, 'beta': -1.5, 'gamma': 1.01}])
# Global variable
global ITERATION
ITERATION = 0
# Run optimization
best = fmin(fn=hyperopt_objective, space=space, algo=tpe.suggest,
max_evals=len(bayes_trials.tids)+30, trials=bayes_trials, rstate=np.random.RandomState(6))
# Sort the trials with lowest loss (highest AUC) first
bayes_trials_results = sorted(bayes_trials.results, key=lambda x: x['loss'])
# bayes_trials_results[:10]
with open("%s/%s.%s.%s_model.hyperopt.pkl" % (
dir_out, self.training_tf_name, cell_line, chrom_set_name), 'wb') as fout:
pickle.dump(bayes_trials_results[0]['gbm'], fout)
with open("%s/%s.%s.%s_evals_result.hyperopt.pkl" % (
dir_out, self.training_tf_name, cell_line, chrom_set_name),
'wb') as outfile_evals_result:
pickle.dump(bayes_trials_results[0]['evals_result'], outfile_evals_result, pickle.HIGHEST_PROTOCOL)
with open("%s/%s.%s.%s_bayes_trials.pkl" % (
dir_out, self.training_tf_name, cell_line, chrom_set_name), 'wb') as fout:
pickle.dump(bayes_trials, fout)
def train_models_autoencoder(self, cell_line, chrom_set_name, lightgbm_dnase_binary_files_path=None,
lightgbm_motif_binary_files_path=None, dir_out=None, num_threads=16):
if lightgbm_motif_binary_files_path is None:
lightgbm_motif_binary_files_path = "./train/%s/binary_files" % self.training_tf_name
if lightgbm_dnase_binary_files_path is None:
lightgbm_dnase_binary_files_path = "./train/data/dnase_feature_binary_files"
if dir_out is None:
dir_out = "./train/%s/models/" % self.training_tf_name
params = {
'boosting_type': 'gbdt',
# 'boosting_type': 'dart',
# 'drop_rate': 0.3,
# 'max_drop': 50,
# 'skip_drop': 0.5,
# 'drop_seed': 6,
# 'pos_bagging_fraction': 1,
# 'neg_bagging_fraction': 0.01,
# 'bagging_freq': 10000,
# 'bagging_seed': 6,
'objective': 'binary',
# 'objective': focal_binary_object,
# 'metric': ['binary_error', 'binary_logloss', "auc"],
'metric': ["auc"],
# 'is_unbalance': True,
# "scale_pos_weight": 100,
'metric_freq': 10,
'num_leaves': 63,
# 'max_bin': 255,
'num_threads': num_threads,
'learning_rate': 0.1,
'feature_fraction': 1,
'boost_from_average': False,
'verbose': 1
}
# other_set_type = list(set(self.chrom_sets.keys()) - {chrom_set_name})[0]
if len(self.df_all_regions_label.columns[3:]) > 1:
other_cell_lines = list(set(self.df_all_regions_label.columns[3:]) - {cell_line})
else:
other_cell_lines = [self.df_all_regions_label.columns[3]]
# train_data = self.merge_lightgbm_binary_data(cell_line, chrom_set_name, lightgbm_dnase_binary_files_path,
# lightgbm_motif_binary_files_path)
train_data = lgb.Dataset(
"%s/lightGBM.autoencoder.all.%s.%s.bin" % (lightgbm_motif_binary_files_path, cell_line, chrom_set_name))
list_validation_data = []
for other_cell_line in other_cell_lines:
# validation_data = self.merge_lightgbm_binary_data(other_cell_line, "chrom_set_test",
# lightgbm_dnase_binary_files_path,
# lightgbm_motif_binary_files_path)
validation_data = lgb.Dataset("%s/lightGBM.autoencoder.all.%s.%s.bin" % (
lightgbm_motif_binary_files_path, other_cell_line, "chrom_set_test"), reference=train_data)
list_validation_data.append(validation_data)
evals_result = {}
gbm = lgb.train(params=params,
train_set=train_data,
fobj=lambda x, y: focal_isoform_binary_object(x, y, alpha=0.5, beta=-1.5, gamma=1.01),
# fobj=lambda x,y:logistic_obj(x,y,imbalance_alpha=1.0),
valid_sets=[train_data] + list_validation_data,
valid_names=['train'] + ["%s_%s" % (other_cell_line, "set_test") \
for other_cell_line in other_cell_lines],
feval=lgb_auprc_score,
early_stopping_rounds=20,
evals_result=evals_result,
keep_training_booster=False)
with open("%s/%s.%s.%s_model.autoencoder.pkl" % (
dir_out, self.training_tf_name, cell_line, chrom_set_name), 'wb') as fout:
pickle.dump(gbm, fout)
with open("%s/%s.%s.%s_evals_result.autoencoder.pkl" % (
dir_out, self.training_tf_name, cell_line, chrom_set_name),
'wb') as outfile_evals_result:
pickle.dump(evals_result, outfile_evals_result, pickle.HIGHEST_PROTOCOL)
def make_prediction(self, cell_line, chrom, dir_dnase_feature_median=None, lightgbm_model_files_path=None,
dir_out=None):
if dir_dnase_feature_median is None:
dir_dnase_feature_median = "./hdf5s/DNase/median"
if lightgbm_model_files_path is None:
lightgbm_model_files_path = "./train/%s/models/" % self.training_tf_name
if dir_out is None:
dir_out = "./train/%s/predictions/" % self.training_tf_name
cell_line = str(cell_line)
scores = self.get_dnase_features(cell_line, chrom, dir_dnase_feature_median)
with open("%s/%s_variable_bp_quantile_map.pkl" % (self.quantile_transformer_path, cell_line),
'rb') as fin:
qt = pickle.load(fin, encoding='latin1')
# _ = qt.transform(scores[:, :int(scores.shape[1] / 2)])
_ = qt.transform(scores)
with h5py.File(self.region_topic_model_h5, "r") as region_topic_infile:
scores_topic = region_topic_infile[chrom][...]
with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
"r") as infile:
scores_motif = infile['scores'][...]
scores_all = np.hstack((scores, scores_topic[:, 8:], scores_motif))
# model_files = glob.glob("%s/%s_*_model.pkl" % (lightgbm_model_files_path, self.training_tf_name))
model_files = ["%s/%s.%s.%s_model.pkl" % (
lightgbm_model_files_path, self.training_tf_name, training_cell_line, training_chrom_set_name)
for training_cell_line in list(self.df_all_regions_label.columns[3:])
for training_chrom_set_name in sorted(list(set(self.chrom_sets.keys()) - {'chrom_set_test'}))]
model_files = np.array(model_files, dtype='S')
preds = np.zeros((scores_all.shape[0], len(model_files)))
for ind_model_file, model_file in enumerate(model_files):
with open(model_file, 'rb') as fin:
gbm = pickle.load(fin, encoding='latin1')
ypred = gbm.predict(scores_all)
preds[:, ind_model_file] = ypred
with h5py.File('%s/%s.%s.%s_preds.h5' % (dir_out, self.training_tf_name, cell_line, chrom),
"w") as outfile:
outfile.create_dataset("model_files", data=model_files,
shape=(len(model_files),),
compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
outfile.create_dataset("preds", data=preds,
shape=preds.shape,
compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
def make_prediction_hyperopt(self, cell_line, chrom, dir_dnase_feature_median=None, lightgbm_model_files_path=None,
dir_out=None):
if dir_dnase_feature_median is None:
dir_dnase_feature_median = "./hdf5s/DNase/median"
if lightgbm_model_files_path is None:
lightgbm_model_files_path = "./train/%s/models/" % self.training_tf_name
if dir_out is None:
dir_out = "./train/%s/predictions/" % self.training_tf_name
scores = self.get_dnase_features(cell_line, chrom, dir_dnase_feature_median)
with open("%s/%s_variable_bp_quantile_map.pkl" % (self.quantile_transformer_path, cell_line),
'rb') as fin:
qt = pickle.load(fin, encoding='latin1')
# _ = qt.transform(scores[:, :int(scores.shape[1] / 2)])
_ = qt.transform(scores)
with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
"r") as infile:
scores_motif = infile['scores'][...]
scores_all = np.hstack((scores, scores_motif))
# model_files = glob.glob("%s/%s_*_model.pkl" % (lightgbm_model_files_path, self.training_tf_name))
model_files = ["%s/%s.%s.%s_model.hyperopt.pkl" % (
lightgbm_model_files_path, self.training_tf_name, training_cell_line, training_chrom_set_name)
for training_cell_line in list(self.df_all_regions_label.columns[3:])
for training_chrom_set_name in sorted(list(set(self.chrom_sets.keys()) - {'chrom_set_test'}))]
model_files = np.array(model_files, dtype='S')
preds = np.zeros((scores_all.shape[0], len(model_files)))
for ind_model_file, model_file in enumerate(model_files):
with open(model_file, 'rb') as fin:
gbm = pickle.load(fin, encoding='latin1')
ypred = gbm.predict(scores_all)
preds[:, ind_model_file] = ypred
with h5py.File('%s/%s.%s.%s_preds.hyperopt.h5' % (dir_out, self.training_tf_name, cell_line, chrom),
"w") as outfile:
outfile.create_dataset("model_files", data=model_files,
shape=(len(model_files),),
compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
outfile.create_dataset("preds", data=preds,
shape=preds.shape,
compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
def make_prediction_leaf(self, cell_line, chrom, dir_dnase_feature_median=None, lightgbm_model_files_path=None,
dir_out=None):
if dir_dnase_feature_median is None:
dir_dnase_feature_median = "./hdf5s/DNase/median"
if lightgbm_model_files_path is None:
lightgbm_model_files_path = "./train/%s/models/" % self.training_tf_name
if dir_out is None:
dir_out = "./train/%s/predictions/" % self.training_tf_name
scores = self.get_dnase_features(cell_line, chrom, dir_dnase_feature_median)
with open("%s/%s_variable_bp_quantile_map.pkl" % (self.quantile_transformer_path, cell_line),
'rb') as fin:
qt = pickle.load(fin, encoding='latin1')
# _ = qt.transform(scores[:, :int(scores.shape[1] / 2)])
_ = qt.transform(scores)
with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
"r") as infile:
scores_motif = infile['scores'][...]
scores_all = np.hstack((scores, scores_motif))
# model_files = glob.glob("%s/%s_*_model.pkl" % (lightgbm_model_files_path, self.training_tf_name))
model_files = ["%s/%s.%s.%s_model.pkl" % (
lightgbm_model_files_path, self.training_tf_name, training_cell_line, training_chrom_set_name)
for training_cell_line in list(self.df_all_regions_label.columns[3:])
for training_chrom_set_name in sorted(list(set(self.chrom_sets.keys()) - {'chrom_set_test'}))]
with h5py.File('%s/%s.%s.%s_pred_leafs.h5' % (dir_out, self.training_tf_name, cell_line, chrom),
"w") as outfile:
for ind_model_file, model_file in enumerate(model_files):
with open(model_file, 'rb') as fin:
gbm = pickle.load(fin, encoding='latin1')
leafs = gbm.predict(scores_all, raw_score=False, pred_leaf=True, pred_contrib=False)
leaf_outputs = np.zeros(leafs.shape)
for i in range(leafs.shape[0]):
for j in range(leafs.shape[1]):
leaf_outputs[i, j] = gbm.get_leaf_output(j, leafs[i, j])
gc.collect()
outfile.create_dataset("%s/%s" % (model_file.split("/")[-1], cell_line),
data=leaf_outputs,
shape=leaf_outputs.shape,
compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
outfile.flush()
training_cell_line = model_file.split("/")[-1].split('.')[1]
source_scores = self.get_dnase_features(training_cell_line, chrom, dir_dnase_feature_median)
with open("%s/%s_variable_bp_quantile_map.pkl" % (self.quantile_transformer_path, training_cell_line),
'rb') as fin:
qt = pickle.load(fin, encoding='latin1')
# _ = qt.transform(scores[:, :int(scores.shape[1] / 2)])
_ = qt.transform(source_scores)
# source_scores_all = np.hstack((source_scores, scores_motif))
gc.collect()
leafs = gbm.predict(np.hstack((source_scores, scores_motif)), raw_score=False, pred_leaf=True,
pred_contrib=False)
leaf_outputs = np.zeros(leafs.shape)
for i in range(leafs.shape[0]):
for j in range(leafs.shape[1]):
leaf_outputs[i, j] = gbm.get_leaf_output(j, leafs[i, j])
outfile.create_dataset("%s/%s" % (model_file.split("/")[-1], training_cell_line),
data=leaf_outputs,
shape=leaf_outputs.shape,
compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
outfile.flush()
model_files = np.array(model_files, dtype='S')
outfile.create_dataset("model_files", data=model_files,
shape=(len(model_files),),
compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
def make_prediction_autoencoder(self, cell_line, chrom, dir_dnase_feature_median=None,
lightgbm_model_files_path=None,
dir_out=None):
if dir_dnase_feature_median is None:
dir_dnase_feature_median = "./hdf5s/DNase/median"
if lightgbm_model_files_path is None:
lightgbm_model_files_path = "./train/%s/models/" % self.training_tf_name
if dir_out is None:
dir_out = "./train/%s/predictions/" % self.training_tf_name
scores_autoencoder = self.get_dnase_features_autoencoder(cell_line, chrom, './hdf5s/DNase')
# scores = self.get_dnase_features(cell_line, chrom, dir_dnase_feature_median)
# with open("%s/%s_variable_bp_quantile_map.pkl" % (self.quantile_transformer_path, cell_line),
# 'rb') as fin:
# qt = pickle.load(fin, encoding='latin1')
# # _ = qt.transform(scores[:, :int(scores.shape[1] / 2)])
# _ = qt.transform(scores)
with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
"r") as infile:
scores_motif = infile['scores'][...]
# scores_all = np.hstack((scores_autoencoder, scores, scores_motif))
scores_all = np.hstack((scores_autoencoder, scores_motif))
# model_files = glob.glob("%s/%s_*_model.pkl" % (lightgbm_model_files_path, self.training_tf_name))
model_files = ["%s/%s.%s.%s_model.autoencoder.pkl" % (
lightgbm_model_files_path, self.training_tf_name, training_cell_line, training_chrom_set_name)
for training_cell_line in list(self.df_all_regions_label.columns[3:])
for training_chrom_set_name in sorted(list(set(self.chrom_sets.keys()) - {'chrom_set_test'}))]
model_files = np.array(model_files, dtype='S')
preds = np.zeros((scores_all.shape[0], len(model_files)))
for ind_model_file, model_file in enumerate(model_files):
with open(model_file, 'rb') as fin:
gbm = pickle.load(fin, encoding='latin1')
ypred = gbm.predict(scores_all)
preds[:, ind_model_file] = ypred
with h5py.File('%s/%s.%s.%s_preds.autoencoder.h5' % (dir_out, self.training_tf_name, cell_line, chrom),
"w") as outfile:
outfile.create_dataset("model_files", data=model_files,
shape=(len(model_files),),
compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
outfile.create_dataset("preds", data=preds,
shape=preds.shape,
compression='gzip', shuffle=True, fletcher32=True, compression_opts=9)
def evaluation(self, cell_line, lightgbm_preds_files_path=None, dir_out=None):
if dir_out is None:
dir_out = "./train/%s/evaluations/" % self.training_tf_name
cell_line = str(cell_line)
df_test_regions_label = pd.read_csv(
"%s/%s.%s" % (
self.config['test_cell_types_regions_label_path'], self.training_tf_name,
self.config['test_cell_types_regions_label_name']), sep="\t", header=0)
list_preds_binary = []
# list_preds_binary_2 = []
list_labels = []
list_preds_matrix = []
list_chroms = []
list_starts = []
for chrom in self.chrom_all:
with h5py.File(
'%s/%s.%s.%s_preds.h5' % (lightgbm_preds_files_path, self.training_tf_name, cell_line, chrom),
"r") as infile:
model_files = infile['model_files'][...]
preds = infile['preds'][...]
labels = np.array(df_test_regions_label.loc[df_test_regions_label['chr'] == chrom, :][cell_line])
list_preds_matrix.append(preds)
preds_binary = np.mean(1. / (1. + np.exp(-preds)), axis=1)
# preds_binary_2 = 1. / (1. + np.exp(-np.mean(preds, axis=1)))
list_preds_binary.append(preds_binary)
# list_preds_binary_2.append(preds_binary_2)
list_labels.append(labels)
with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
"r") as infile:
list_starts.append(infile['starts'][...])
list_chroms.append(np.array([chrom] * infile['starts'].shape[0]))
labels = np.hstack(list_labels)
preds = np.hstack(list_preds_binary)
# preds_2 = np.hstack(list_preds_binary_2)
preds_matrix = np.vstack(list_preds_matrix)
starts = np.hstack(list_starts)
chroms = np.hstack(list_chroms)
ignore_index = np.where(labels == "A")[0]
preds_matrix = np.delete(preds_matrix, ignore_index, axis=0)
preds = np.delete(preds, ignore_index, axis=0)
label_b_u = np.delete(labels, ignore_index, axis=0)
starts = np.delete(starts, ignore_index, axis=0)
chroms = np.delete(chroms, ignore_index, axis=0)
label_b_u = np.array(list(map(lambda x: 1 if x == "B" else 0, label_b_u)))
with open("%s/%s.%s_performance.txt" % (dir_out, self.training_tf_name, cell_line), "w") as outfile:
fpr, tpr, thresholds = metrics.roc_curve(label_b_u, preds, pos_label=1)
auc = metrics.auc(fpr, tpr)
auprc = average_precision_score(label_b_u, preds)
outfile.write("average model: auc:%.6f auprc:%.6f\n" % (auc, auprc))
temp = []
for i in range(preds_matrix.shape[1]):
fpr, tpr, thresholds = metrics.roc_curve(label_b_u, preds_matrix[:, i], pos_label=1)
auc = metrics.auc(fpr, tpr)
# auprc = average_precision_score(label_b_u, preds_matrix[:, i])
auprc = average_precision_score(label_b_u, 1. / (1. + np.exp(-preds_matrix[:, i])))
outfile.write("%s model: auc:%.6f auprc:%.6f\n" % (
model_files[i].decode().split("/")[-1].replace('_model.pkl', ''), auc, auprc))
precision, recall, thresholds = precision_recall_curve(label_b_u,
1. / (1. + np.exp(-preds_matrix[:, i])),
pos_label=1)
df_temp = pd.DataFrame(None)
df_temp["precision"] = precision
df_temp["recall"] = recall
df_temp["model"] = model_files[i].decode().split("/")[-1]
temp.append(df_temp.sample(n=min(100000, df_temp.shape[0])))
df_plot = pd.concat(temp, ignore_index=True)
plt.figure(figsize=(8, 6))
ax = sns.lineplot(x="recall", y="precision", data=df_plot, hue='model', palette="tab10")
ax.set_title("%s in %s" % (self.training_tf_name, cell_line))
ax.set_xlabel("Recall")
ax.set_ylabel("Precision")
ax.get_figure().savefig("%s/%s_%s_PRC.pdf" % (dir_out, self.training_tf_name, cell_line))
df_plot.to_csv(
"%s/df_plot.PRC.%s.xls" % (
dir_out, cell_line),
sep="\t", header=True, index=False)
with open("%s/%s.%s_confusion_matrix.txt" % (dir_out, self.training_tf_name, cell_line), "w") as outfile:
for i in range(preds_matrix.shape[1]):
one_preds = 1. / (1. + np.exp(-preds_matrix[:, i]))
cutoff = 0.5
true_positive = np.where((one_preds >= cutoff) & (label_b_u == 1))[0]
false_positive = np.where((one_preds >= cutoff) & (label_b_u == 0))[0]
false_negative = np.where((one_preds < cutoff) & (label_b_u == 1))[0]
outfile.write("%s model: all_regions:%d true_positive:%d false_positive:%d false_negative:%d\n" % (
model_files[i].decode().split("/")[-1].replace('_model.pkl', ''), len(one_preds),
len(true_positive), len(false_positive), len(false_negative)))
df = pd.DataFrame(None)
df["chrom"] = np.hstack((chroms[true_positive], chroms[false_positive], chroms[false_negative]))
df["start"] = np.hstack((starts[true_positive], starts[false_positive], starts[false_negative]))
df["preds"] = np.hstack(
(one_preds[true_positive], one_preds[false_positive], one_preds[false_negative]))
df["label"] = np.hstack(
(label_b_u[true_positive], label_b_u[false_positive], label_b_u[false_negative]))
df["class"] = ['true_positive'] * len(true_positive) + ['false_positive'] * len(false_positive) + [
'false_negative'] * len(false_negative)
df.to_csv(
"%s/df.%s.regions.%s.xls" % (
dir_out, model_files[i].decode().split("/")[-1].replace('_model.pkl', ''), cell_line),
sep="\t", header=True, index=False)
def evaluation_hyperopt(self, cell_line, lightgbm_preds_files_path=None, dir_out=None):
    """Evaluate hyperopt-tuned LightGBM predictions for one test cell line.

    Reads per-chromosome prediction margins from
    ``<lightgbm_preds_files_path>/<tf>.<cell_line>.<chrom>_preds.hyperopt.h5``
    together with the ground-truth region labels, then writes:

    * ``<dir_out>/<tf>.<cell_line>_performance.hyperopt.txt`` -- AUC/AUPRC for
      the averaged ensemble and for each individual model.
    * ``<dir_out>/<tf>_<cell_line>_PRC.pdf`` -- precision/recall plot.
    * ``<dir_out>/<tf>.<cell_line>_confusion_matrix.hyperopt.txt`` plus one
      ``df.<model>.regions.xls`` per model with TP/FP/FN regions at cutoff 0.5.

    :param cell_line: test cell line whose labels are evaluated.
    :param lightgbm_preds_files_path: directory holding the *_preds.hyperopt.h5 files.
    :param dir_out: output directory; defaults to ./train/<tf>/evaluations/.
    """
    if dir_out is None:
        dir_out = "./train/%s/evaluations/" % self.training_tf_name
    df_test_regions_label = pd.read_csv(
        "%s/%s.%s" % (
            self.config['test_cell_types_regions_label_path'], self.training_tf_name,
            self.config['test_cell_types_regions_label_name']), sep="\t", header=0)
    list_preds_binary = []
    list_labels = []
    list_preds_matrix = []
    list_chroms = []
    list_starts = []
    for chrom in self.chrom_all:
        with h5py.File(
                '%s/%s.%s.%s_preds.hyperopt.h5' % (
                    lightgbm_preds_files_path, self.training_tf_name, cell_line, chrom),
                "r") as infile:
            model_files = infile['model_files'][...]
            preds = infile['preds'][...]
        labels = np.array(df_test_regions_label.loc[df_test_regions_label['chr'] == chrom, :][cell_line])
        list_preds_matrix.append(preds)
        # Ensemble score: average of the per-model sigmoid probabilities
        # (not the sigmoid of the averaged margins).
        preds_binary = np.mean(1. / (1. + np.exp(-preds)), axis=1)
        list_preds_binary.append(preds_binary)
        list_labels.append(labels)
        # Region coordinates, used later to dump TP/FP/FN regions.
        with h5py.File("%s/%s_motif_features_lightGBM.h5" % (self.selected_motif_feature_path, chrom),
                       "r") as infile:
            list_starts.append(infile['starts'][...])
            list_chroms.append(np.array([chrom] * infile['starts'].shape[0]))
    labels = np.hstack(list_labels)
    preds = np.hstack(list_preds_binary)
    preds_matrix = np.vstack(list_preds_matrix)
    starts = np.hstack(list_starts)
    chroms = np.hstack(list_chroms)
    # Drop ambiguous regions (label "A"); remaining labels are "B" (bound) / "U" (unbound).
    ignore_index = np.where(labels == "A")[0]
    preds_matrix = np.delete(preds_matrix, ignore_index, axis=0)
    preds = np.delete(preds, ignore_index, axis=0)
    label_b_u = np.delete(labels, ignore_index, axis=0)
    starts = np.delete(starts, ignore_index, axis=0)
    chroms = np.delete(chroms, ignore_index, axis=0)
    label_b_u = np.array(list(map(lambda x: 1 if x == "B" else 0, label_b_u)))
    with open("%s/%s.%s_performance.hyperopt.txt" % (dir_out, self.training_tf_name, cell_line), "w") as outfile:
        fpr, tpr, thresholds = metrics.roc_curve(label_b_u, preds, pos_label=1)
        auc = metrics.auc(fpr, tpr)
        auprc = average_precision_score(label_b_u, preds)
        outfile.write("average model: auc:%.6f auprc:%.6f\n" % (auc, auprc))
        temp = []
        for i in range(preds_matrix.shape[1]):
            fpr, tpr, thresholds = metrics.roc_curve(label_b_u, preds_matrix[:, i], pos_label=1)
            auc = metrics.auc(fpr, tpr)
            auprc = average_precision_score(label_b_u, 1. / (1. + np.exp(-preds_matrix[:, i])))
            outfile.write("%s model: auc:%.6f auprc:%.6f\n" % (
                model_files[i].decode().split("/")[-1].replace('_model.pkl', ''), auc, auprc))
            # NOTE(review): the PR curve below is computed from the *averaged*
            # `preds` on every iteration, so each model's hue plots the same
            # curve -- presumably `preds_matrix[:, i]` was intended; confirm
            # before changing (kept as-is to preserve behavior).
            precision, recall, thresholds = precision_recall_curve(label_b_u, preds, pos_label=1)
            df_temp = pd.DataFrame(None)
            df_temp["precision"] = precision
            df_temp["recall"] = recall
            df_temp["model"] = model_files[i].decode().split("/")[-1]
            # BUG FIX: cap the sample at the available row count --
            # DataFrame.sample(n=100000) raises ValueError when the frame has
            # fewer rows (the sibling evaluation method already guards this way).
            temp.append(df_temp.sample(n=min(100000, df_temp.shape[0])))
    df_plot = pd.concat(temp, ignore_index=True)
    plt.figure(figsize=(8, 6))
    ax = sns.lineplot(x="recall", y="precision", data=df_plot, hue='model', palette="tab10")
    ax.set_title("%s in %s" % (self.training_tf_name, cell_line))
    ax.set_xlabel("Recall")
    ax.set_ylabel("Precision")
    ax.get_figure().savefig("%s/%s_%s_PRC.pdf" % (dir_out, self.training_tf_name, cell_line))
    with open("%s/%s.%s_confusion_matrix.hyperopt.txt" % (dir_out, self.training_tf_name, cell_line), "w") as outfile:
        for i in range(preds_matrix.shape[1]):
            one_preds = 1. / (1. + np.exp(-preds_matrix[:, i]))
            cutoff = 0.5
            true_positive = np.where((one_preds >= cutoff) & (label_b_u == 1))[0]
            false_positive = np.where((one_preds >= cutoff) & (label_b_u == 0))[0]
            false_negative = np.where((one_preds < cutoff) & (label_b_u == 1))[0]
            outfile.write("%s model: all_regions:%d true_positive:%d false_positive:%d false_negative:%d\n" % (
                model_files[i].decode().split("/")[-1].replace('_model.pkl', ''), len(one_preds),
                len(true_positive), len(false_positive), len(false_negative)))
            # Per-model dump of every misclassified/correct-positive region.
            df = pd.DataFrame(None)
            df["chrom"] = np.hstack((chroms[true_positive], chroms[false_positive], chroms[false_negative]))
            df["start"] = np.hstack((starts[true_positive], starts[false_positive], starts[false_negative]))
            df["preds"] = np.hstack(
                (one_preds[true_positive], one_preds[false_positive], one_preds[false_negative]))
            df["label"] = np.hstack(
                (label_b_u[true_positive], label_b_u[false_positive], label_b_u[false_negative]))
            df["class"] = ['true_positive'] * len(true_positive) + ['false_positive'] * len(false_positive) + [
                'false_negative'] * len(false_negative)
            df.to_csv(
                "%s/df.%s.regions.xls" % (
                    dir_out, model_files[i].decode().split("/")[-1].replace('_model.pkl', '')),
                sep="\t", header=True, index=False)
@staticmethod
def bins_with_dnase_peak(cell_line):
    """Count conservative DNase peaks overlapping each test-region bin.

    Shells out to zcat/bedtools and parses the resulting BED stream.

    :param cell_line: cell line whose conservative narrowPeak file is used.
    :return: DataFrame with columns ['chr', 'start', 'end', 'peak_num'].
    """
    command = (
        'zcat /n/scratchlfs/xiaoleliu_lab/Jingyu/impute_cistrome/ENCODE_DREAM/DNASE/'
        'peaks/conservative/DNASE.%s.conservative.narrowPeak.gz | bedtools sort -i stdin | '
        'bedtools intersect -c -b stdin -a /n/scratchlfs/xiaoleliu_lab/Jingyu/'
        'impute_cistrome/ENCODE_DREAM/annotations/test_regions.blacklistfiltered.sorted.bed' % cell_line
    )
    pipe = os.popen(command)
    peak_counts = pd.read_csv(pipe, sep="\t", header=None)
    peak_counts.columns = ['chr', 'start', 'end', 'peak_num']
    return peak_counts
def evaluation_leafs(self, cell_line, lightgbm_preds_files_path=None, dir_out=None):
    """Compare each model's leaf-embedding structure on the target cell line
    against the same model's own training cell line.

    For every model, builds the covariance matrix of standardized predicted
    leaf indices (restricted to bins with at least one DNase peak) for both
    cell lines, then writes the Frobenius norm of their difference to
    ``<dir_out>/<tf>.<cell_line>_models_fro_norm.txt``.

    :param cell_line: target (test) cell line.
    :param lightgbm_preds_files_path: directory holding the *_pred_leafs.h5 files.
    :param dir_out: output directory; defaults to ./train/<tf>/evaluations/.
    """
    if dir_out is None:
        dir_out = "./train/%s/evaluations/" % self.training_tf_name
    dic_model_cell_type_cov = {}
    # chr19 is only used here to discover the list of models; every
    # per-chromosome file carries the same 'model_files' dataset.
    chrom = "chr19"
    with h5py.File(
            '%s/%s.%s.%s_pred_leafs.h5' % (lightgbm_preds_files_path, self.training_tf_name, cell_line, chrom),
            "r") as infile:
        model_files = infile['model_files'][...]
        model_files = list(map(lambda x: x.decode(), model_files))
    df_temp = self.bins_with_dnase_peak(cell_line)
    # Pass 1: covariance of leaf embeddings on the *target* cell line.
    for model_file in model_files:
        list_pred_leafs = []
        list_labels = []
        for chrom in self.chrom_all:
            with h5py.File(
                    '%s/%s.%s.%s_pred_leafs.h5' % (
                        lightgbm_preds_files_path, self.training_tf_name, cell_line, chrom),
                    "r") as infile:
                pred_leafs = infile["%s/%s" % (model_file.split("/")[-1], cell_line)][...]
            labels = np.array(df_temp.loc[df_temp['chr'] == chrom, :]['peak_num'])
            list_pred_leafs.append(pred_leafs)
            list_labels.append(labels)
        labels = np.hstack(list_labels)
        pred_leafs = np.vstack(list_pred_leafs)
        # Keep only bins overlapping at least one DNase peak.
        ignore_index = np.where(labels == 0)[0]
        pred_leafs = np.delete(pred_leafs, ignore_index, axis=0)
        scaler = StandardScaler(copy=True, with_mean=True, with_std=True)
        scaler.fit(pred_leafs)
        pred_leafs_scaled = scaler.transform(pred_leafs)
        dic_model_cell_type_cov[model_file.split("/")[-1]] = {}
        dic_model_cell_type_cov[model_file.split("/")[-1]][cell_line] = np.cov(pred_leafs_scaled.T)
    # Pass 2: covariance on each model's own training cell line (parsed from
    # the model filename, e.g. "<tf>.<training_cell_line>....").
    for model_file in model_files:
        training_cell_line = model_file.split("/")[-1].split('.')[1]
        df_temp = self.bins_with_dnase_peak(training_cell_line)
        list_pred_leafs = []
        list_labels = []
        for chrom in self.chrom_all:
            # NOTE(review): the file path still uses `cell_line` while the h5
            # group inside uses `training_cell_line` -- presumably each test
            # cell line's file also stores training-cell-line leaf groups;
            # confirm against the file writer before changing.
            with h5py.File(
                    '%s/%s.%s.%s_pred_leafs.h5' % (
                        lightgbm_preds_files_path, self.training_tf_name, cell_line, chrom),
                    "r") as infile:
                pred_leafs = infile["%s/%s" % (model_file.split("/")[-1], training_cell_line)][...]
            labels = np.array(df_temp.loc[df_temp['chr'] == chrom, :]['peak_num'])
            list_pred_leafs.append(pred_leafs)
            list_labels.append(labels)
        labels = np.hstack(list_labels)
        pred_leafs = np.vstack(list_pred_leafs)
        ignore_index = np.where(labels == 0)[0]
        pred_leafs = np.delete(pred_leafs, ignore_index, axis=0)
        scaler = StandardScaler(copy=True, with_mean=True, with_std=True)
        scaler.fit(pred_leafs)
        pred_leafs_scaled = scaler.transform(pred_leafs)
        dic_model_cell_type_cov[model_file.split("/")[-1]][training_cell_line] = np.cov(pred_leafs_scaled.T)
    # Report the Frobenius norm of (training-cov - target-cov) per model.
    with open("%s/%s.%s_models_fro_norm.txt" % (dir_out, self.training_tf_name, cell_line), "w") as outfile:
        for model_file in model_files:
            training_cell_line = model_file.split("/")[-1].split('.')[1]
            outfile.write("model_name:%s fro_norm:%.6f\n" % (
                model_file.split("/")[-1], LA.norm(
                    dic_model_cell_type_cov[model_file.split("/")[-1]][training_cell_line] -
                    dic_model_cell_type_cov[model_file.split("/")[-1]][cell_line], 'fro')))
def evaluation_autoencoder(self, cell_line, lightgbm_preds_files_path=None, dir_out=None):
    """Score autoencoder-feature LightGBM predictions on one test cell line.

    Reads per-chromosome margins from *_preds.autoencoder.h5 files, drops
    ambiguous ("A") regions, and writes AUC/AUPRC for the averaged ensemble
    and each individual model to
    ``<dir_out>/<tf>.<cell_line>_performance.autoencoder.txt``.

    :param cell_line: test cell line whose labels are evaluated.
    :param lightgbm_preds_files_path: directory holding the prediction h5 files.
    :param dir_out: output directory; defaults to ./train/<tf>/evaluations/.
    """
    if dir_out is None:
        dir_out = "./train/%s/evaluations/" % self.training_tf_name
    df_test_regions_label = pd.read_csv(
        "%s/%s.%s" % (
            self.config['test_cell_types_regions_label_path'], self.training_tf_name,
            self.config['test_cell_types_regions_label_name']), sep="\t", header=0)
    per_chrom_avg_probs = []
    per_chrom_labels = []
    per_chrom_margins = []
    for chrom in self.chrom_all:
        with h5py.File(
                '%s/%s.%s.%s_preds.autoencoder.h5' % (
                    lightgbm_preds_files_path, self.training_tf_name, cell_line, chrom),
                "r") as infile:
            model_files = infile['model_files'][...]
            chrom_margins = infile['preds'][...]
        chrom_labels = np.array(
            df_test_regions_label.loc[df_test_regions_label['chr'] == chrom, :][cell_line])
        per_chrom_margins.append(chrom_margins)
        # Ensemble score: mean of per-model sigmoid probabilities.
        per_chrom_avg_probs.append(np.mean(1. / (1. + np.exp(-chrom_margins)), axis=1))
        per_chrom_labels.append(chrom_labels)
    labels = np.hstack(per_chrom_labels)
    preds = np.hstack(per_chrom_avg_probs)
    preds_matrix = np.vstack(per_chrom_margins)
    # Discard ambiguous regions; map "B" (bound) -> 1, anything else -> 0.
    keep = labels != "A"
    preds_matrix = preds_matrix[keep]
    preds = preds[keep]
    label_b_u = np.where(labels[keep] == "B", 1, 0)
    with open("%s/%s.%s_performance.autoencoder.txt" % (dir_out, self.training_tf_name, cell_line),
              "w") as outfile:
        fpr, tpr, _ = metrics.roc_curve(label_b_u, preds, pos_label=1)
        outfile.write("average model: auc:%.6f auprc:%.6f\n" % (
            metrics.auc(fpr, tpr), average_precision_score(label_b_u, preds)))
        for column in range(preds_matrix.shape[1]):
            fpr, tpr, _ = metrics.roc_curve(label_b_u, preds_matrix[:, column], pos_label=1)
            model_name = model_files[column].decode().split("/")[-1].replace('_model.pkl', '')
            outfile.write("%s model: auc:%.6f auprc:%.6f\n" % (
                model_name,
                metrics.auc(fpr, tpr),
                average_precision_score(label_b_u, preds_matrix[:, column])))
def prepare_lightgbm_binary_data_dnase_median(self, cell_line, chrom_set_name, dir_out, reference=None,
                                              selected_bin_index_file=None):
    """Build a LightGBM binary dataset of median DNase features for one cell line.

    For every chromosome in the chosen set, loads per-sample DNase scores,
    quantile-normalizes each sample, takes the per-bin median across samples,
    drops ambiguous ("A") regions, and saves the stacked result as
    ``<dir_out>/median/lightGBM.<cell_line>.<chrom_set_name>.-1.bin``.

    :param cell_line: cell line whose labels select/annotate the bins.
    :param chrom_set_name: key into self.chrom_sets naming the chromosome set.
    :param dir_out: output directory for the .bin file.
    :param reference: optional directory containing a reference median .bin
        used to align LightGBM feature binning.
    :param selected_bin_index_file: optional .npy of bin indices to subset.
    """
    subset_index = -1
    chrom = "chr19"
    # TODO change to 50bp or 100bp
    # Feature names are identical across chromosomes; read them once from chr19.
    with h5py.File("%s/DNASE_bam_5_mer_variable_bp_all_samples_lightGBM_%s_all_cell_types.h5" % (
            self.dnase_feature_path, chrom), "r") as infile:
        needed_feature_names = list(infile['feature_names'][...])
        needed_feature_names = list(map(lambda x: "median_" + x.decode('UTF-8'), needed_feature_names))
    list_scores = []
    labels = []
    chrom_set = self.chrom_sets[chrom_set_name]
    for chrom in chrom_set:
        df_temp = self.df_all_regions_label.loc[self.df_all_regions_label['chr'] == chrom, :]
        with h5py.File("%s/DNASE_bam_5_mer_variable_bp_all_samples_lightGBM_%s_all_cell_types.h5" % (
                self.dnase_feature_path, chrom),
                "r") as infile:
            # scores shape: (samples, bins, features); optionally subset bins.
            if selected_bin_index_file is None:
                scores = infile[chrom][:, :, :]
            else:
                selected_bin_index = np.load(selected_bin_index_file)
                scores = infile[chrom][:, selected_bin_index, :]
            samples = list(infile['samples'][...])
            samples = list(map(lambda x: x.decode('UTF-8'), samples))
        # Quantile-normalize each sample's scores with its pickled transformer.
        for cell_line_name in samples:
            cell_line_index = np.where(np.array(samples) == cell_line_name)[0][0]
            with open("%s/%s_variable_bp_quantile_map.pkl" % (self.quantile_transformer_path, cell_line_name),
                      'rb') as fin:
                qt = pickle.load(fin, encoding='latin1')
            # NOTE(review): the transform result is discarded -- this only
            # updates `scores` if the pickled transformer was created with
            # copy=False so it mutates the sliced view in place; confirm.
            _ = qt.transform(scores[cell_line_index, :, :])
        # Median across samples -> one feature row per bin.
        scores_median = np.median(scores, axis=0)
        # Drop ambiguous regions; map "B" (bound) -> 1, else 0.
        ignore_index = np.where(df_temp[cell_line] == "A")[0]
        scores_median = np.delete(scores_median, ignore_index, axis=0)
        label_b_u = np.delete(np.array(df_temp[cell_line]), ignore_index, axis=0)
        temp_label = list(map(lambda x: 1 if x == "B" else 0, label_b_u))
        labels += temp_label
        list_scores.append(scores_median)
    all_score = np.vstack(list_scores)
    if reference is not None:
        # Align feature binning with an existing median dataset.
        reference = lgb.Dataset(glob.glob("%s/median/lightGBM.*.*.%d.bin" % (reference, subset_index))[0])
    train_data = lgb.Dataset(all_score, feature_name=list(needed_feature_names), label=labels, reference=reference)
    train_data.save_binary("%s/median/lightGBM.%s.%s.%d.bin" % (dir_out, cell_line, chrom_set_name, subset_index))
def merge_lightgbm_binary_data_median(self, cell_line, chrom_set_name, dir_out):
    """Append the median-DNase feature block to an existing LightGBM dataset.

    Loads ``<dir_out>/lightGBM_all.<cell_line>.<chrom_set_name>.bin`` and the
    median features built by prepare_lightgbm_binary_data_dnase_median, merges
    them column-wise, and saves the result under ``<dir_out>/median/``.

    :param cell_line: cell line whose datasets are merged.
    :param chrom_set_name: chromosome-set name used in the file names.
    :param dir_out: directory holding the input/output .bin files.
    """
    # TODO change to 50bp or 100bp
    # Fix: removed the pointless single-element `for cell_line in [cell_line]:`
    # loop that shadowed the parameter without changing behavior.
    subset_index = -1
    train_data_all = lgb.Dataset(
        "%s/lightGBM_all.%s.%s.bin" % (dir_out, cell_line, chrom_set_name)).construct()
    train_data = lgb.Dataset(
        "%s/median/lightGBM.%s.%s.%d.bin" %
        (dir_out, cell_line, chrom_set_name, subset_index)).construct()
    train_data_all.add_features_from(train_data)
    train_data_all.save_binary("%s/median/lightGBM_all.%s.%s.bin" % (dir_out, cell_line, chrom_set_name))
if __name__ == '__main__':
    # Expose LightGBMModel's public methods as CLI subcommands via python-fire.
    fire.Fire(LightGBMModel)
| 58.716738 | 137 | 0.581074 | 10,249 | 82,086 | 4.32169 | 0.052103 | 0.034136 | 0.0226 | 0.025602 | 0.841803 | 0.816337 | 0.794708 | 0.778385 | 0.76809 | 0.740298 | 0 | 0.01571 | 0.299003 | 82,086 | 1,397 | 138 | 58.758769 | 0.754041 | 0.15714 | 0 | 0.670132 | 1 | 0 | 0.100679 | 0.057241 | 0 | 0 | 0 | 0.000716 | 0 | 1 | 0.02552 | false | 0 | 0.029301 | 0 | 0.061437 | 0.000945 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
16771f36717dc0a9441b1a53fe7968c99d5b19d2 | 15,233 | py | Python | hunittest/test/test_termlib.py | nicolasdespres/hunittest | 11215c4dbe03cc11fbb02b1e9b57fd6f93781e92 | [
"BSD-2-Clause"
] | null | null | null | hunittest/test/test_termlib.py | nicolasdespres/hunittest | 11215c4dbe03cc11fbb02b1e9b57fd6f93781e92 | [
"BSD-2-Clause"
] | null | null | null | hunittest/test/test_termlib.py | nicolasdespres/hunittest | 11215c4dbe03cc11fbb02b1e9b57fd6f93781e92 | [
"BSD-2-Clause"
] | null | null | null | # -*- encoding: utf-8 -*-
"""Test "hunittest.line_printer" package.
"""
import unittest
from hunittest.termlib import truncate_ansi_string
from hunittest.termlib import ansi_string_truncinfo
from hunittest.termlib import TermInfo
class TestTruncateAnsiString(unittest.TestCase):
def setUp(self):
super(TestTruncateAnsiString, self).setUp()
self.termnfo = TermInfo(color_mode="always")
def test_negative(self):
with self.assertRaises(ValueError):
ansi_string_truncinfo("", -1)
with self.assertRaises(ValueError):
truncate_ansi_string("", -1)
def test_none_string(self):
with self.assertRaises(ValueError):
ansi_string_truncinfo(None, 1)
with self.assertRaises(ValueError):
truncate_ansi_string(None, 1)
def test_empty_string(self):
self.assertEqual((0, 0, False), ansi_string_truncinfo("", 0))
self.assertEqual((0, 0, False), ansi_string_truncinfo("", 1))
self.assertEqual((0, 0, False), ansi_string_truncinfo("", 100))
self.assertEqual("", truncate_ansi_string("", 0))
self.assertEqual("", truncate_ansi_string("", 1))
self.assertEqual("", truncate_ansi_string("", 100))
def test_size_0_no_ansi(self):
q = "foobarblue"
a = ""
s = 0
self.assertEqual(a, truncate_ansi_string(q, s))
self.assertEqual((s, s, False),
ansi_string_truncinfo(q, s))
def test_size_1_no_ansi(self):
q = "foobarblue"
s = 1
a = "f"
self.assertEqual(a, truncate_ansi_string(q, s))
self.assertEqual((s, s, False),
ansi_string_truncinfo(q, s))
def test_size_3_no_ansi(self):
q = "foobarblue"
a = "foo"
s = 3
self.assertEqual(a, truncate_ansi_string(q, s))
self.assertEqual((s, s, False),
ansi_string_truncinfo(q, s))
def test_size_exceed_no_ansi(self):
text = "foobarblue"
s = 10000
self.assertEqual(text, truncate_ansi_string(text, s))
self.assertEqual((len(text), len(text), False),
ansi_string_truncinfo(text, s))
def test_size_0_ansi_first(self):
fixture = self.termnfo.fore_red+"text"
size = 0
expected = ""
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((0, 0, False),
ansi_string_truncinfo(fixture, size))
def test_size_1_ansi_first(self):
fixture = self.termnfo.fore_red+"text"
size = 1
expected = self.termnfo.fore_red+"t"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), 1, True),
ansi_string_truncinfo(fixture, size))
def test_size_2_ansi_first(self):
fixture = self.termnfo.fore_red+"text"
size = 2
expected = self.termnfo.fore_red+"te"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_3_ansi_first(self):
fixture = self.termnfo.fore_red+"text"
size = 3
expected = self.termnfo.fore_red+"tex"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_exceed_ansi_first(self):
fixture = self.termnfo.fore_red+"text"
expected = self.termnfo.fore_red+"text"
actual = truncate_ansi_string(fixture, 4000)
self.assertEqual(expected, actual)
def test_size_0_ansi_middle(self):
fixture = "123"+self.termnfo.fore_red+"red"
size = 0
expected = ""
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, False),
ansi_string_truncinfo(fixture, size))
def test_size_1_ansi_middle(self):
fixture = "123"+self.termnfo.fore_red+"red"
size = 1
expected = "1"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, False),
ansi_string_truncinfo(fixture, size))
def test_size_2_ansi_middle(self):
fixture = "123"+self.termnfo.fore_red+"red"
size = 2
expected = "12"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, False),
ansi_string_truncinfo(fixture, size))
def test_size_3_ansi_middle(self):
fixture = "123"+self.termnfo.fore_red+"red"
size = 3
expected = "123"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, False),
ansi_string_truncinfo(fixture, size))
def test_size_4_ansi_middle(self):
fixture = "123"+self.termnfo.fore_red+"456"
size = 4
expected = "123"+self.termnfo.fore_red+"4"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_5_ansi_middle(self):
fixture = "123"+self.termnfo.fore_red+"456"
size = 5
expected = "123"+self.termnfo.fore_red+"45"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_6_ansi_middle(self):
fixture = "123"+self.termnfo.fore_red+"456"
size = 6
expected = "123"+self.termnfo.fore_red+"456"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_exceed_ansi_middle(self):
fixture = "123"+self.termnfo.fore_red+"456"
size = 10000
expected = "123"+self.termnfo.fore_red+"456"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), 6, True),
ansi_string_truncinfo(fixture, size))
def test_size_0_ansi_first_middle(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"red"
size = 0
expected = ""
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, False),
ansi_string_truncinfo(fixture, size))
def test_size_1_ansi_first_middle(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"red"
size = 1
expected = self.termnfo.fore_blue+"1"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_2_ansi_first_middle(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"red"
size = 2
expected = self.termnfo.fore_blue+"12"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_3_ansi_first_middle(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"red"
size = 3
expected = self.termnfo.fore_blue+"123"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_4_ansi_first_middle(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"
size = 4
expected = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"4"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_5_ansi_first_middle(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"
size = 5
expected = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"45"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_6_ansi_first_middle(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"
size = 6
expected = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_exceed_ansi_first_middle(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"
size = 1000
expected = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), 6, True),
ansi_string_truncinfo(fixture, size))
def test_size_0_ansi_first_middle_last(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"red"+self.termnfo.fore_green
size = 0
expected = ""
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, False),
ansi_string_truncinfo(fixture, size))
def test_size_1_ansi_first_middle_last(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"red"+self.termnfo.fore_green
size = 1
expected = self.termnfo.fore_blue+"1"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_2_ansi_first_middle_last(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"red"+self.termnfo.fore_green
size = 2
expected = self.termnfo.fore_blue+"12"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_3_ansi_first_middle_last(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"red"+self.termnfo.fore_green
size = 3
expected = self.termnfo.fore_blue+"123"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_4_ansi_first_middle_last(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"+self.termnfo.fore_green
size = 4
expected = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"4"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_5_ansi_first_middle_last(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"+self.termnfo.fore_green
size = 5
expected = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"45"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_6_ansi_first_middle_last(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"+self.termnfo.fore_green
size = 6
expected = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
def test_size_7_ansi_first_middle_last(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"+self.termnfo.fore_green
size = 7
expected = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"+self.termnfo.fore_green
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), 6, True),
ansi_string_truncinfo(fixture, size))
def test_size_exceed_ansi_first_middle_last(self):
fixture = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"+self.termnfo.fore_green
size = 1000
expected = self.termnfo.fore_blue+"123"+self.termnfo.fore_red+"456"+self.termnfo.fore_green
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), 6, True),
ansi_string_truncinfo(fixture, size))
def test_use_case(self):
fixture = "[ 50%|253.71|"+self.termnfo.fore_green+"2"+self.termnfo.reset_all+"|"+self.termnfo.fore_red+"0"+self.termnfo.reset_all+"|"+self.termnfo.fore_magenta+"0"+self.termnfo.reset_all+"|"+self.termnfo.fore_blue+"0"+self.termnfo.reset_all+"|"+self.termnfo.fore_yellow+"0"+self.termnfo.reset_all+"|"+self.termnfo.fore_cyan+"0"+self.termnfo.reset_all+"]"+self.termnfo.fore_green+" SUCCESS "+self.termnfo.reset_all+": testtest.test_allgood.Case1.test_success2 (0:00:00.252080)"
size = 50
expected = "[ 50%|253.71|"+self.termnfo.fore_green+"2"+self.termnfo.reset_all+"|"+self.termnfo.fore_red+"0"+self.termnfo.reset_all+"|"+self.termnfo.fore_magenta+"0"+self.termnfo.reset_all+"|"+self.termnfo.fore_blue+"0"+self.termnfo.reset_all+"|"+self.termnfo.fore_yellow+"0"+self.termnfo.reset_all+"|"+self.termnfo.fore_cyan+"0"+self.termnfo.reset_all+"]"+self.termnfo.fore_green+" SUCCESS "+self.termnfo.reset_all+": testtest.test_"
actual = truncate_ansi_string(fixture, size)
self.assertEqual(expected, actual)
self.assertEqual((len(expected), size, True),
ansi_string_truncinfo(fixture, size))
| 44.281977 | 484 | 0.642158 | 1,862 | 15,233 | 5.009667 | 0.050483 | 0.14033 | 0.167238 | 0.094554 | 0.929889 | 0.908233 | 0.894511 | 0.894511 | 0.859241 | 0.850772 | 0 | 0.030219 | 0.239677 | 15,233 | 343 | 485 | 44.411079 | 0.775168 | 0.004136 | 0 | 0.717172 | 0 | 0 | 0.030403 | 0.002704 | 0 | 0 | 0 | 0 | 0.265993 | 1 | 0.131313 | false | 0 | 0.013468 | 0 | 0.148148 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
167d148d9d0142542c59ad5d0456bc18758f6bd0 | 23,580 | py | Python | api/applications/tests/tests_edit_end_use_details.py | django-doctor/lite-api | 1ba278ba22ebcbb977dd7c31dd3701151cd036bf | [
"MIT"
] | 3 | 2019-05-15T09:30:39.000Z | 2020-04-22T16:14:23.000Z | api/applications/tests/tests_edit_end_use_details.py | django-doctor/lite-api | 1ba278ba22ebcbb977dd7c31dd3701151cd036bf | [
"MIT"
] | 85 | 2019-04-24T10:39:35.000Z | 2022-03-21T14:52:12.000Z | api/applications/tests/tests_edit_end_use_details.py | django-doctor/lite-api | 1ba278ba22ebcbb977dd7c31dd3701151cd036bf | [
"MIT"
] | 1 | 2021-01-17T11:12:19.000Z | 2021-01-17T11:12:19.000Z | from django.urls import reverse
from parameterized import parameterized
from rest_framework import status
from api.audit_trail.enums import AuditType
from api.audit_trail.models import Audit
from api.cases.enums import CaseTypeEnum
from lite_content.lite_api import strings
from api.staticdata.statuses.enums import CaseStatusEnum
from api.staticdata.statuses.libraries.get_case_status import get_case_status_by_status
from test_helpers.clients import DataTestClient
class EditStandardApplicationTests(DataTestClient):
def setUp(self):
super().setUp()
self.data = {"name": "new app name!"}
@parameterized.expand(
[
[{"key": "military_end_use_controls", "value": True, "reference_number": "48953745ref"}],
[{"key": "informed_wmd", "value": True, "reference_number": "48953745ref"}],
[{"key": "suspected_wmd", "value": True, "reference_number": "48953745ref"}],
]
)
def test_edit_unsubmitted_standard_application_end_use_details(self, attributes):
application = self.create_draft_standard_application(self.organisation)
url = reverse("applications:end_use_details", kwargs={"pk": application.id})
key = "is_" + attributes["key"]
value = attributes["value"]
data = {key: value}
reference_key = attributes["key"] + "_ref"
data[reference_key] = attributes["reference_number"]
updated_at = application.updated_at
response = self.client.put(url, data, **self.exporter_headers)
application.refresh_from_db()
self.assertEqual(response.status_code, status.HTTP_200_OK)
attribute = getattr(application, key)
self.assertEqual(attribute, value)
self.assertNotEqual(application.updated_at, updated_at)
# Unsubmitted (draft) applications should not create audit entries when edited
self.assertEqual(Audit.objects.count(), 0)
@parameterized.expand(
    [
        [{"key": "military_end_use_controls", "value": True, "reference_number": ""}],
        [{"key": "informed_wmd", "value": True, "reference_number": ""}],
        [{"key": "suspected_wmd", "value": True, "reference_number": ""}],
    ]
)
def test_edit_unsubmitted_standard_application_end_use_details_mandatory_ref_is_empty(self, attributes):
    """An empty reference number is rejected and leaves the draft unchanged."""
    draft = self.create_draft_standard_application(self.organisation)
    url = reverse("applications:end_use_details", kwargs={"pk": draft.id})
    flag_field = "is_" + attributes["key"]
    ref_field = attributes["key"] + "_ref"
    original_flag = getattr(draft, flag_field)
    payload = {flag_field: attributes["value"], ref_field: attributes["reference_number"]}

    response = self.client.put(url, payload, **self.exporter_headers)

    draft.refresh_from_db()
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    errors = response.json()["errors"]
    self.assertEqual(len(errors), 1)
    self.assertEqual(
        errors[ref_field],
        [strings.Applications.Generic.EndUseDetails.Error.MISSING_DETAILS],
    )
    self.assertEqual(getattr(draft, flag_field), original_flag)
@parameterized.expand(
    [
        [{"key": "military_end_use_controls", "value": True}],
        [{"key": "informed_wmd", "value": True}],
        [{"key": "suspected_wmd", "value": True}],
    ]
)
def test_edit_unsubmitted_standard_application_end_use_details_mandatory_ref_is_missing(self, attributes):
    """Omitting the reference number entirely is rejected; draft stays unchanged."""
    draft = self.create_draft_standard_application(self.organisation)
    url = reverse("applications:end_use_details", kwargs={"pk": draft.id})
    flag_field = "is_" + attributes["key"]
    ref_field = attributes["key"] + "_ref"
    original_flag = getattr(draft, flag_field)
    # Deliberately send only the yes/no flag, without the reference field.
    payload = {flag_field: attributes["value"]}

    response = self.client.put(url, payload, **self.exporter_headers)

    draft.refresh_from_db()
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    errors = response.json()["errors"]
    self.assertEqual(len(errors), 1)
    self.assertEqual(
        errors[ref_field],
        [strings.Applications.Generic.EndUseDetails.Error.MISSING_DETAILS],
    )
    self.assertEqual(getattr(draft, flag_field), original_flag)
@parameterized.expand(
    [
        [
            {
                "key": "military_end_use_controls",
                "value": "",
                "error": strings.Applications.Generic.EndUseDetails.Error.INFORMED_TO_APPLY,
            }
        ],
        [
            {
                "key": "informed_wmd",
                "value": "",
                "error": strings.Applications.Generic.EndUseDetails.Error.INFORMED_WMD,
            }
        ],
        [
            {
                "key": "suspected_wmd",
                "value": "",
                "error": strings.Applications.Generic.EndUseDetails.Error.SUSPECTED_WMD,
            }
        ],
    ]
)
def test_edit_unsubmitted_standard_application_end_use_details_mandatory_field_is_empty(self, attributes):
    """An empty yes/no answer is rejected with the field-specific error message."""
    draft = self.create_draft_standard_application(self.organisation)
    url = reverse("applications:end_use_details", kwargs={"pk": draft.id})
    flag_field = "is_" + attributes["key"]
    original_flag = getattr(draft, flag_field)

    response = self.client.put(url, {flag_field: attributes["value"]}, **self.exporter_headers)

    draft.refresh_from_db()
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    errors = response.json()["errors"]
    self.assertEqual(len(errors), 1)
    self.assertEqual(errors[flag_field], [attributes["error"]])
    self.assertEqual(getattr(draft, flag_field), original_flag)
@parameterized.expand(
    [
        [{"key": "military_end_use_controls", "value": True, "reference_number": "hadd"}],
        [{"key": "informed_wmd", "value": True, "reference_number": "kjjdnsk"}],
        [{"key": "suspected_wmd", "value": True, "reference_number": "kjndskhjds"}],
    ]
)
def test_edit_submitted_standard_application_end_use_details_major_editable(self, attributes):
    """While in applicant-editing (a major edit) the flag plus its reference are accepted."""
    application = self.create_standard_application_case(self.organisation)
    application.status = get_case_status_by_status(CaseStatusEnum.APPLICANT_EDITING)
    application.save()
    url = reverse("applications:end_use_details", kwargs={"pk": application.id})
    field = "is_" + attributes["key"]
    payload = {
        field: attributes["value"],
        attributes["key"] + "_ref": attributes["reference_number"],
    }

    response = self.client.put(url, payload, **self.exporter_headers)
    application.refresh_from_db()

    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(getattr(application, field), attributes["value"])
    self.assertEqual(Audit.objects.count(), 3)
@parameterized.expand(
    [
        [{"key": "is_military_end_use_controls", "value": True, "reference_number": "hadd"}],
        [{"key": "is_informed_wmd", "value": True, "reference_number": "kjjdnsk"}],
        [{"key": "is_suspected_wmd", "value": True, "reference_number": "kjndskhjds"}],
    ]
)
def test_edit_submitted_standard_application_end_use_details_not_major_editable(self, attributes):
    """A submitted case outside a major edit refuses the change with a non-field error."""
    application = self.create_standard_application_case(self.organisation)
    url = reverse("applications:end_use_details", kwargs={"pk": application.id})
    field = attributes["key"]
    previous_value = getattr(application, field)

    response = self.client.put(url, {field: attributes["value"]}, **self.exporter_headers)
    application.refresh_from_db()

    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    errors = response.json()["errors"]
    self.assertEqual(len(errors), 1)
    self.assertEqual(
        errors["non_field_errors"],
        [strings.Applications.Generic.INVALID_OPERATION_FOR_NON_DRAFT_OR_MAJOR_EDIT_CASE_ERROR],
    )
    self.assertEqual(getattr(application, field), previous_value)
def test_edit_standard_submitted_application_end_use_details_is_compliant_limitations_eu(self):
    """During a major edit the EU-military compliance flag and its reference can be updated."""
    application = self.create_standard_application_case(self.organisation)
    application.is_eu_military = True
    application.status = get_case_status_by_status(CaseStatusEnum.APPLICANT_EDITING)
    application.save()
    url = reverse("applications:end_use_details", kwargs={"pk": application.id})
    payload = {"is_compliant_limitations_eu": False, "compliant_limitations_eu_ref": "24524f"}

    response = self.client.put(url, payload, **self.exporter_headers)
    application.refresh_from_db()

    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(application.is_compliant_limitations_eu, payload["is_compliant_limitations_eu"])
    self.assertEqual(application.compliant_limitations_eu_ref, payload["compliant_limitations_eu_ref"])
    self.assertEqual(Audit.objects.count(), 3)
def test_edit_standard_application_end_use_details_is_compliant_limitations_eu_is_empty(self):
    """An empty EU-military compliance answer is rejected with its dedicated error."""
    application = self.create_draft_standard_application(self.organisation)
    application.is_eu_military = True
    application.status = get_case_status_by_status(CaseStatusEnum.APPLICANT_EDITING)
    application.save()
    url = reverse("applications:end_use_details", kwargs={"pk": application.id})
    payload = {"is_compliant_limitations_eu": "", "compliant_limitations_eu_ref": ""}

    response = self.client.put(url, payload, **self.exporter_headers)
    application.refresh_from_db()

    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    errors = response.json()["errors"]
    self.assertEqual(len(errors), 1)
    self.assertEqual(
        errors["is_compliant_limitations_eu"],
        [strings.Applications.Generic.EndUseDetails.Error.COMPLIANT_LIMITATIONS_EU],
    )
def test_edit_standard_application_end_use_details_intended_end_use(self):
    """Intended end use is saved while applicant-editing and produces one audit entry."""
    application = self.create_draft_standard_application(self.organisation)
    application.status = get_case_status_by_status(CaseStatusEnum.APPLICANT_EDITING)
    application.save()
    url = reverse("applications:end_use_details", kwargs={"pk": application.id})
    payload = {"intended_end_use": "this is the intended end use"}

    response = self.client.put(url, payload, **self.exporter_headers)
    application.refresh_from_db()

    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(application.intended_end_use, payload["intended_end_use"])
    self.assertEqual(Audit.objects.count(), 1)
def test_edit_standard_application_end_use_details_intended_end_use_is_empty(self):
    """An empty intended end use is rejected with the INTENDED_END_USE error."""
    application = self.create_draft_standard_application(self.organisation)
    application.status = get_case_status_by_status(CaseStatusEnum.APPLICANT_EDITING)
    application.save()
    url = reverse("applications:end_use_details", kwargs={"pk": application.id})
    payload = {"intended_end_use": ""}

    response = self.client.put(url, payload, **self.exporter_headers)
    application.refresh_from_db()

    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    errors = response.json()["errors"]
    self.assertEqual(len(errors), 1)
    self.assertEqual(
        errors["intended_end_use"],
        [strings.Applications.Generic.EndUseDetails.Error.INTENDED_END_USE],
    )
class EditOpenApplicationTests(DataTestClient):
    """End-use-details endpoint behaviour for open applications across the
    draft, major-edit (applicant editing) and plain-submitted states."""

    def setUp(self):
        super().setUp()
        self.application = self.create_draft_open_application(self.organisation)
        self.url = reverse("applications:end_use_details", kwargs={"pk": self.application.id})

    @parameterized.expand(
        [
            [{"key": "military_end_use_controls", "value": True, "reference_number": "48953745ref"}],
            [{"key": "informed_wmd", "value": True, "reference_number": "48953745ref"}],
            [{"key": "suspected_wmd", "value": True, "reference_number": "48953745ref"}],
        ]
    )
    def test_edit_unsubmitted_open_application_end_use_details(self, attributes):
        """A draft accepts a flag plus its reference, bumps updated_at and logs no audit."""
        key = "is_" + attributes["key"]
        value = attributes["value"]
        data = {key: value}
        if "reference_number" in attributes:
            reference_key = attributes["key"] + "_ref"
            data[reference_key] = attributes["reference_number"]
        updated_at = self.application.updated_at

        response = self.client.put(self.url, data, **self.exporter_headers)
        self.application.refresh_from_db()

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(getattr(self.application, key), value)
        self.assertNotEqual(self.application.updated_at, updated_at)
        # Unsubmitted (draft) applications should not create audit entries when edited
        self.assertEqual(Audit.objects.count(), 0)

    @parameterized.expand(
        [
            [{"key": "military_end_use_controls", "value": True, "reference_number": ""}],
            [{"key": "informed_wmd", "value": True, "reference_number": ""}],
            [{"key": "suspected_wmd", "value": True, "reference_number": ""}],
        ]
    )
    def test_edit_unsubmitted_open_application_end_use_details_mandatory_ref_is_empty(self, attributes):
        """An empty reference number for a true flag is a 400 and the draft is unchanged."""
        key = "is_" + attributes["key"]
        old_attribute = getattr(self.application, key)
        value = attributes["value"]
        data = {key: value}
        reference_key = attributes["key"] + "_ref"
        data[reference_key] = attributes["reference_number"]

        response = self.client.put(self.url, data, **self.exporter_headers)
        self.application.refresh_from_db()

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(len(response.json()["errors"]), 1)
        self.assertEqual(
            response.json()["errors"][reference_key],
            [strings.Applications.Generic.EndUseDetails.Error.MISSING_DETAILS],
        )
        self.assertEqual(getattr(self.application, key), old_attribute)

    @parameterized.expand(
        [
            [{"key": "military_end_use_controls", "value": True}],
            [{"key": "informed_wmd", "value": True}],
            [{"key": "suspected_wmd", "value": True}],
        ]
    )
    def test_edit_unsubmitted_open_application_end_use_details_mandatory_ref_is_missing(self, attributes):
        """Omitting the reference field for a true flag is a 400 and the draft is unchanged."""
        key = "is_" + attributes["key"]
        old_attribute = getattr(self.application, key)
        value = attributes["value"]
        data = {key: value}
        reference_key = attributes["key"] + "_ref"

        response = self.client.put(self.url, data, **self.exporter_headers)
        self.application.refresh_from_db()

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(len(response.json()["errors"]), 1)
        self.assertEqual(
            response.json()["errors"][reference_key],
            [strings.Applications.Generic.EndUseDetails.Error.MISSING_DETAILS],
        )
        self.assertEqual(getattr(self.application, key), old_attribute)

    @parameterized.expand(
        [
            [
                {
                    "key": "military_end_use_controls",
                    "value": "",
                    "error": strings.Applications.Generic.EndUseDetails.Error.INFORMED_TO_APPLY,
                }
            ],
            [
                {
                    "key": "informed_wmd",
                    "value": "",
                    "error": strings.Applications.Generic.EndUseDetails.Error.INFORMED_WMD,
                }
            ],
            [
                {
                    "key": "suspected_wmd",
                    "value": "",
                    "error": strings.Applications.Generic.EndUseDetails.Error.SUSPECTED_WMD,
                }
            ],
        ]
    )
    def test_edit_unsubmitted_open_application_end_use_details_mandatory_field_is_empty(self, attributes):
        """An empty mandatory flag yields the field-specific error and no change to the draft."""
        key = "is_" + attributes["key"]
        old_attribute = getattr(self.application, key)
        value = attributes["value"]
        data = {key: value}

        response = self.client.put(self.url, data, **self.exporter_headers)
        self.application.refresh_from_db()

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(len(response.json()["errors"]), 1)
        self.assertEqual(response.json()["errors"][key], [attributes["error"]])
        self.assertEqual(getattr(self.application, key), old_attribute)

    @parameterized.expand(
        [
            [{"key": "military_end_use_controls", "value": True, "reference_number": "hadd"}],
            [{"key": "informed_wmd", "value": True, "reference_number": "kjjdnsk"}],
            [{"key": "suspected_wmd", "value": True, "reference_number": "kjndskhjds"}],
        ]
    )
    def test_edit_submitted_open_application_end_use_details_major_editable(self, attributes):
        """While applicant-editing, a submitted open application accepts the edit (3 audits)."""
        application = self.create_draft_open_application(self.organisation)
        self.submit_application(application)
        application.status = get_case_status_by_status(CaseStatusEnum.APPLICANT_EDITING)
        application.save()
        url = reverse("applications:end_use_details", kwargs={"pk": application.id})
        key = "is_" + attributes["key"]
        value = attributes["value"]
        reference_key = attributes["key"] + "_ref"
        reference_value = attributes["reference_number"]
        data = {key: value, reference_key: reference_value}

        response = self.client.put(url, data, **self.exporter_headers)
        application.refresh_from_db()

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(getattr(application, key), value)
        self.assertEqual(Audit.objects.count(), 3)

    @parameterized.expand(
        [
            [{"key": "is_military_end_use_controls", "value": True, "reference_number": "hadd"}],
            [{"key": "is_informed_wmd", "value": True, "reference_number": "kjjdnsk"}],
            [{"key": "is_suspected_wmd", "value": True, "reference_number": "kjndskhjds"}],
        ]
    )
    def test_edit_submitted_open_application_end_use_details_not_major_editable(self, attributes):
        """Outside a major edit the endpoint refuses changes and the case is untouched."""
        application = self.create_draft_open_application(self.organisation)
        self.submit_application(application)
        url = reverse("applications:end_use_details", kwargs={"pk": application.id})
        key = attributes["key"]
        # Bug fix: snapshot the application under test, not the unrelated draft
        # created in setUp (the original read getattr(self.application, key)),
        # matching the standard-application variant of this test.
        old_attribute = getattr(application, key)
        value = attributes["value"]
        data = {key: value}

        response = self.client.put(url, data, **self.exporter_headers)
        application.refresh_from_db()

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(len(response.json()["errors"]), 1)
        self.assertEqual(
            response.json()["errors"]["non_field_errors"],
            [strings.Applications.Generic.INVALID_OPERATION_FOR_NON_DRAFT_OR_MAJOR_EDIT_CASE_ERROR],
        )
        self.assertEqual(getattr(application, key), old_attribute)

    def test_edit_open_application_end_use_details_intended_end_use(self):
        """Intended end use is updatable during a major edit (2 audits for open apps)."""
        application = self.create_draft_open_application(self.organisation)
        self.submit_application(application)
        application.status = get_case_status_by_status(CaseStatusEnum.APPLICANT_EDITING)
        application.save()
        url = reverse("applications:end_use_details", kwargs={"pk": application.id})
        data = {
            "intended_end_use": "this is the intended end use",
        }

        response = self.client.put(url, data, **self.exporter_headers)
        application.refresh_from_db()

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(application.intended_end_use, data["intended_end_use"])
        self.assertEqual(Audit.objects.count(), 2)

    def test_edit_open_application_end_use_details_intended_end_use_is_empty(self):
        """An empty intended end use is rejected with the INTENDED_END_USE error."""
        application = self.create_draft_open_application(self.organisation)
        self.submit_application(application)
        application.status = get_case_status_by_status(CaseStatusEnum.APPLICANT_EDITING)
        application.save()
        url = reverse("applications:end_use_details", kwargs={"pk": application.id})
        data = {
            "intended_end_use": "",
        }

        response = self.client.put(url, data, **self.exporter_headers)
        application.refresh_from_db()

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(len(response.json()["errors"]), 1)
        self.assertEqual(
            response.json()["errors"]["intended_end_use"],
            [strings.Applications.Generic.EndUseDetails.Error.INTENDED_END_USE],
        )
class EditF680ApplicationTests(DataTestClient):
    """End-use-details endpoint behaviour for F680 MOD clearance applications."""

    def setUp(self):
        super().setUp()
        self.application = self.create_mod_clearance_application(self.organisation, case_type=CaseTypeEnum.F680)
        self.url = reverse("applications:end_use_details", kwargs={"pk": self.application.id})

    def test_edit_f680_application_end_use_details_intended_end_use(self):
        """A valid intended end use is saved and audited once while applicant-editing."""
        self.application.status = get_case_status_by_status(CaseStatusEnum.APPLICANT_EDITING)
        self.application.save()
        payload = {"intended_end_use": "this is the intended end use"}

        response = self.client.put(self.url, payload, **self.exporter_headers)
        self.application.refresh_from_db()

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(self.application.intended_end_use, payload["intended_end_use"])
        self.assertEqual(Audit.objects.count(), 1)

    def test_edit_f680_application_end_use_details_intended_end_use_is_empty_failure(self):
        """A submitted F680 in applicant-editing rejects an empty intended end use."""
        self.submit_application(self.application)
        self.application.status = get_case_status_by_status(CaseStatusEnum.APPLICANT_EDITING)
        self.application.save()
        payload = {"intended_end_use": ""}

        response = self.client.put(self.url, payload, **self.exporter_headers)
        self.application.refresh_from_db()

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        errors = response.json()["errors"]
        self.assertEqual(len(errors), 1)
        self.assertEqual(
            errors["intended_end_use"],
            [strings.Applications.Generic.EndUseDetails.Error.INTENDED_END_USE],
        )
| 42.107143 | 112 | 0.654835 | 2,464 | 23,580 | 5.96388 | 0.060065 | 0.030623 | 0.031848 | 0.039197 | 0.942157 | 0.93508 | 0.931882 | 0.926438 | 0.912964 | 0.90541 | 0 | 0.007947 | 0.226209 | 23,580 | 559 | 113 | 42.182469 | 0.797435 | 0.006489 | 0 | 0.729847 | 0 | 0 | 0.124701 | 0.040386 | 0 | 0 | 0 | 0 | 0.154684 | 1 | 0.050109 | false | 0 | 0.021786 | 0 | 0.078431 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bc758b3925660fa4f41257839a076d4719a73367 | 2,805 | py | Python | src/day1.py | pgilad/advent-of-code-2017 | 4f5cf2541ba932417b10d18076d6d23043a83149 | [
"MIT"
] | 1 | 2017-12-02T12:33:12.000Z | 2017-12-02T12:33:12.000Z | src/day1.py | pgilad/advent-of-code-2017 | 4f5cf2541ba932417b10d18076d6d23043a83149 | [
"MIT"
] | null | null | null | src/day1.py | pgilad/advent-of-code-2017 | 4f5cf2541ba932417b10d18076d6d23043a83149 | [
"MIT"
] | null | null | null | input_string = "738576468625144447399791512378297253634373265751783467175946279546121378242834293189618169557899627432131741924235953478395737293295377433633811848896717272765186249883819531765428979755868345851112699621795332281722937237345586217784447844339183548459152523565186346489117792724495492582778679943653659256137426929947473832129357538589943844655856924123627877977998358791243139547524479653888837328718692164742686623775673734273197676395949914999631559158471612219918329527743987291137131392459448676647943854441741652974349511481982598452443736722523418477261794252595496113697687532518272575476837268453197261445513452359633835537444427352211536223873438316477812937662862149766296545676163179617835359962988766593952189244736121947964648397879839271611979328271773952489738595827372677631815497767554628778987426533968875397718512933492971548638187528627852824769646416229769169815471277558954194526357489726657599645554762553794792797249797933393211516515146274221632732111629137239658561866447571532129812233578926294228457132841456937546438644682488255191884318519582954737391548268753443294277831254275279831343462849829521669264671313724419812321953169355984891583462382591919153265873542217696545174186966671487415849255644595485229916186865144812382582177536321924624451594639268627554556198935557394692476744225346534275399576479192795115877123194417769246953149455969791117661394339625814182224457845749836135238151816658758334223381698932954441562112739799672399739721967648696668472965376352576865532444399112986212918121533994755525727959292125824664621576473669858321162588743617614925135645235821145834343937468834111652972697243469732473452511419222964146422798658284547774174778767358884843971361932688962432694455338678282163353877537191597389995929523292799674221892651437416894758244189273146299348187727771443688759722387188114969322892844242761166465577233347189373593241993783293795349592951483766388393841664438734282583667373377811
9481514427512453357628396666791547531814844176342696362416842993761919369994779897357348334197721735231299249116477"
def sum_digits(string, step_size=1):
    """Sum every digit in ``string`` that equals the digit ``step_size``
    positions ahead, treating the string as circular (AoC 2017 day 1).

    :param string: a string of decimal digit characters.
    :param step_size: how far ahead the comparison digit sits (1 for part
        one; half the string length for part two).
    :return: the sum of all matching digits (0 for an empty string).
    """
    ongoing_sum = 0
    # Bug fix: measure the argument, not the module-level input_string, so
    # the function gives correct answers for arbitrary inputs.
    size = len(string)
    for index, char in enumerate(string):
        digit = int(char)
        # Wrap around the end of the string to make it circular.
        correct_index = (index + step_size) % size
        next_digit = int(string[correct_index])
        if digit == next_digit:
            ongoing_sum += digit
    return ongoing_sum
def main():
    """Print the answers to both parts of the puzzle for the embedded input."""
    # Part one: compare each digit with its immediate neighbour.
    step_size = 1
    step_sum = sum_digits(input_string, step_size=step_size)
    # print() with a single argument behaves identically on Python 2 and 3;
    # the original `print "..."` statement is a SyntaxError under Python 3.
    print("sum using step size {}: {}".format(step_size, step_sum))

    # Part two: compare each digit with the one halfway around the circle.
    # `//` keeps floor division on both Python 2 and 3 (plain `/` would
    # yield a float step size on Python 3).
    step_size = len(input_string) // 2
    step_sum = sum_digits(input_string, step_size=step_size)
    print("sum using step size {}: {}".format(step_size, step_sum))


if __name__ == "__main__":
    main()
| 90.483871 | 2,091 | 0.908378 | 103 | 2,805 | 24.378641 | 0.281553 | 0.038232 | 0.019116 | 0.014337 | 0.067702 | 0.067702 | 0.067702 | 0.067702 | 0.067702 | 0.067702 | 0 | 0.790415 | 0.062745 | 2,805 | 30 | 2,092 | 93.5 | 0.164701 | 0 | 0 | 0.2 | 0 | 0 | 0.760784 | 0.739394 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.1 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
bc7b402e519cf03f537bbf4dfc8df40fdcce30de | 86 | py | Python | recommendations_microservice/namespaces/__init__.py | 7552-2020C2-grupo5/recommendations_microservice | 05e6780911c27719a8056d28c4adcbb82a0110ec | [
"Apache-2.0"
] | null | null | null | recommendations_microservice/namespaces/__init__.py | 7552-2020C2-grupo5/recommendations_microservice | 05e6780911c27719a8056d28c4adcbb82a0110ec | [
"Apache-2.0"
] | null | null | null | recommendations_microservice/namespaces/__init__.py | 7552-2020C2-grupo5/recommendations_microservice | 05e6780911c27719a8056d28c4adcbb82a0110ec | [
"Apache-2.0"
] | null | null | null | from .default import ns as default_namespace
from .token import ns as token_namespace
| 28.666667 | 44 | 0.837209 | 14 | 86 | 5 | 0.5 | 0.228571 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.139535 | 86 | 2 | 45 | 43 | 0.945946 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
bce64e5b10fbe33cfef3f78dabd002d1d62054a5 | 112 | py | Python | app/main/application/api_model/__init__.py | meneel/TAP-API | 3f839a132044389bf4d27f978275d026071d6df1 | [
"MIT"
] | 1 | 2021-12-12T10:01:10.000Z | 2021-12-12T10:01:10.000Z | app/main/application/api_model/__init__.py | meneel/TAP-API | 3f839a132044389bf4d27f978275d026071d6df1 | [
"MIT"
] | null | null | null | app/main/application/api_model/__init__.py | meneel/TAP-API | 3f839a132044389bf4d27f978275d026071d6df1 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
from .login import *
from .validate import*
from .forgot_and_reset_password import *
| 18.666667 | 40 | 0.714286 | 15 | 112 | 5.133333 | 0.733333 | 0.25974 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010526 | 0.151786 | 112 | 5 | 41 | 22.4 | 0.8 | 0.205357 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
bcfae1535b271507c2dbb069ecf9ebc19259da41 | 22,981 | py | Python | sdk/python/pulumi_aws/schemas/schema.py | rapzo/pulumi-aws | 390a098221315d98a54ba97d1559e750dc3053b7 | [
"ECL-2.0",
"Apache-2.0"
] | 260 | 2018-06-18T14:57:00.000Z | 2022-03-29T11:41:03.000Z | sdk/python/pulumi_aws/schemas/schema.py | rapzo/pulumi-aws | 390a098221315d98a54ba97d1559e750dc3053b7 | [
"ECL-2.0",
"Apache-2.0"
] | 1,154 | 2018-06-19T20:38:20.000Z | 2022-03-31T19:48:16.000Z | sdk/python/pulumi_aws/schemas/schema.py | rapzo/pulumi-aws | 390a098221315d98a54ba97d1559e750dc3053b7 | [
"ECL-2.0",
"Apache-2.0"
] | 115 | 2018-06-28T03:20:27.000Z | 2022-03-29T11:41:06.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['SchemaArgs', 'Schema']
# NOTE(review): this class is emitted by the Pulumi tfgen bridge (see the file
# header) — prefer regenerating over hand-editing so changes survive codegen.
@pulumi.input_type
class SchemaArgs:
    def __init__(__self__, *,
                 content: pulumi.Input[str],
                 registry_name: pulumi.Input[str],
                 type: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a Schema resource.
        :param pulumi.Input[str] content: The schema specification. Must be a valid Open API 3.0 spec.
        :param pulumi.Input[str] registry_name: The name of the registry in which this schema belongs.
        :param pulumi.Input[str] type: The type of the schema. Valid values: `OpenApi3`.
        :param pulumi.Input[str] description: The description of the schema. Maximum of 256 characters.
        :param pulumi.Input[str] name: The name of the schema. Maximum of 385 characters consisting of lower case letters, upper case letters, ., -, _, @.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A key-value map of tags for the schema.
        """
        pulumi.set(__self__, "content", content)
        pulumi.set(__self__, "registry_name", registry_name)
        pulumi.set(__self__, "type", type)
        # Optional arguments are only recorded when explicitly supplied.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    @property
    @pulumi.getter
    def content(self) -> pulumi.Input[str]:
        """
        The schema specification. Must be a valid Open API 3.0 spec.
        """
        return pulumi.get(self, "content")

    @content.setter
    def content(self, value: pulumi.Input[str]):
        pulumi.set(self, "content", value)

    @property
    @pulumi.getter(name="registryName")
    def registry_name(self) -> pulumi.Input[str]:
        """
        The name of the registry in which this schema belongs.
        """
        return pulumi.get(self, "registry_name")

    @registry_name.setter
    def registry_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "registry_name", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        The type of the schema. Valid values: `OpenApi3`.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the schema. Maximum of 256 characters.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the schema. Maximum of 385 characters consisting of lower case letters, upper case letters, ., -, _, @.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A key-value map of tags for the schema.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
# NOTE(review): generated by the Pulumi tfgen bridge — prefer regenerating
# over hand-editing so changes survive codegen.
@pulumi.input_type
class _SchemaState:
    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 content: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 last_modified: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 registry_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 version: Optional[pulumi.Input[str]] = None,
                 version_created_date: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Schema resources.
        :param pulumi.Input[str] arn: The Amazon Resource Name (ARN) of the discoverer.
        :param pulumi.Input[str] content: The schema specification. Must be a valid Open API 3.0 spec.
        :param pulumi.Input[str] description: The description of the schema. Maximum of 256 characters.
        :param pulumi.Input[str] last_modified: The last modified date of the schema.
        :param pulumi.Input[str] name: The name of the schema. Maximum of 385 characters consisting of lower case letters, upper case letters, ., -, _, @.
        :param pulumi.Input[str] registry_name: The name of the registry in which this schema belongs.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A key-value map of tags for the schema.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: Full tag map computed by the provider (presumably resource tags merged with provider-level default tags — confirm against provider docs).
        :param pulumi.Input[str] type: The type of the schema. Valid values: `OpenApi3`.
        :param pulumi.Input[str] version: The version of the schema.
        :param pulumi.Input[str] version_created_date: The created date of the version of the schema.
        """
        # Every state field is optional; only the ones provided are recorded.
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if content is not None:
            pulumi.set(__self__, "content", content)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if last_modified is not None:
            pulumi.set(__self__, "last_modified", last_modified)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if registry_name is not None:
            pulumi.set(__self__, "registry_name", registry_name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)
        if tags_all is not None:
            pulumi.set(__self__, "tags_all", tags_all)
        if type is not None:
            pulumi.set(__self__, "type", type)
        if version is not None:
            pulumi.set(__self__, "version", version)
        if version_created_date is not None:
            pulumi.set(__self__, "version_created_date", version_created_date)

    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        The Amazon Resource Name (ARN) of the discoverer.
        """
        return pulumi.get(self, "arn")

    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)

    @property
    @pulumi.getter
    def content(self) -> Optional[pulumi.Input[str]]:
        """
        The schema specification. Must be a valid Open API 3.0 spec.
        """
        return pulumi.get(self, "content")

    @content.setter
    def content(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the schema. Maximum of 256 characters.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="lastModified")
    def last_modified(self) -> Optional[pulumi.Input[str]]:
        """
        The last modified date of the schema.
        """
        return pulumi.get(self, "last_modified")

    @last_modified.setter
    def last_modified(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_modified", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the schema. Maximum of 385 characters consisting of lower case letters, upper case letters, ., -, _, @.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="registryName")
    def registry_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the registry in which this schema belongs.
        """
        return pulumi.get(self, "registry_name")

    @registry_name.setter
    def registry_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "registry_name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A key-value map of tags for the schema.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)

    @property
    @pulumi.getter(name="tagsAll")
    def tags_all(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        # NOTE(review): appears to be the provider-computed combined tag map
        # (resource tags plus provider default tags) — confirm against the
        # pulumi-aws provider documentation.
        return pulumi.get(self, "tags_all")

    @tags_all.setter
    def tags_all(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags_all", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the schema. Valid values: `OpenApi3`.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[str]]:
        """
        The version of the schema.
        """
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "version", value)

    @property
    @pulumi.getter(name="versionCreatedDate")
    def version_created_date(self) -> Optional[pulumi.Input[str]]:
        """
        The created date of the version of the schema.
        """
        return pulumi.get(self, "version_created_date")

    @version_created_date.setter
    def version_created_date(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "version_created_date", value)
class Schema(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
content: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
registry_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides an EventBridge Schema resource.
> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
## Example Usage
```python
import pulumi
import json
import pulumi_aws as aws
test_registry = aws.schemas.Registry("testRegistry")
test_schema = aws.schemas.Schema("testSchema",
registry_name=test_registry.name,
type="OpenApi3",
description="The schema definition for my event",
content=json.dumps({
"openapi": "3.0.0",
"info": {
"version": "1.0.0",
"title": "Event",
},
"paths": {},
"components": {
"schemas": {
"Event": {
"type": "object",
"properties": {
"name": {
"type": "string",
},
},
},
},
},
}))
```
## Import
EventBridge schema can be imported using the `name` and `registry_name`, e.g. console
```sh
$ pulumi import aws:schemas/schema:Schema test name/registry
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] content: The schema specification. Must be a valid Open API 3.0 spec.
:param pulumi.Input[str] description: The description of the schema. Maximum of 256 characters.
:param pulumi.Input[str] name: The name of the schema. Maximum of 385 characters consisting of lower case letters, upper case letters, ., -, _, @.
:param pulumi.Input[str] registry_name: The name of the registry in which this schema belongs.
:param pulumi.Input[str] type: The type of the schema. Valid values: `OpenApi3`.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: SchemaArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides an EventBridge Schema resource.
> **Note:** EventBridge was formerly known as CloudWatch Events. The functionality is identical.
## Example Usage
```python
import pulumi
import json
import pulumi_aws as aws
test_registry = aws.schemas.Registry("testRegistry")
test_schema = aws.schemas.Schema("testSchema",
registry_name=test_registry.name,
type="OpenApi3",
description="The schema definition for my event",
content=json.dumps({
"openapi": "3.0.0",
"info": {
"version": "1.0.0",
"title": "Event",
},
"paths": {},
"components": {
"schemas": {
"Event": {
"type": "object",
"properties": {
"name": {
"type": "string",
},
},
},
},
},
}))
```
## Import
EventBridge schema can be imported using the `name` and `registry_name`, e.g. console
```sh
$ pulumi import aws:schemas/schema:Schema test name/registry
```
:param str resource_name: The name of the resource.
:param SchemaArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(SchemaArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
content: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
registry_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
type: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = SchemaArgs.__new__(SchemaArgs)
if content is None and not opts.urn:
raise TypeError("Missing required property 'content'")
__props__.__dict__["content"] = content
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
if registry_name is None and not opts.urn:
raise TypeError("Missing required property 'registry_name'")
__props__.__dict__["registry_name"] = registry_name
__props__.__dict__["tags"] = tags
if type is None and not opts.urn:
raise TypeError("Missing required property 'type'")
__props__.__dict__["type"] = type
__props__.__dict__["arn"] = None
__props__.__dict__["last_modified"] = None
__props__.__dict__["tags_all"] = None
__props__.__dict__["version"] = None
__props__.__dict__["version_created_date"] = None
super(Schema, __self__).__init__(
'aws:schemas/schema:Schema',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
arn: Optional[pulumi.Input[str]] = None,
content: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
last_modified: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
registry_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
type: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None,
version_created_date: Optional[pulumi.Input[str]] = None) -> 'Schema':
"""
Get an existing Schema resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] arn: The Amazon Resource Name (ARN) of the discoverer.
:param pulumi.Input[str] content: The schema specification. Must be a valid Open API 3.0 spec.
:param pulumi.Input[str] description: The description of the schema. Maximum of 256 characters.
:param pulumi.Input[str] last_modified: The last modified date of the schema.
:param pulumi.Input[str] name: The name of the schema. Maximum of 385 characters consisting of lower case letters, upper case letters, ., -, _, @.
:param pulumi.Input[str] registry_name: The name of the registry in which this schema belongs.
:param pulumi.Input[str] type: The type of the schema. Valid values: `OpenApi3`.
:param pulumi.Input[str] version: The version of the schema.
:param pulumi.Input[str] version_created_date: The created date of the version of the schema.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _SchemaState.__new__(_SchemaState)
__props__.__dict__["arn"] = arn
__props__.__dict__["content"] = content
__props__.__dict__["description"] = description
__props__.__dict__["last_modified"] = last_modified
__props__.__dict__["name"] = name
__props__.__dict__["registry_name"] = registry_name
__props__.__dict__["tags"] = tags
__props__.__dict__["tags_all"] = tags_all
__props__.__dict__["type"] = type
__props__.__dict__["version"] = version
__props__.__dict__["version_created_date"] = version_created_date
return Schema(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def arn(self) -> pulumi.Output[str]:
"""
The Amazon Resource Name (ARN) of the discoverer.
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter
def content(self) -> pulumi.Output[str]:
"""
The schema specification. Must be a valid Open API 3.0 spec.
"""
return pulumi.get(self, "content")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
The description of the schema. Maximum of 256 characters.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="lastModified")
def last_modified(self) -> pulumi.Output[str]:
"""
The last modified date of the schema.
"""
return pulumi.get(self, "last_modified")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the schema. Maximum of 385 characters consisting of lower case letters, upper case letters, ., -, _, @.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="registryName")
def registry_name(self) -> pulumi.Output[str]:
"""
The name of the registry in which this schema belongs.
"""
return pulumi.get(self, "registry_name")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="tagsAll")
def tags_all(self) -> pulumi.Output[Mapping[str, str]]:
return pulumi.get(self, "tags_all")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the schema. Valid values: `OpenApi3`.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def version(self) -> pulumi.Output[str]:
"""
The version of the schema.
"""
return pulumi.get(self, "version")
@property
@pulumi.getter(name="versionCreatedDate")
def version_created_date(self) -> pulumi.Output[str]:
"""
The created date of the version of the schema.
"""
return pulumi.get(self, "version_created_date")
| 38.753794 | 154 | 0.595187 | 2,601 | 22,981 | 5.064206 | 0.074971 | 0.099377 | 0.110538 | 0.086851 | 0.854236 | 0.823793 | 0.777407 | 0.74294 | 0.733602 | 0.71037 | 0 | 0.004794 | 0.29198 | 22,981 | 592 | 155 | 38.819257 | 0.804745 | 0.312911 | 0 | 0.633229 | 1 | 0 | 0.078224 | 0.001754 | 0 | 0 | 0 | 0 | 0 | 1 | 0.163009 | false | 0.003135 | 0.015674 | 0.015674 | 0.278997 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4c1a16b59824373399450ccb4c174080029ff4d7 | 26,708 | py | Python | ontoExtractor_b.py | Aliossandro/WDOntoHistory | d9b9abd73a037abab25e36a990bf1d2be8e54ed5 | [
"MIT"
] | null | null | null | ontoExtractor_b.py | Aliossandro/WDOntoHistory | d9b9abd73a037abab25e36a990bf1d2be8e54ed5 | [
"MIT"
] | null | null | null | ontoExtractor_b.py | Aliossandro/WDOntoHistory | d9b9abd73a037abab25e36a990bf1d2be8e54ed5 | [
"MIT"
] | null | null | null | import pandas as pd
import psycopg2
import pickle
import numpy as np
import json
from collections import defaultdict
# counterS = 0
# global counterS
# global valGlob
# from sqlalchemy import create_engine
# -*- coding: utf-8 -*-
import os
import sys
import copy
# fileName = '/Users/alessandro/Documents/PhD/OntoHistory/WDTaxo_October2014.csv'
# def find_all_paths(graph, start, end, path=[]):
# path = path + [start]
# if start == end:
# return [path]
# if start not in graph.keys():
# return []
# paths = []
# for node in graph[start]:
# if node not in set(path):
# newpaths = find_all_paths(graph, node, end, path)
# for newpath in newpaths:
# paths.append(newpath)
# return paths
def DFS(G, v, seen=None, path=None):
    """Enumerate every simple path in graph *G* starting at node *v*.

    *G* maps each node to an iterable of successor nodes.  Returns a list of
    tuples, one per discovered path (the trivial single-node path is not
    included).  ``seen`` and ``path`` are accumulators used internally by the
    recursion; callers normally omit them.  NOTE: a ``seen`` list passed in by
    the caller is mutated, matching the original contract.
    """
    seen = [] if seen is None else seen
    path = [v] if path is None else path
    seen.append(v)
    result = []
    for neighbour in G[v]:
        if neighbour in seen:
            continue  # already on this path; skip to keep paths simple
        extended = path + [neighbour]
        result.append(tuple(extended))
        # Recurse with a copy of `seen` so sibling branches stay independent.
        result.extend(DFS(G, neighbour, list(seen), extended))
    return result
# def get_max_rows(df):
# B_maxes = df.groupby(['statementId', 'statValue']).revId.transform(min) == df['revId']
# return df[B_maxes]
# connection parameters
def get_db_params():
    """Open and return a psycopg2 connection to the local ``wikidb`` database.

    NOTE(review): credentials are hard-coded here; consider reading them from
    environment variables or a config file instead.
    """
    return psycopg2.connect(
        database='wikidb',
        user='postgres',
        password='postSonny175',
        host='localhost',
        port='5432',
    )
# create table
def create_table():
    """Create the two helper tables used by the statistics queries.

    ``tempData`` joins revision timestamps onto P31/P279 statements;
    ``statementDated`` joins revision timestamps onto all statements.
    Both are ``CREATE TABLE IF NOT EXISTS`` so the function is idempotent.
    Errors are printed (not raised), preserving the original best-effort
    behaviour; each statement runs on its own connection which is always
    closed.
    """
    query_table = """CREATE TABLE IF NOT EXISTS tempData AS (SELECT p.itemId, p.revId, (p.timestamp::timestamp) AS tS, t.statementId, t.statProperty, t.statvalue FROM
  (SELECT itemId, revId, timestamp FROM revisionData_201710) p, (SELECT revId, statementId, statProperty, statvalue FROM statementsData_201710 WHERE statProperty = 'P279' OR statProperty = 'P31') t
  WHERE p.revId = t.revId)"""
    queryStatData = """CREATE TABLE IF NOT EXISTS statementDated AS (SELECT p.itemid, p.statproperty, p.statvalue, p.statementid, p.revid, t.timestamp, t.username
  FROM statementsData_201710 p LEFT JOIN revisionData_201710 t ON p.revid::int = t.revid::int);"""
    # The original duplicated the connect/execute/commit/close boilerplate
    # verbatim for each statement; loop over the statements instead.
    for query in (query_table, queryStatData):
        conn = None
        try:
            conn = get_db_params()
            cur = conn.cursor()
            cur.execute(query)
            cur.close()
            conn.commit()
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
        finally:
            if conn is not None:
                conn.close()
def _read_sql_chunked(conn, query, chunksize):
    """Stream *query* from *conn* into a single DataFrame, *chunksize* rows at a time."""
    # pd.concat replaces the deprecated (and now removed) DataFrame.append loop.
    chunks = list(pd.read_sql(query, con=conn, chunksize=chunksize))
    if not chunks:
        return pd.DataFrame()
    return pd.concat(chunks)


def _quantile_triple(values):
    """Return the (25th, 50th, 75th) percentiles of *values* as plain floats.

    Plain floats (not numpy scalars) keep the stats dict JSON-serializable;
    the original used the long-deprecated np.asscalar for the same purpose.
    """
    return (float(np.percentile(values, 25)),
            float(np.percentile(values, 50)),
            float(np.percentile(values, 75)))


def _zero_hierarchy_stats(stats):
    """Zero every hierarchy metric; used when no data predates the snapshot date."""
    for key in ('P279', 'P31', 'relRichness', 'maxDepth', 'avgDepth',
                'medianDepth', 'quantileDepth', 'iRichness',
                'medianInheritance', 'quantileInheritance', 'cRichness',
                'avgPop', 'medianPop', 'quantilePop', 'classesWInstances',
                'noClasses', 'childLessClasses', 'noRoot', 'noLeaf'):
        stats[key] = 0


def _hierarchy_stats(conn, date, stats):
    """Fill *stats* with class-hierarchy metrics for the snapshot *date*.

    Reads every P31/P279 statement revised before *date* from ``tempData``,
    keeps only the newest revision of each statement, writes the resulting
    hierarchy to ``WDHierarchy-<date>.csv`` and derives class counts plus
    population, inheritance and relationship richness.
    """
    query = ("SELECT * FROM tempData WHERE tS < '" + date +
             " 00:00:00'::timestamp;")
    df = _read_sql_chunked(conn, query, 100000)
    if len(df.index) == 0:
        _zero_hierarchy_stats(stats)
        return
    # Drop placeholder values, then keep only the latest revision per statement.
    # (The original months-10-12 branch forgot the novalue/somevalue filters;
    # all months are now filtered consistently.)
    df = df[~df['statvalue'].isin(('deleted', 'novalue', 'somevalue'))]
    latest = df.groupby(['statementid'])['revid'].transform(max) == df['revid']
    dfClean = df[latest]
    dfClean.drop(['statementid', 'ts', 'revid'], axis=1).to_csv(
        "WDHierarchy-" + date + ".csv", index=False)

    # Unique classes referenced, overall and per property.
    stats['uniqueClasses'] = dfClean['statvalue'].nunique()
    uniqueAll = dfClean.groupby('statproperty')['statvalue'].nunique()
    stats['P279'] = int(uniqueAll['P279']) if 'P279' in uniqueAll else 0
    stats['P31'] = int(uniqueAll['P31'])
    print('unique done')

    # Class inventory: roots (never a subclass), leaves, childless roots.
    dfClean['statvalue'] = dfClean['statvalue'].apply(str)
    dfClean['itemid'] = dfClean['itemid'].apply(str)
    is_subclass = dfClean['statproperty'] == 'P279'
    subClasses = list(dfClean['itemid'].loc[is_subclass].unique())
    classesList = list(dfClean['statvalue'].unique())
    rootClasses = list(set(classesList) - set(subClasses))
    leafClasses = list(dfClean['itemid'].loc[
        is_subclass & (~dfClean['itemid'].isin(dfClean['statvalue']))].unique())
    classesList += subClasses
    stats['noClasses'] = len(set(classesList))
    superClasses = list(dfClean['statvalue'].loc[is_subclass].unique())
    stats['childLessClasses'] = len(set(rootClasses) - set(superClasses))
    stats['noRoot'] = len(set(rootClasses))
    stats['noLeaf'] = len(leafClasses)
    print('no classes done')

    # Average population metric and class richness (instances per class).
    classCount = dfClean.groupby('statproperty')['statvalue'].value_counts()
    instancesPerClass = classCount['P31'].to_dict()
    stats['classesWInstances'] = len(instancesPerClass)
    for cl in classesList:
        instancesPerClass.setdefault(cl, 0)
    stats['cRichness'] = len(instancesPerClass) / len(set(classesList))
    instanceList = list(instancesPerClass.values())
    stats['avgPop'] = float(np.mean(instanceList))
    stats['medianPop'] = float(np.median(instanceList))
    stats['quantilePop'] = _quantile_triple(instanceList)
    print('avg population done')

    # Inheritance richness (direct subclasses per class).
    try:
        subclassesPerClass = classCount['P279'].to_dict()
    except KeyError:
        subclassesPerClass = {}
    for cl in classesList:
        subclassesPerClass.setdefault(cl, 0)
    inheritanceList = list(subclassesPerClass.values())
    stats['iRichness'] = float(np.mean(inheritanceList))
    stats['medianInheritance'] = float(np.median(inheritanceList))
    stats['quantileInheritance'] = _quantile_triple(inheritanceList)
    print('inheritance done')

    # Relationship richness: non-subclass relations over all relations.
    try:
        queryRich = ("SELECT itemid, statproperty, statvalue, statementid, "
                     "revid, timestamp FROM statementDated WHERE timestamp < '"
                     + date + " 00:00:00'::timestamp;")
        dfRich = _read_sql_chunked(conn, queryRich, 25000)
        # BUGFIX: the original wrote `(dfRich['itemid'],isin(classesList))`
        # (comma instead of dot) -- a NameError that was silently swallowed,
        # so relRichness always came out 'NA'.
        dfRich = dfRich.loc[dfRich['itemid'].isin(classesList)]
        dfRich = dfRich[~dfRich['statvalue'].isin(('deleted', 'novalue',
                                                   'somevalue'))]
        latestRich = (dfRich.groupby(['statementid'])['revid'].transform(max)
                      == dfRich['revid'])
        richAll = dfRich[latestRich].groupby(
            'statproperty')['statvalue'].nunique()
        stats['relRichness'] = float(
            (richAll.sum() - int(richAll['P279'])) / richAll.sum())
    except Exception:
        stats['relRichness'] = 'NA'


def _property_stats(conn, date, stats):
    """Fill *stats* with property counts/usage for the snapshot *date*.

    Writes ``WDProp-<date>.csv`` (distinct property ids) and
    ``WDPropUse-<date>.csv`` (statement count per property).
    """
    queryProp = ("SELECT DISTINCT itemId FROM (SELECT itemId, "
                 "(timestamp::timestamp) FROM revisionData_201710 WHERE "
                 "timestamp < '" + date + " 00:00:00'::timestamp AND "
                 "itemId ~* 'P[0-9]{1,}') AS fs;")
    dfProp = _read_sql_chunked(conn, queryProp, 500000)
    if len(dfProp.index) != 0:
        dfProp.to_csv("WDProp-" + date + ".csv", index=False)
        stats['noProps'] = dfProp['itemid'].nunique()
    else:
        stats['noProps'] = 0
    try:
        queryPropUse = ("SELECT statproperty, COUNT(*) AS propuse FROM "
                        "(SELECT * FROM statementDated WHERE timestamp < '"
                        + date + " 00:00:00'::timestamp) AS moo "
                        "GROUP BY statproperty;")
        dfPropUse = pd.read_sql(queryPropUse, con=conn)
        if len(dfPropUse.index) != 0:
            dfPropUse.to_csv("WDPropUse-" + date + ".csv", index=False)
            propUseCount = list(dfPropUse['propuse'])
            stats['noPropUseAvg'] = float(np.mean(propUseCount))
            stats['noPropUseMedian'] = float(np.median(propUseCount))
            stats['noPropUseMax'] = max(propUseCount)
            stats['noPropUseMin'] = min(propUseCount)
            stats['noPropUseQuant'] = _quantile_triple(propUseCount)
        else:
            for key in ('noPropUseAvg', 'noPropUseMedian', 'noPropUseMax',
                        'noPropUseMin', 'noPropUseQuant'):
                stats[key] = 0
    except Exception:
        print("propuse not available")


def queryexecutor():
    """Compute monthly ontology statistics for 2015-2017 and dump them to disk.

    For the first of each month the function snapshots the class hierarchy
    (``_hierarchy_stats``) and the property usage (``_property_stats``) as of
    that date.  The accumulated stats dict is checkpointed to JSON at the
    same points the original flushed it: ``WDataStats_2.txt`` after
    September and ``WDataStats_20.txt`` after December of each year, so a
    crash loses at most a few months of work.

    The original duplicated the entire body for months 1-9 and 10-12; the
    two copies had drifted apart (missing value filters, different chunk
    sizes).  They are unified here.
    """
    dictStats = {}
    conn = get_db_params()
    for year in range(15, 18):
        for month in range(1, 13):
            date = "20%d-%02d-01" % (year, month)
            print(date)
            dictStats[date] = {}
            try:
                _hierarchy_stats(conn, date, dictStats[date])
            except Exception as e:
                print(e, "no df available")
            try:
                _property_stats(conn, date, dictStats[date])
            except Exception as e:
                print(e, "no df available")
            if month == 9:
                with open('WDataStats_2.txt', 'w') as myfile:
                    myfile.write(json.dumps(dictStats))
            elif month == 12:
                with open('WDataStats_20.txt', 'w') as myfile:
                    myfile.write(json.dumps(dictStats))
def main():
    """Entry point: run the monthly ontology statistics extraction."""
    # create_table()  # one-off table setup; uncomment on first run
    queryexecutor()
if __name__ == "__main__":
    main()
| 49.095588 | 215 | 0.511008 | 2,407 | 26,708 | 5.640631 | 0.122975 | 0.09575 | 0.045371 | 0.029167 | 0.869559 | 0.8591 | 0.8591 | 0.856301 | 0.856301 | 0.856301 | 0 | 0.02522 | 0.357159 | 26,708 | 543 | 216 | 49.186004 | 0.765566 | 0.154261 | 0 | 0.811798 | 0 | 0.014045 | 0.18117 | 0.010516 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014045 | false | 0.002809 | 0.025281 | 0 | 0.044944 | 0.044944 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4c3d57e9b3728b901248a9b10591137615c6602e | 8,960 | py | Python | venv/lib/python3.6/site-packages/ansible_collections/community/hrobot/tests/unit/plugins/modules/test_failover_ip.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 1 | 2020-01-22T13:11:23.000Z | 2020-01-22T13:11:23.000Z | venv/lib/python3.6/site-packages/ansible_collections/community/hrobot/tests/unit/plugins/modules/test_failover_ip.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 12 | 2020-02-21T07:24:52.000Z | 2020-04-14T09:54:32.000Z | venv/lib/python3.6/site-packages/ansible_collections/community/hrobot/tests/unit/plugins/modules/test_failover_ip.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | null | null | null | # (c) 2020 Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible_collections.community.internal_test_tools.tests.unit.utils.fetch_url_module_framework import (
FetchUrlCall,
BaseTestModule,
)
from ansible_collections.community.hrobot.plugins.module_utils.robot import BASE_URL
from ansible_collections.community.hrobot.plugins.modules import failover_ip
class TestHetznerFailoverIP(BaseTestModule):
MOCK_ANSIBLE_MODULEUTILS_BASIC_ANSIBLEMODULE = 'ansible_collections.community.hrobot.plugins.modules.failover_ip.AnsibleModule'
MOCK_ANSIBLE_MODULEUTILS_URLS_FETCH_URL = 'ansible_collections.community.hrobot.plugins.module_utils.robot.fetch_url'
# Tests for state idempotence (routed and unrouted)
    def test_unrouted(self, mocker):
        """Requesting 'unrouted' on an already-unrouted IP must be a no-op.

        The mocked robot API reports active_server_ip=None, so the module
        issues only the GET status call and reports changed=False.
        """
        result = self.run_module_success(mocker, failover_ip, {
            'hetzner_user': '',
            'hetzner_password': '',
            'failover_ip': '1.2.3.4',
            'state': 'unrouted',
        }, [
            FetchUrlCall('GET', 200)
            .result_json({
                'failover': {
                    'ip': '1.2.3.4',
                    'netmask': '255.255.255.255',
                    'server_ip': '2.3.4.5',
                    'server_number': 2345,
                    'active_server_ip': None,
                },
            })
            .expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
        ])
        assert result['changed'] is False
        assert result['value'] is None
        assert result['state'] == 'unrouted'
    def test_routed(self, mocker):
        """Requesting 'routed' to the already-active target must be a no-op.

        The mocked robot API reports active_server_ip equal to the requested
        value, so only the GET status call happens and changed is False.
        """
        result = self.run_module_success(mocker, failover_ip, {
            'hetzner_user': '',
            'hetzner_password': '',
            'failover_ip': '1.2.3.4',
            'state': 'routed',
            'value': '4.3.2.1',
        }, [
            FetchUrlCall('GET', 200)
            .result_json({
                'failover': {
                    'ip': '1.2.3.4',
                    'netmask': '255.255.255.255',
                    'server_ip': '2.3.4.5',
                    'server_number': 2345,
                    'active_server_ip': '4.3.2.1',
                },
            })
            .expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
        ])
        assert result['changed'] is False
        assert result['value'] == '4.3.2.1'
        assert result['state'] == 'routed'
# Tests for changing state (unrouted to routed, vice versa)
def test_unrouted_to_routed(self, mocker):
result = self.run_module_success(mocker, failover_ip, {
'hetzner_user': '',
'hetzner_password': '',
'failover_ip': '1.2.3.4',
'state': 'routed',
'value': '4.3.2.1',
}, [
FetchUrlCall('GET', 200)
.result_json({
'failover': {
'ip': '1.2.3.4',
'netmask': '255.255.255.255',
'server_ip': '2.3.4.5',
'server_number': 2345,
'active_server_ip': None,
},
})
.expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
FetchUrlCall('POST', 200)
.result_json({
'failover': {
'ip': '1.2.3.4',
'netmask': '255.255.255.255',
'server_ip': '2.3.4.5',
'server_number': 2345,
'active_server_ip': '4.3.2.1',
},
})
.expect_form_value('active_server_ip', '4.3.2.1')
.expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
])
assert result['changed'] is True
assert result['value'] == '4.3.2.1'
assert result['state'] == 'routed'
def test_unrouted_to_routed_check_mode(self, mocker):
result = self.run_module_success(mocker, failover_ip, {
'hetzner_user': '',
'hetzner_password': '',
'failover_ip': '1.2.3.4',
'state': 'routed',
'value': '4.3.2.1',
'_ansible_check_mode': True,
}, [
FetchUrlCall('GET', 200)
.result_json({
'failover': {
'ip': '1.2.3.4',
'netmask': '255.255.255.255',
'server_ip': '2.3.4.5',
'server_number': 2345,
'active_server_ip': None,
},
})
.expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
])
assert result['changed'] is True
assert result['value'] == '4.3.2.1'
assert result['state'] == 'routed'
def test_routed_to_unrouted(self, mocker):
result = self.run_module_success(mocker, failover_ip, {
'hetzner_user': '',
'hetzner_password': '',
'failover_ip': '1.2.3.4',
'state': 'unrouted',
}, [
FetchUrlCall('GET', 200)
.result_json({
'failover': {
'ip': '1.2.3.4',
'netmask': '255.255.255.255',
'server_ip': '2.3.4.5',
'server_number': 2345,
'active_server_ip': '4.3.2.1',
},
})
.expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
FetchUrlCall('DELETE', 200)
.result_json({
'failover': {
'ip': '1.2.3.4',
'netmask': '255.255.255.255',
'server_ip': '2.3.4.5',
'server_number': 2345,
'active_server_ip': None,
},
})
.expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
])
assert result['changed'] is True
assert result['value'] is None
assert result['state'] == 'unrouted'
# Tests for re-routing
def test_rerouting(self, mocker):
result = self.run_module_success(mocker, failover_ip, {
'hetzner_user': '',
'hetzner_password': '',
'failover_ip': '1.2.3.4',
'state': 'routed',
'value': '4.3.2.1',
}, [
FetchUrlCall('GET', 200)
.result_json({
'failover': {
'ip': '1.2.3.4',
'netmask': '255.255.255.255',
'server_ip': '2.3.4.5',
'server_number': 2345,
'active_server_ip': '5.4.3.2',
},
})
.expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
FetchUrlCall('POST', 200)
.result_json({
'failover': {
'ip': '1.2.3.4',
'netmask': '255.255.255.255',
'server_ip': '2.3.4.5',
'server_number': 2345,
'active_server_ip': '4.3.2.1',
},
})
.expect_form_value('active_server_ip', '4.3.2.1')
.expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
])
assert result['changed'] is True
assert result['value'] == '4.3.2.1'
assert result['state'] == 'routed'
def test_rerouting_already_routed(self, mocker):
result = self.run_module_success(mocker, failover_ip, {
'hetzner_user': '',
'hetzner_password': '',
'failover_ip': '1.2.3.4',
'state': 'routed',
'value': '4.3.2.1',
}, [
FetchUrlCall('GET', 200)
.result_json({
'failover': {
'ip': '1.2.3.4',
'netmask': '255.255.255.255',
'server_ip': '2.3.4.5',
'server_number': 2345,
'active_server_ip': '5.4.3.2',
},
})
.expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
FetchUrlCall('POST', 409)
.result_json({
'error': {
'status': 409,
'code': 'FAILOVER_ALREADY_ROUTED',
'message': 'Failover already routed',
},
'failover': {
'ip': '1.2.3.4',
'netmask': '255.255.255.255',
'server_ip': '2.3.4.5',
'server_number': 2345,
'active_server_ip': '4.3.2.1',
},
})
.expect_form_value('active_server_ip', '4.3.2.1')
.expect_url('{0}/failover/1.2.3.4'.format(BASE_URL)),
])
assert result['changed'] is False
assert result['value'] == '4.3.2.1'
assert result['state'] == 'routed'
| 36.571429 | 131 | 0.460826 | 951 | 8,960 | 4.144059 | 0.119874 | 0.020299 | 0.030449 | 0.029434 | 0.815529 | 0.807156 | 0.781274 | 0.781274 | 0.752855 | 0.737123 | 0 | 0.085295 | 0.383705 | 8,960 | 244 | 132 | 36.721311 | 0.628396 | 0.029129 | 0 | 0.834081 | 0 | 0 | 0.253307 | 0.020016 | 0 | 0 | 0 | 0 | 0.09417 | 1 | 0.03139 | false | 0.03139 | 0.017937 | 0 | 0.06278 | 0.004484 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4c62bd1071abd4be60a9e6f828e5f4afe009fb30 | 13,450 | py | Python | utils/multiwoz/dbPointer.py | jshin49/simpletod | 55d6bf26d7f107dbf01795bc40b9f075292459cf | [
"BSD-3-Clause"
] | 167 | 2020-07-26T08:33:54.000Z | 2022-03-24T13:30:07.000Z | utils/multiwoz/dbPointer.py | jshin49/simpletod | 55d6bf26d7f107dbf01795bc40b9f075292459cf | [
"BSD-3-Clause"
] | 27 | 2020-07-27T07:07:05.000Z | 2021-11-30T11:10:36.000Z | utils/multiwoz/dbPointer.py | jshin49/simpletod | 55d6bf26d7f107dbf01795bc40b9f075292459cf | [
"BSD-3-Clause"
] | 62 | 2020-08-02T18:25:55.000Z | 2022-02-06T15:04:57.000Z | import sqlite3
import numpy as np
from .nlp import normalize
import os
# Root of the MultiWOZ resources; per-domain sqlite files live under PATH/db/.
PATH = './utils/multiwoz'

# loading databases: open one sqlite cursor per domain at import time and
# keep them in `dbs` for the query helpers below.
# NOTE(review): connections are opened at import and never closed — fine for
# a long-lived process, but worth confirming for short-lived tooling.
domains = ['restaurant', 'hotel', 'attraction', 'train', 'taxi', 'hospital']#, 'police']
dbs = {}
for domain in domains:
    db = os.path.join(PATH, 'db/{}-dbase.db'.format(domain))
    conn = sqlite3.connect(db)
    c = conn.cursor()
    dbs[domain] = c
def convert_dbpointer_to_text(vect, goal, belief):
    """Render the binary DB-pointer vector as human-readable text.

    The pointer layout is: four 6-slot one-hot match-count buckets for
    restaurant/hotel/attraction/train (vect[0:24]), then three 2-slot
    booking-availability pairs for restaurant (vect[24:26]),
    hotel (vect[26:28]) and train (vect[28:30]).

    :param vect: pointer vector (numpy array or sequence, length 30).
    :param goal: dict whose keys are the domains present in the dialogue goal.
    :param belief: iterable of belief-state entries; entry[0] is the domain name.
    :return: ' , '-joined string like 'restaurant match=1 booking=available'.
    :raises ValueError: if a selected domain's 6-slot segment is not one-hot.
    """
    # Fix: the original defined this list but then read the module-level
    # `domains` global for the same four names; use the local list.
    domain_in_pointer = ['restaurant', 'hotel', 'attraction', 'train']

    def decode_one_hot(vec):
        # Each domain segment must be a strict one-hot 6-vector; return the
        # hot position, mirroring the original six exact-equality checks.
        for pos in range(6):
            pattern = np.zeros(6, dtype=int)
            pattern[pos] = 1
            if np.all(vec == pattern):
                return pos
        raise ValueError('invalid domain match')

    restaurant_book_vec = vect[24:26]
    hotel_book_vec = vect[26:28]
    train_book_vec = vect[28:]
    text = []
    for idx, domain in enumerate(domain_in_pointer):
        if domain not in goal:
            continue
        # skip domains with no belief-state entry
        if not any(bs[0] == domain for bs in belief):
            continue
        domain_vec = vect[idx * 6: idx * 6 + 6]
        bucket = decode_one_hot(domain_vec)
        if domain != 'train':
            # buckets map directly to counts 0..4, then ">=5"
            domain_match_text = '>=5' if bucket >= 5 else '={}'.format(bucket)
            booked = ((domain == 'restaurant' and np.all(restaurant_book_vec == np.array([0, 1]))) or
                      (domain == 'hotel' and np.all(hotel_book_vec == np.array([0, 1]))))
            # attraction has no booking pair, so it always reads "not available"
        else:
            # train buckets are coarse count ranges: 0, <3, <6, <11, <41, >40
            domain_match = [0, 2, 5, 10, 40, 41][bucket]
            domain_match_text = {0: '=0', 2: '<3', 5: '<6',
                                 10: '<11', 40: '<41', 41: '>40'}[domain_match]
            booked = np.all(train_book_vec == np.array([0, 1]))
        availability = 'available' if booked else 'not available'
        text.append('{} match{} booking={}'.format(domain, domain_match_text, availability))
    return ' , '.join(text)
def convert_dbpointer_to_text_nmatch(vect, goal, belief):
    """Render the DB-pointer vector as text with match counts only.

    Same pointer layout as `convert_dbpointer_to_text` (four 6-slot one-hot
    match buckets in vect[0:24]) but the booking-availability suffix is
    intentionally omitted from the output.

    :param vect: pointer vector (numpy array or sequence, length 30).
    :param goal: dict whose keys are the domains present in the dialogue goal.
    :param belief: iterable of belief-state entries; entry[0] is the domain name.
    :return: ' , '-joined string like 'restaurant match=1'.
    :raises ValueError: if a selected domain's 6-slot segment is not one-hot.
    """
    # Fix: use the local domain list instead of the module-level `domains`
    # global; dropped the unused booking-vector locals and the dead
    # commented-out booking text from the original.
    domain_in_pointer = ['restaurant', 'hotel', 'attraction', 'train']

    def decode_one_hot(vec):
        # Each domain segment must be a strict one-hot 6-vector.
        for pos in range(6):
            pattern = np.zeros(6, dtype=int)
            pattern[pos] = 1
            if np.all(vec == pattern):
                return pos
        raise ValueError('invalid domain match')

    text = []
    for idx, domain in enumerate(domain_in_pointer):
        if domain not in goal:
            continue
        # skip domains with no belief-state entry
        if not any(bs[0] == domain for bs in belief):
            continue
        domain_vec = vect[idx * 6: idx * 6 + 6]
        bucket = decode_one_hot(domain_vec)
        if domain != 'train':
            domain_match_text = '>=5' if bucket >= 5 else '={}'.format(bucket)
        else:
            # train buckets are coarse count ranges: 0, <3, <6, <11, <41, >40
            domain_match = [0, 2, 5, 10, 40, 41][bucket]
            domain_match_text = {0: '=0', 2: '<3', 5: '<6',
                                 10: '<11', 40: '<41', 41: '>40'}[domain_match]
        text.append('{} match{}'.format(domain, domain_match_text))
    return ' , '.join(text)
def oneHotVector(num, domain, vector):
    """Write a one-hot bucket encoding of `num` into `domain`'s 6-slot
    segment of `vector` (in place) and return the vector.

    Non-train domains use exact buckets 0..4 plus a ">=5" bucket; the train
    domain buckets counts as 0, <=2, <=5, <=10, <=40, >40.

    :param num: number of matching DB entities (assumed a non-negative int —
        counts come from len(fetchall()); for any other value the non-train
        branch leaves the vector untouched, as in the original).
    :param domain: domain name; must appear in the module-level `domains` list.
    :param vector: mutable sequence covering slot (idx*6, idx*6+6); mutated.
    :return: the (mutated) vector.
    """
    # `idx` was computed identically in both branches of the original;
    # the unused `number_of_options` local has been dropped.
    idx = domains.index(domain)
    if domain != 'train':
        if num in (0, 1, 2, 3, 4):
            bucket = num
        elif num >= 5:
            bucket = 5
        else:
            bucket = None  # mirrors the original fall-through: no write
    else:
        if num == 0:
            bucket = 0
        elif num <= 2:
            bucket = 1
        elif num <= 5:
            bucket = 2
        elif num <= 10:
            bucket = 3
        elif num <= 40:
            bucket = 4
        else:
            bucket = 5
    if bucket is not None:
        one_hot = np.zeros(6, dtype=int)
        one_hot[bucket] = 1
        vector[idx * 6: idx * 6 + 6] = one_hot
    return vector
def queryResult(domain, turn):
    """Return the number of DB entities for `domain` matching the
    belief-state annotation of this turn.

    :param domain: MultiWOZ domain name; also the sqlite table name.
    :param turn: MultiWOZ turn dict; constraints are read from
        turn['metadata'][domain]['semi'].
    :return: int count of matching rows.
    """
    # Values that mean "no constraint on this slot".
    skip_values = ("", "dont care", "not mentioned", "don't care",
                   "dontcare", "do n't care")
    conditions = []
    for key, val in turn['metadata'][domain]['semi'].items():
        if val in skip_values:
            continue
        # Escape single quotes for SQL string literals.
        val2 = val.replace("'", "''")
        # leaveAt/arriveBy are compared as ranges, everything else exactly.
        if key == 'leaveAt':
            conditions.append(key + " > '" + val2 + "'")
        elif key == 'arriveBy':
            conditions.append(key + " < '" + val2 + "'")
        else:
            conditions.append(key + "='" + val2 + "'")
    sql_query = "select * from {}".format(domain)
    if conditions:
        sql_query += " where " + " and ".join(conditions)
    # NOTE(review): values are interpolated into the SQL text; quotes are
    # escaped above, but parameterized queries (?) would be safer if slot
    # values can ever come from untrusted input.
    num_entities = len(dbs[domain].execute(sql_query).fetchall())
    return num_entities
def queryResultVenues(domain, turn, real_belief=False):
    """Return the DB rows (venues) in `domain` matching this turn's constraints.

    :param domain: MultiWOZ domain name; also the sqlite table name.
    :param turn: constraint source, interpreted per `real_belief`:
        True      -> `turn` is already a {slot: value} dict;
        'tracking' -> `turn[domain]` holds 'domain-slot-value' triples
                      (tracker output);
        otherwise -> a MultiWOZ turn dict (turn['metadata'][domain]['semi']).
    :return: list of matching rows, or [] if the SQL query fails.
    """
    sql_query = "select * from {}".format(domain)
    if real_belief == True:
        items = turn.items()
    elif real_belief == 'tracking':
        # BUG FIX: `flag` was read below without ever being assigned in this
        # branch, so any usable slot raised NameError.
        flag = True
        for slot in turn[domain]:
            # slots arrive as 'domain-slot-value' triples
            key = slot[0].split("-")[1]
            val = slot[0].split("-")[2]
            # map tracker slot names onto DB column names
            if key == "price range":
                key = "pricerange"
            elif key == "leave at":
                key = "leaveAt"
            elif key == "arrive by":
                key = "arriveBy"
            if val == "do n't care":
                pass
            else:
                if flag:
                    sql_query += " where "
                    val2 = val.replace("'", "''")  # escape quotes for SQL
                    val2 = normalize(val2)
                    if key == 'leaveAt':
                        sql_query += key + " > " + r"'" + val2 + r"'"
                    elif key == 'arriveBy':
                        sql_query += key + " < " + r"'" + val2 + r"'"
                    else:
                        sql_query += r" " + key + "=" + r"'" + val2 + r"'"
                    flag = False
                else:
                    val2 = val.replace("'", "''")
                    val2 = normalize(val2)
                    if key == 'leaveAt':
                        sql_query += r" and " + key + " > " + r"'" + val2 + r"'"
                    elif key == 'arriveBy':
                        sql_query += r" and " + key + " < " + r"'" + val2 + r"'"
                    else:
                        sql_query += r" and " + key + "=" + r"'" + val2 + r"'"
        try:  # e.g. "select * from attraction where name = 'queens college'"
            return dbs[domain].execute(sql_query).fetchall()
        except Exception:
            return []  # TODO test it
    else:
        items = turn['metadata'][domain]['semi'].items()
    flag = True
    for key, val in items:
        # skip unconstrained slots (all spellings of "don't care")
        if val in ("", "dontcare", "not mentioned", "don't care",
                   "dont care", "do n't care"):
            pass
        else:
            if flag:
                sql_query += " where "
                val2 = val.replace("'", "''")  # escape quotes for SQL
                val2 = normalize(val2)
                if key == 'leaveAt':
                    sql_query += r" " + key + " > " + r"'" + val2 + r"'"
                elif key == 'arriveBy':
                    sql_query += r" " + key + " < " + r"'" + val2 + r"'"
                else:
                    sql_query += r" " + key + "=" + r"'" + val2 + r"'"
                flag = False
            else:
                val2 = val.replace("'", "''")
                val2 = normalize(val2)
                if key == 'leaveAt':
                    sql_query += r" and " + key + " > " + r"'" + val2 + r"'"
                elif key == 'arriveBy':
                    sql_query += r" and " + key + " < " + r"'" + val2 + r"'"
                else:
                    sql_query += r" and " + key + "=" + r"'" + val2 + r"'"
    try:  # "select * from attraction where name = 'queens college'"
        return dbs[domain].execute(sql_query).fetchall()
    except Exception:
        return []  # TODO test it
| 39.675516 | 164 | 0.458587 | 1,651 | 13,450 | 3.622653 | 0.092065 | 0.040127 | 0.036114 | 0.024076 | 0.857215 | 0.834309 | 0.825447 | 0.825447 | 0.822939 | 0.790169 | 0 | 0.050926 | 0.38974 | 13,450 | 338 | 165 | 39.792899 | 0.677753 | 0.091152 | 0 | 0.818505 | 0 | 0 | 0.075616 | 0 | 0 | 0 | 0 | 0.002959 | 0 | 1 | 0.017794 | false | 0.014235 | 0.014235 | 0 | 0.060498 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d5f6e618f4b253796012ca2e4cf3936aeecad609 | 195,285 | py | Python | env/Lib/site-packages/algorithmia_api_client/api/default_api.py | Vivek-Kamboj/Sargam | 0f3ca5c70ddb722dd40a45373abd0e9b3939064e | [
"MIT"
] | null | null | null | env/Lib/site-packages/algorithmia_api_client/api/default_api.py | Vivek-Kamboj/Sargam | 0f3ca5c70ddb722dd40a45373abd0e9b3939064e | [
"MIT"
] | 5 | 2021-04-25T08:16:09.000Z | 2022-03-12T00:42:14.000Z | env/Lib/site-packages/algorithmia_api_client/api/default_api.py | Vivek-Kamboj/Sargam | 0f3ca5c70ddb722dd40a45373abd0e9b3939064e | [
"MIT"
] | 1 | 2021-10-01T14:32:25.000Z | 2021-10-01T14:32:25.000Z | # coding: utf-8
"""
Algorithmia Management APIs
APIs for managing actions on the Algorithmia platform # noqa: E501
OpenAPI spec version: 1.0.1
Contact: support@algorithmia.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from algorithmia_api_client.api_client import ApiClient
class DefaultApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when the caller does
        # not inject one; every endpoint method delegates HTTP work to it.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
def add_organization_member(self, org_name, username, **kwargs): # noqa: E501
"""Add a user as a member of a given organization # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_organization_member(org_name, username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str org_name: Organization name (required)
:param str username: The user's username (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_organization_member_with_http_info(org_name, username, **kwargs) # noqa: E501
else:
(data) = self.add_organization_member_with_http_info(org_name, username, **kwargs) # noqa: E501
return data
    def add_organization_member_with_http_info(self, org_name, username, **kwargs):  # noqa: E501
        """Add a user as a member of a given organization  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.add_organization_member_with_http_info(org_name, username, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str org_name: Organization name (required)
        :param str username: The user's username (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = ['org_name', 'username']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject any keyword argument this generated method does not recognize.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method add_organization_member" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'org_name' is set
        if ('org_name' not in local_var_params or
                local_var_params['org_name'] is None):
            raise ValueError("Missing the required parameter `org_name` when calling `add_organization_member`")  # noqa: E501
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `add_organization_member`")  # noqa: E501
        # Client-side validation mirroring the API spec: names are at most 30
        # chars and must match the identifier pattern.
        if ('org_name' in local_var_params and
                len(local_var_params['org_name']) > 30):
            raise ValueError("Invalid value for parameter `org_name` when calling `add_organization_member`, length must be less than or equal to `30`")  # noqa: E501
        if 'org_name' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['org_name']):  # noqa: E501
            raise ValueError("Invalid value for parameter `org_name` when calling `add_organization_member`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
        if ('username' in local_var_params and
                len(local_var_params['username']) > 30):
            raise ValueError("Invalid value for parameter `username` when calling `add_organization_member`, length must be less than or equal to `30`")  # noqa: E501
        if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
            raise ValueError("Invalid value for parameter `username` when calling `add_organization_member`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
        # Assemble path/query/header/form containers for the request.
        collection_formats = {}
        path_params = {}
        if 'org_name' in local_var_params:
            path_params['orgName'] = local_var_params['org_name']  # noqa: E501
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501
        # Delegate the HTTP round trip to the shared ApiClient.
        return self.api_client.call_api(
            '/organizations/{orgName}/members/{username}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def algorithms_username_algoname_compile_post(self, username, algoname, **kwargs): # noqa: E501
"""Compile algorithm # noqa: E501
Compiles the latest available source code for an algorithm, resulting in a new hash version of an algorithm available for publishing. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.algorithms_username_algoname_compile_post(username, algoname, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The user's username (required)
:param str algoname: Algorithm name (required)
:return: VersionResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.algorithms_username_algoname_compile_post_with_http_info(username, algoname, **kwargs) # noqa: E501
else:
(data) = self.algorithms_username_algoname_compile_post_with_http_info(username, algoname, **kwargs) # noqa: E501
return data
    def algorithms_username_algoname_compile_post_with_http_info(self, username, algoname, **kwargs):  # noqa: E501
        """Compile algorithm  # noqa: E501
        Compiles the latest available source code for an algorithm, resulting in a new hash version of an algorithm available for publishing.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.algorithms_username_algoname_compile_post_with_http_info(username, algoname, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str username: The user's username (required)
        :param str algoname: Algorithm name (required)
        :return: VersionResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = ['username', 'algoname']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject any keyword argument this generated method does not recognize.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method algorithms_username_algoname_compile_post" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `algorithms_username_algoname_compile_post`")  # noqa: E501
        # verify the required parameter 'algoname' is set
        if ('algoname' not in local_var_params or
                local_var_params['algoname'] is None):
            raise ValueError("Missing the required parameter `algoname` when calling `algorithms_username_algoname_compile_post`")  # noqa: E501
        # NOTE(review): only `username` gets length/pattern validation here;
        # `algoname` is unconstrained — presumably the API spec declares no
        # constraints for it; confirm against the OpenAPI definition.
        if ('username' in local_var_params and
                len(local_var_params['username']) > 30):
            raise ValueError("Invalid value for parameter `username` when calling `algorithms_username_algoname_compile_post`, length must be less than or equal to `30`")  # noqa: E501
        if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
            raise ValueError("Invalid value for parameter `username` when calling `algorithms_username_algoname_compile_post`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
        # Assemble path/query/header/form containers for the request.
        collection_formats = {}
        path_params = {}
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501
        if 'algoname' in local_var_params:
            path_params['algoname'] = local_var_params['algoname']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501
        # Delegate the HTTP round trip to the shared ApiClient.
        return self.api_client.call_api(
            '/algorithms/{username}/{algoname}/compile', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='VersionResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def commit_log_for_repository(self, username, algoname, **kwargs): # noqa: E501
"""Gets the list of commits for a repository # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.commit_log_for_repository(username, algoname, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The user's username (required)
:param str algoname: Algorithm name (required)
:param str since: The first commit SHA of the repository to list in the commits. This is included in the result list.
:param str until: The last commit SHA of the repository to list in the commits. This is included in the result list.
:return: RepositoryCommitLog
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.commit_log_for_repository_with_http_info(username, algoname, **kwargs) # noqa: E501
else:
(data) = self.commit_log_for_repository_with_http_info(username, algoname, **kwargs) # noqa: E501
return data
    def commit_log_for_repository_with_http_info(self, username, algoname, **kwargs):  # noqa: E501
        """Gets the list of commits for a repository  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.commit_log_for_repository_with_http_info(username, algoname, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str username: The user's username (required)
        :param str algoname: Algorithm name (required)
        :param str since: The first commit SHA of the repository to list in the commits. This is included in the result list.
        :param str until: The last commit SHA of the repository to list in the commits. This is included in the result list.
        :return: RepositoryCommitLog
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = ['username', 'algoname', 'since', 'until']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject any keyword argument this generated method does not recognize.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method commit_log_for_repository" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in local_var_params or
                local_var_params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `commit_log_for_repository`")  # noqa: E501
        # verify the required parameter 'algoname' is set
        if ('algoname' not in local_var_params or
                local_var_params['algoname'] is None):
            raise ValueError("Missing the required parameter `algoname` when calling `commit_log_for_repository`")  # noqa: E501
        # Client-side validation mirroring the API spec (username only).
        if ('username' in local_var_params and
                len(local_var_params['username']) > 30):
            raise ValueError("Invalid value for parameter `username` when calling `commit_log_for_repository`, length must be less than or equal to `30`")  # noqa: E501
        if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
            raise ValueError("Invalid value for parameter `username` when calling `commit_log_for_repository`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
        # Assemble path/query/header/form containers for the request; the
        # optional since/until SHAs travel as query parameters.
        collection_formats = {}
        path_params = {}
        if 'username' in local_var_params:
            path_params['username'] = local_var_params['username']  # noqa: E501
        if 'algoname' in local_var_params:
            path_params['algoname'] = local_var_params['algoname']  # noqa: E501
        query_params = []
        if 'since' in local_var_params:
            query_params.append(('since', local_var_params['since']))  # noqa: E501
        if 'until' in local_var_params:
            query_params.append(('until', local_var_params['until']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['ApiKeyAuth']  # noqa: E501
        # Delegate the HTTP round trip to the shared ApiClient.
        return self.api_client.call_api(
            '/algorithms/{username}/{algoname}/log', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='RepositoryCommitLog',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def create_algorithm(self, username, create_request, **kwargs): # noqa: E501
"""Create Algorithm # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_algorithm(username, create_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The user's username (required)
:param CreateRequest create_request: Algorithm Create Request (required)
:return: HashResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_algorithm_with_http_info(username, create_request, **kwargs) # noqa: E501
else:
(data) = self.create_algorithm_with_http_info(username, create_request, **kwargs) # noqa: E501
return data
def create_algorithm_with_http_info(self, username, create_request, **kwargs):  # noqa: E501
    """Create Algorithm  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_algorithm_with_http_info(username, create_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param CreateRequest create_request: Algorithm Create Request (required)
    :return: HashResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments (including ``kwargs``) so every
    # parameter can be looked up by name; ``kwargs`` is merged in and then
    # removed below.
    local_var_params = locals()

    # Keyword arguments accepted by this endpoint.
    all_params = ['username', 'create_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the recognised ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_algorithm" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `create_algorithm`")  # noqa: E501
    # verify the required parameter 'create_request' is set
    if ('create_request' not in local_var_params or
            local_var_params['create_request'] is None):
        raise ValueError("Missing the required parameter `create_request` when calling `create_algorithm`")  # noqa: E501

    # Server-side constraints on ``username``: at most 30 chars,
    # identifier-like (letter first, then letters/digits/underscores).
    if ('username' in local_var_params and
            len(local_var_params['username']) > 30):
        raise ValueError("Invalid value for parameter `username` when calling `create_algorithm`, length must be less than or equal to `30`")  # noqa: E501
    if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `create_algorithm`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}

    # Assemble path/query/header/form/file parameters for the request.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The create request object is sent as the JSON request body.
    body_params = None
    if 'create_request' in local_var_params:
        body_params = local_var_params['create_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/algorithms/{username}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='HashResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_organization(self, organization, **kwargs):  # noqa: E501
    """Creates an Organization owned by the caller.  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned and the result is retrieved via
    ``thread.get()``.

    >>> thread = api.create_organization(organization, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Organization organization: Request with Organization (required)
    :return: Organization
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the HTTP payload.
    kwargs['_return_http_data_only'] = True
    # Synchronous data and the async request thread are both propagated
    # unchanged from the *_with_http_info* variant.
    return self.create_organization_with_http_info(organization, **kwargs)  # noqa: E501
def create_organization_with_http_info(self, organization, **kwargs):  # noqa: E501
    """Creates an Organization owned by the caller  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_organization_with_http_info(organization, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param Organization organization: Request with Organization (required)
    :return: Organization
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments; ``kwargs`` is merged in and then
    # removed below so parameters can be looked up uniformly by name.
    local_var_params = locals()

    # Keyword arguments accepted by this endpoint.
    all_params = ['organization']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the recognised ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_organization" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'organization' is set
    if ('organization' not in local_var_params or
            local_var_params['organization'] is None):
        raise ValueError("Missing the required parameter `organization` when calling `create_organization`")  # noqa: E501

    collection_formats = {}

    # No path or query parameters for this endpoint.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The organization object is sent as the JSON request body.
    body_params = None
    if 'organization' in local_var_params:
        body_params = local_var_params['organization']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/organizations', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Organization',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_user(self, user, **kwargs):  # noqa: E501
    """Creates an Algorithmia user in the system.  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned and the result is retrieved via
    ``thread.get()``.

    >>> thread = api.create_user(user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param User user: Request with User (required)
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the HTTP payload.
    kwargs['_return_http_data_only'] = True
    # Synchronous data and the async request thread are both propagated
    # unchanged from the *_with_http_info* variant.
    return self.create_user_with_http_info(user, **kwargs)  # noqa: E501
def create_user_with_http_info(self, user, **kwargs):  # noqa: E501
    """Creates an Algorithmia user in the system  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_user_with_http_info(user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param User user: Request with User (required)
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments; ``kwargs`` is merged in and then
    # removed below so parameters can be looked up uniformly by name.
    local_var_params = locals()

    # Keyword arguments accepted by this endpoint.
    all_params = ['user']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the recognised ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_user" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'user' is set
    if ('user' not in local_var_params or
            local_var_params['user'] is None):
        raise ValueError("Missing the required parameter `user` when calling `create_user`")  # noqa: E501

    collection_formats = {}

    # No path or query parameters for this endpoint.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The user object is sent as the JSON request body.
    body_params = None
    if 'user' in local_var_params:
        body_params = local_var_params['user']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/users', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='User',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_algorithm(self, username, algoname, **kwargs):  # noqa: E501
    """Delete Algorithm.  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned and the result is retrieved via
    ``thread.get()``.

    >>> thread = api.delete_algorithm(username, algoname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :return: VersionResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the HTTP payload.
    kwargs['_return_http_data_only'] = True
    # Synchronous data and the async request thread are both propagated
    # unchanged from the *_with_http_info* variant.
    return self.delete_algorithm_with_http_info(username, algoname, **kwargs)  # noqa: E501
def delete_algorithm_with_http_info(self, username, algoname, **kwargs):  # noqa: E501
    """Delete Algorithm  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_algorithm_with_http_info(username, algoname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :return: VersionResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments; ``kwargs`` is merged in and then
    # removed below so parameters can be looked up uniformly by name.
    local_var_params = locals()

    # Keyword arguments accepted by this endpoint.
    all_params = ['username', 'algoname']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the recognised ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_algorithm" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `delete_algorithm`")  # noqa: E501
    # verify the required parameter 'algoname' is set
    if ('algoname' not in local_var_params or
            local_var_params['algoname'] is None):
        raise ValueError("Missing the required parameter `algoname` when calling `delete_algorithm`")  # noqa: E501

    # Server-side constraints on ``username``: at most 30 chars,
    # identifier-like (letter first, then letters/digits/underscores).
    if ('username' in local_var_params and
            len(local_var_params['username']) > 30):
        raise ValueError("Invalid value for parameter `username` when calling `delete_algorithm`, length must be less than or equal to `30`")  # noqa: E501
    if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `delete_algorithm`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}

    # Both parameters are substituted into the request path.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'algoname' in local_var_params:
        path_params['algoname'] = local_var_params['algoname']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE request carries no body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/algorithms/{username}/{algoname}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VersionResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_config_entry(self, keyname, **kwargs):  # noqa: E501
    """Delete a config entry represented by the provided keyname.  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned and the result is retrieved via
    ``thread.get()``.

    >>> thread = api.delete_config_entry(keyname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str keyname: The keyname for the config map entry (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the HTTP payload.
    kwargs['_return_http_data_only'] = True
    # Synchronous data and the async request thread are both propagated
    # unchanged from the *_with_http_info* variant.
    return self.delete_config_entry_with_http_info(keyname, **kwargs)  # noqa: E501
def delete_config_entry_with_http_info(self, keyname, **kwargs):  # noqa: E501
    """Delete a config entry represented by the provided keyname  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_config_entry_with_http_info(keyname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str keyname: The keyname for the config map entry (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments; ``kwargs`` is merged in and then
    # removed below so parameters can be looked up uniformly by name.
    local_var_params = locals()

    # Keyword arguments accepted by this endpoint.
    all_params = ['keyname']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the recognised ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_config_entry" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'keyname' is set
    if ('keyname' not in local_var_params or
            local_var_params['keyname'] is None):
        raise ValueError("Missing the required parameter `keyname` when calling `delete_config_entry`")  # noqa: E501

    # Server-side constraints on ``keyname``: at most 64 chars; word chars,
    # dots and dashes, starting and ending with a word char.
    if ('keyname' in local_var_params and
            len(local_var_params['keyname']) > 64):
        raise ValueError("Invalid value for parameter `keyname` when calling `delete_config_entry`, length must be less than or equal to `64`")  # noqa: E501
    # BUG FIX: the generated pattern was r'^[\\w][\\w-.]*[\\w]$' — inside a
    # raw string the doubled backslash denotes a literal backslash, so the
    # character classes matched only '\' and 'w' and every real keyname
    # (e.g. "feature.flag-1") was wrongly rejected.  Use single-escaped \w;
    # '-' is placed last in the class so it is unambiguously literal.
    if 'keyname' in local_var_params and not re.search(r'^[\w][\w.-]*[\w]$', local_var_params['keyname']):  # noqa: E501
        raise ValueError("Invalid value for parameter `keyname` when calling `delete_config_entry`, must conform to the pattern `/^[\\w][\\w-.]*[\\w]$/`")  # noqa: E501

    collection_formats = {}

    # The keyname is substituted into the request path.
    path_params = {}
    if 'keyname' in local_var_params:
        path_params['keyname'] = local_var_params['keyname']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE request carries no body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/config/frontend/{keyname}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_favicon(self, **kwargs):  # noqa: E501
    """Delete favicon for the Algorithmia instance.  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned and the result is retrieved via
    ``thread.get()``.

    >>> thread = api.delete_favicon(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the HTTP payload.
    kwargs['_return_http_data_only'] = True
    # Synchronous data and the async request thread are both propagated
    # unchanged from the *_with_http_info* variant.
    return self.delete_favicon_with_http_info(**kwargs)  # noqa: E501
def delete_favicon_with_http_info(self, **kwargs):  # noqa: E501
    """Delete favicon for the Algorithmia instance  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_favicon_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments; ``kwargs`` is merged in and then
    # removed below so parameters can be looked up uniformly by name.
    local_var_params = locals()

    # This endpoint takes no operation parameters — only the common options.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the recognised ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_favicon" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    # No path/query/body parameters; DELETE with empty body.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/config/favicon', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_logo(self, **kwargs):  # noqa: E501
    """Delete logo for the Algorithmia instance.  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned and the result is retrieved via
    ``thread.get()``.

    >>> thread = api.delete_logo(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the HTTP payload.
    kwargs['_return_http_data_only'] = True
    # Synchronous data and the async request thread are both propagated
    # unchanged from the *_with_http_info* variant.
    return self.delete_logo_with_http_info(**kwargs)  # noqa: E501
def delete_logo_with_http_info(self, **kwargs):  # noqa: E501
    """Delete logo for the Algorithmia instance  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_logo_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments; ``kwargs`` is merged in and then
    # removed below so parameters can be looked up uniformly by name.
    local_var_params = locals()

    # This endpoint takes no operation parameters — only the common options.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the recognised ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_logo" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    # No path/query/body parameters; DELETE with empty body.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/config/logo', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_organization(self, org_name, **kwargs):  # noqa: E501
    """Delete an organization.  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned and the result is retrieved via
    ``thread.get()``.

    >>> thread = api.delete_organization(org_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org_name: Organization name (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the HTTP payload.
    kwargs['_return_http_data_only'] = True
    # Synchronous data and the async request thread are both propagated
    # unchanged from the *_with_http_info* variant.
    return self.delete_organization_with_http_info(org_name, **kwargs)  # noqa: E501
def delete_organization_with_http_info(self, org_name, **kwargs):  # noqa: E501
    """Delete an organization  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_organization_with_http_info(org_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org_name: Organization name (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments; ``kwargs`` is merged in and then
    # removed below so parameters can be looked up uniformly by name.
    local_var_params = locals()

    # Keyword arguments accepted by this endpoint.
    all_params = ['org_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the recognised ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_organization" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'org_name' is set
    if ('org_name' not in local_var_params or
            local_var_params['org_name'] is None):
        raise ValueError("Missing the required parameter `org_name` when calling `delete_organization`")  # noqa: E501

    # Server-side constraints on ``org_name``: at most 30 chars,
    # identifier-like (letter first, then letters/digits/underscores).
    if ('org_name' in local_var_params and
            len(local_var_params['org_name']) > 30):
        raise ValueError("Invalid value for parameter `org_name` when calling `delete_organization`, length must be less than or equal to `30`")  # noqa: E501
    if 'org_name' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['org_name']):  # noqa: E501
        raise ValueError("Invalid value for parameter `org_name` when calling `delete_organization`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}

    # Note: the URL template uses camelCase ``orgName`` for the snake_case
    # ``org_name`` argument.
    path_params = {}
    if 'org_name' in local_var_params:
        path_params['orgName'] = local_var_params['org_name']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE request carries no body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/organizations/{orgName}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_user(self, username, **kwargs):  # noqa: E501
    """Delete a user represented by the provided username.  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned and the result is retrieved via
    ``thread.get()``.

    >>> thread = api.delete_user(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the HTTP payload.
    kwargs['_return_http_data_only'] = True
    # Synchronous data and the async request thread are both propagated
    # unchanged from the *_with_http_info* variant.
    return self.delete_user_with_http_info(username, **kwargs)  # noqa: E501
def delete_user_with_http_info(self, username, **kwargs):  # noqa: E501
    """Delete a user represented by the provided username  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_user_with_http_info(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments; ``kwargs`` is merged in and then
    # removed below so parameters can be looked up uniformly by name.
    local_var_params = locals()

    # Keyword arguments accepted by this endpoint.
    all_params = ['username']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the recognised ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_user" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `delete_user`")  # noqa: E501

    # Server-side constraints on ``username``: at most 30 chars,
    # identifier-like (letter first, then letters/digits/underscores).
    if ('username' in local_var_params and
            len(local_var_params['username']) > 30):
        raise ValueError("Invalid value for parameter `username` when calling `delete_user`, length must be less than or equal to `30`")  # noqa: E501
    if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `delete_user`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}

    # The username is substituted into the request path.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE request carries no body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/users/{username}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_algorithm(self, username, algoname, **kwargs):  # noqa: E501
    """Get Algorithm.  # noqa: E501

    Runs synchronously unless ``async_req=True`` is passed, in which case
    the request thread is returned and the result is retrieved via
    ``thread.get()``.

    >>> thread = api.get_algorithm(username, algoname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :return: VersionResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the HTTP payload.
    kwargs['_return_http_data_only'] = True
    # Synchronous data and the async request thread are both propagated
    # unchanged from the *_with_http_info* variant.
    return self.get_algorithm_with_http_info(username, algoname, **kwargs)  # noqa: E501
def get_algorithm_with_http_info(self, username, algoname, **kwargs):  # noqa: E501
    """Get Algorithm  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_with_http_info(username, algoname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :return: VersionResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the named arguments; ``kwargs`` is merged in and then
    # removed below so parameters can be looked up uniformly by name.
    local_var_params = locals()

    # Keyword arguments accepted by this endpoint.
    all_params = ['username', 'algoname']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the recognised ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_algorithm" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `get_algorithm`")  # noqa: E501
    # verify the required parameter 'algoname' is set
    if ('algoname' not in local_var_params or
            local_var_params['algoname'] is None):
        raise ValueError("Missing the required parameter `algoname` when calling `get_algorithm`")  # noqa: E501

    # Server-side constraints on ``username``: at most 30 chars,
    # identifier-like (letter first, then letters/digits/underscores).
    if ('username' in local_var_params and
            len(local_var_params['username']) > 30):
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm`, length must be less than or equal to `30`")  # noqa: E501
    if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}

    # Both parameters are substituted into the request path.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'algoname' in local_var_params:
        path_params['algoname'] = local_var_params['algoname']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request carries no body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/algorithms/{username}/{algoname}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VersionResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_algorithm_build_by_id(self, username, algoname, buildid, **kwargs):  # noqa: E501
    """Get individual algorithm build by ID.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_build_by_id(username, algoname, buildid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param str buildid: Build ID (required)
    :return: Build
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; call
    # *_with_http_info directly for status code and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.get_algorithm_build_by_id_with_http_info(
        username, algoname, buildid, **kwargs)  # noqa: E501
def get_algorithm_build_by_id_with_http_info(self, username, algoname, buildid, **kwargs):  # noqa: E501
    """Get individual algorithm build by ID.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_build_by_id_with_http_info(username, algoname, buildid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param str buildid: Build ID (required)
    :return: Build
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the generic transport options.
    recognized = [
        'username', 'algoname', 'buildid',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = {'username': username, 'algoname': algoname, 'buildid': buildid}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_algorithm_build_by_id" % name
            )
        params[name] = value
    # All three path parameters are mandatory and may not be None.
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `get_algorithm_build_by_id`")  # noqa: E501
    if params.get('algoname') is None:
        raise ValueError("Missing the required parameter `algoname` when calling `get_algorithm_build_by_id`")  # noqa: E501
    if params.get('buildid') is None:
        raise ValueError("Missing the required parameter `buildid` when calling `get_algorithm_build_by_id`")  # noqa: E501
    # Usernames are capped at 30 characters and must look like identifiers.
    if len(params['username']) > 30:
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm_build_by_id`, length must be less than or equal to `30`")  # noqa: E501
    if not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm_build_by_id`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    path_params = {
        'username': params['username'],
        'algoname': params['algoname'],
        'buildid': params['buildid'],
    }
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        '/algorithms/{username}/{algoname}/builds/{buildid}', 'GET',
        path_params,
        [],              # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Build',  # noqa: E501
        auth_settings=['ApiKeyAuth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_algorithm_build_logs(self, username, algoname, buildid, **kwargs):  # noqa: E501
    """Get Algorithm build logs.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_build_logs(username, algoname, buildid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param str buildid: Build ID (required)
    :return: BuildLogs
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; call
    # *_with_http_info directly for status code and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.get_algorithm_build_logs_with_http_info(
        username, algoname, buildid, **kwargs)  # noqa: E501
def get_algorithm_build_logs_with_http_info(self, username, algoname, buildid, **kwargs):  # noqa: E501
    """Get Algorithm build logs.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_build_logs_with_http_info(username, algoname, buildid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param str buildid: Build ID (required)
    :return: BuildLogs
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the generic transport options.
    recognized = [
        'username', 'algoname', 'buildid',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = {'username': username, 'algoname': algoname, 'buildid': buildid}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_algorithm_build_logs" % name
            )
        params[name] = value
    # All three path parameters are mandatory and may not be None.
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `get_algorithm_build_logs`")  # noqa: E501
    if params.get('algoname') is None:
        raise ValueError("Missing the required parameter `algoname` when calling `get_algorithm_build_logs`")  # noqa: E501
    if params.get('buildid') is None:
        raise ValueError("Missing the required parameter `buildid` when calling `get_algorithm_build_logs`")  # noqa: E501
    # Usernames are capped at 30 characters and must look like identifiers.
    if len(params['username']) > 30:
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm_build_logs`, length must be less than or equal to `30`")  # noqa: E501
    if not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm_build_logs`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    path_params = {
        'username': params['username'],
        'algoname': params['algoname'],
        'buildid': params['buildid'],
    }
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        '/algorithms/{username}/{algoname}/builds/{buildid}/logs', 'GET',
        path_params,
        [],              # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='BuildLogs',  # noqa: E501
        auth_settings=['ApiKeyAuth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_algorithm_builds(self, username, algoname, **kwargs):  # noqa: E501
    """Get Algorithm builds.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_builds(username, algoname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param int limit: The number of results to return in a single page
    :param str marker: Encoded pagination cursor
    :return: PagedBuildsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; call
    # *_with_http_info directly for status code and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.get_algorithm_builds_with_http_info(
        username, algoname, **kwargs)  # noqa: E501
def get_algorithm_builds_with_http_info(self, username, algoname, **kwargs):  # noqa: E501
    """Get Algorithm builds.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_builds_with_http_info(username, algoname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param int limit: The number of results to return in a single page
    :param str marker: Encoded pagination cursor
    :return: PagedBuildsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional/query parameters plus the generic transport options.
    recognized = [
        'username', 'algoname', 'limit', 'marker',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = {'username': username, 'algoname': algoname}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_algorithm_builds" % name
            )
        params[name] = value
    # Both path parameters are mandatory and may not be None.
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `get_algorithm_builds`")  # noqa: E501
    if params.get('algoname') is None:
        raise ValueError("Missing the required parameter `algoname` when calling `get_algorithm_builds`")  # noqa: E501
    # Usernames are capped at 30 characters and must look like identifiers.
    if len(params['username']) > 30:
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm_builds`, length must be less than or equal to `30`")  # noqa: E501
    if not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm_builds`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    # Pagination bound: a page size below 1 is meaningless.
    if 'limit' in params and params['limit'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `limit` when calling `get_algorithm_builds`, must be a value greater than or equal to `1`")  # noqa: E501
    path_params = {
        'username': params['username'],
        'algoname': params['algoname'],
    }
    # Only forward pagination options the caller actually supplied.
    query_params = [(name, params[name])
                    for name in ('limit', 'marker') if name in params]
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        '/algorithms/{username}/{algoname}/builds', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='PagedBuildsList',  # noqa: E501
        auth_settings=['ApiKeyAuth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_algorithm_hash_version(self, username, algoname, algohash, **kwargs):  # noqa: E501
    """Get Algorithm by Hash Version.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_hash_version(username, algoname, algohash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param str algohash: Algorithm hash (required)
    :return: VersionResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; call
    # *_with_http_info directly for status code and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.get_algorithm_hash_version_with_http_info(
        username, algoname, algohash, **kwargs)  # noqa: E501
def get_algorithm_hash_version_with_http_info(self, username, algoname, algohash, **kwargs):  # noqa: E501
    """Get Algorithm by Hash Version.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_hash_version_with_http_info(username, algoname, algohash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param str algohash: Algorithm hash (required)
    :return: VersionResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the generic transport options.
    recognized = [
        'username', 'algoname', 'algohash',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = {'username': username, 'algoname': algoname, 'algohash': algohash}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_algorithm_hash_version" % name
            )
        params[name] = value
    # All three path parameters are mandatory and may not be None.
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `get_algorithm_hash_version`")  # noqa: E501
    if params.get('algoname') is None:
        raise ValueError("Missing the required parameter `algoname` when calling `get_algorithm_hash_version`")  # noqa: E501
    if params.get('algohash') is None:
        raise ValueError("Missing the required parameter `algohash` when calling `get_algorithm_hash_version`")  # noqa: E501
    # Usernames are capped at 30 characters and must look like identifiers.
    if len(params['username']) > 30:
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm_hash_version`, length must be less than or equal to `30`")  # noqa: E501
    if not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm_hash_version`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    path_params = {
        'username': params['username'],
        'algoname': params['algoname'],
        'algohash': params['algohash'],
    }
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        '/algorithms/{username}/{algoname}/versions/{algohash}', 'GET',
        path_params,
        [],              # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='VersionResponse',  # noqa: E501
        auth_settings=['ApiKeyAuth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_algorithm_versions(self, username, algoname, **kwargs):  # noqa: E501
    """Get Algorithm versions.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_versions(username, algoname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param int limit: The number of results to return in a single page
    :param str marker: Encoded pagination cursor
    :param bool published: Algorithm version's published state
    :param bool callable: Algorithm version's callable visibility
    :return: PagedVersionsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; call
    # *_with_http_info directly for status code and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.get_algorithm_versions_with_http_info(
        username, algoname, **kwargs)  # noqa: E501
def get_algorithm_versions_with_http_info(self, username, algoname, **kwargs):  # noqa: E501
    """Get Algorithm versions.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_algorithm_versions_with_http_info(username, algoname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param int limit: The number of results to return in a single page
    :param str marker: Encoded pagination cursor
    :param bool published: Algorithm version's published state
    :param bool callable: Algorithm version's callable visibility
    :return: PagedVersionsList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional/query parameters plus the generic transport options.
    recognized = [
        'username', 'algoname', 'limit', 'marker', 'published', 'callable',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = {'username': username, 'algoname': algoname}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_algorithm_versions" % name
            )
        params[name] = value
    # Both path parameters are mandatory and may not be None.
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `get_algorithm_versions`")  # noqa: E501
    if params.get('algoname') is None:
        raise ValueError("Missing the required parameter `algoname` when calling `get_algorithm_versions`")  # noqa: E501
    # Usernames are capped at 30 characters and must look like identifiers.
    if len(params['username']) > 30:
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm_versions`, length must be less than or equal to `30`")  # noqa: E501
    if not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `get_algorithm_versions`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    # Pagination bound: a page size below 1 is meaningless.
    if 'limit' in params and params['limit'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `limit` when calling `get_algorithm_versions`, must be a value greater than or equal to `1`")  # noqa: E501
    path_params = {
        'username': params['username'],
        'algoname': params['algoname'],
    }
    # Only forward the filters/pagination options the caller supplied.
    query_params = [(name, params[name])
                    for name in ('limit', 'marker', 'published', 'callable')
                    if name in params]
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        '/algorithms/{username}/{algoname}/versions', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='PagedVersionsList',  # noqa: E501
        auth_settings=['ApiKeyAuth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_config_entry(self, keyname, **kwargs):  # noqa: E501
    """Get a frontend config entry represented by the provided keyname.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_config_entry(keyname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str keyname: The keyname for the config map entry (required)
    :return: FrontendConfigurationEntry
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; call
    # *_with_http_info directly for status code and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.get_config_entry_with_http_info(keyname, **kwargs)  # noqa: E501
def get_config_entry_with_http_info(self, keyname, **kwargs):  # noqa: E501
    """Get a frontend config entry represented by the provided keyname.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_config_entry_with_http_info(keyname, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str keyname: The keyname for the config map entry (required)
    :return: FrontendConfigurationEntry
             If the method is called asynchronously,
             returns the request thread.
    :raises ValueError: if `keyname` is missing, too long, or malformed
    :raises TypeError: on an unrecognized keyword argument
    """
    # Positional parameter plus the generic transport options.
    recognized = [
        'keyname', 'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = {'keyname': keyname}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_config_entry" % name
            )
        params[name] = value
    # verify the required parameter 'keyname' is set
    if params.get('keyname') is None:
        raise ValueError("Missing the required parameter `keyname` when calling `get_config_entry`")  # noqa: E501
    if len(params['keyname']) > 64:
        raise ValueError("Invalid value for parameter `keyname` when calling `get_config_entry`, length must be less than or equal to `64`")  # noqa: E501
    # BUG FIX: the generated check used r'^[\\w][\\w-.]*[\\w]$' -- in a raw
    # string `\\w` is a literal backslash plus 'w', so the character class
    # matched only those two characters and rejected every legitimate
    # keyname. The intended pattern (the one quoted in the error message
    # below) is ^[\w][\w-.]*[\w]$; the dash is escaped here to keep it a
    # literal inside the character class.
    if not re.search(r'^[\w][\w\-.]*[\w]$', params['keyname']):  # noqa: E501
        raise ValueError("Invalid value for parameter `keyname` when calling `get_config_entry`, must conform to the pattern `/^[\\w][\\w-.]*[\\w]$/`")  # noqa: E501
    path_params = {'keyname': params['keyname']}
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        '/config/frontend/{keyname}', 'GET',
        path_params,
        [],              # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='FrontendConfigurationEntry',  # noqa: E501
        auth_settings=['ApiKeyAuth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_favicon(self, **kwargs):  # noqa: E501
    """Get favicon for the Algorithmia instance.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_favicon(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; call
    # *_with_http_info directly for status code and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.get_favicon_with_http_info(**kwargs)  # noqa: E501
def get_favicon_with_http_info(self, **kwargs):  # noqa: E501
    """Get favicon for the Algorithmia instance.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_favicon_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the generic transport options are accepted here.
    recognized = [
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = {}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_favicon" % name
            )
        params[name] = value
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        '/config/favicon', 'GET',
        {},              # no path parameters
        [],              # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['ApiKeyAuth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_frontend_config_entries(self, **kwargs):  # noqa: E501
    """List all frontend configuration values.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_frontend_config_entries(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int limit: The number of results to return in a single page
    :param str marker: Encoded pagination cursor
    :return: PagedResultList
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; call
    # *_with_http_info directly for status code and headers as well.
    kwargs['_return_http_data_only'] = True
    return self.get_frontend_config_entries_with_http_info(**kwargs)  # noqa: E501
def get_frontend_config_entries_with_http_info(self, **kwargs):  # noqa: E501
    """List all frontend configuration values.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_frontend_config_entries_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int limit: The number of results to return in a single page
    :param str marker: Encoded pagination cursor
    :return: PagedResultList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Pagination options plus the generic transport options.
    recognized = [
        'limit', 'marker',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]
    params = {}
    for name, value in kwargs.items():
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_frontend_config_entries" % name
            )
        params[name] = value
    # Pagination bound: a page size below 1 is meaningless.
    if 'limit' in params and params['limit'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `limit` when calling `get_frontend_config_entries`, must be a value greater than or equal to `1`")  # noqa: E501
    # Only forward pagination options the caller actually supplied.
    query_params = [(name, params[name])
                    for name in ('limit', 'marker') if name in params]
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    return self.api_client.call_api(
        '/config/frontend', 'GET',
        {},              # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='PagedResultList',  # noqa: E501
        auth_settings=['ApiKeyAuth'],  # Authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_logo(self, **kwargs):  # noqa: E501
    """Get logo for the Algorithmia instance  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_logo(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # With `_return_http_data_only` forced on, the *_with_http_info
    # variant already yields exactly what this wrapper should return:
    # the deserialized payload (sync) or the request thread (async),
    # so one unconditional call covers both branches.
    kwargs['_return_http_data_only'] = True
    return self.get_logo_with_http_info(**kwargs)  # noqa: E501
def get_logo_with_http_info(self, **kwargs):  # noqa: E501
    """Get logo for the Algorithmia instance  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_logo_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the named arguments, then fold validated kwargs into the
    # same dict so every option is reachable by name below.
    local_var_params = locals()
    extra_params = local_var_params.pop('kwargs')

    known_params = [
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key in extra_params:
        if key not in known_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_logo" % key
            )
        local_var_params[key] = extra_params[key]

    collection_formats = {}
    path_params = {}
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/config/logo', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_organization(self, org_name, **kwargs):  # noqa: E501
    """Get an organization represented by the provided orgName  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_organization(org_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org_name: Organization name (required)
    :return: Organization
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant, with `_return_http_data_only`
    # forced on, returns the deserialized data (sync) or the request
    # thread (async) — exactly this wrapper's contract either way.
    kwargs['_return_http_data_only'] = True
    return self.get_organization_with_http_info(org_name, **kwargs)  # noqa: E501
def get_organization_with_http_info(self, org_name, **kwargs):  # noqa: E501
    """Get an organization represented by the provided orgName  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_organization_with_http_info(org_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org_name: Organization name (required)
    :return: Organization
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the named arguments, then fold validated kwargs into the
    # same dict so every option is reachable by name below.
    local_var_params = locals()
    extra_params = local_var_params.pop('kwargs')

    known_params = [
        'org_name',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key in extra_params:
        if key not in known_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_organization" % key
            )
        local_var_params[key] = extra_params[key]

    # verify the required parameter 'org_name' is set
    if local_var_params.get('org_name') is None:
        raise ValueError("Missing the required parameter `org_name` when calling `get_organization`")  # noqa: E501
    if len(local_var_params['org_name']) > 30:
        raise ValueError("Invalid value for parameter `org_name` when calling `get_organization`, length must be less than or equal to `30`")  # noqa: E501
    if not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['org_name']):  # noqa: E501
        raise ValueError("Invalid value for parameter `org_name` when calling `get_organization`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}
    path_params = {'orgName': local_var_params['org_name']}  # noqa: E501
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/organizations/{orgName}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Organization',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_organizations(self, **kwargs):  # noqa: E501
    """Lists all organizations  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_organizations(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int limit: The number of results to return in a single page
    :param str marker: Encoded pagination cursor
    :return: PagedResultList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Sync and async branches both return the *_with_http_info result
    # verbatim once `_return_http_data_only` is forced on.
    kwargs['_return_http_data_only'] = True
    return self.get_organizations_with_http_info(**kwargs)  # noqa: E501
def get_organizations_with_http_info(self, **kwargs):  # noqa: E501
    """Lists all organizations  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_organizations_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int limit: The number of results to return in a single page
    :param str marker: Encoded pagination cursor
    :return: PagedResultList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the named arguments, then fold validated kwargs into the
    # same dict so every option is reachable by name below.
    local_var_params = locals()
    extra_params = local_var_params.pop('kwargs')

    known_params = [
        'limit',
        'marker',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key in extra_params:
        if key not in known_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_organizations" % key
            )
        local_var_params[key] = extra_params[key]

    if 'limit' in local_var_params and local_var_params['limit'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `limit` when calling `get_organizations`, must be a value greater than or equal to `1`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    # Pagination options are forwarded only when the caller supplied
    # them; order (limit, then marker) matches the generated client.
    query_params = []
    for option in ('limit', 'marker'):
        if option in local_var_params:
            query_params.append((option, local_var_params[option]))  # noqa: E501
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/organizations', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PagedResultList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_package_set_builds(self, package_set_name, **kwargs):  # noqa: E501
    """Name of a package set  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_package_set_builds(package_set_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_set_name: The name of a package set (required)
    :return: InlineResponse200
             If the method is called asynchronously,
             returns the request thread.
    """
    # Sync and async branches both return the *_with_http_info result
    # verbatim once `_return_http_data_only` is forced on.
    kwargs['_return_http_data_only'] = True
    return self.get_package_set_builds_with_http_info(package_set_name, **kwargs)  # noqa: E501
def get_package_set_builds_with_http_info(self, package_set_name, **kwargs):  # noqa: E501
    """Name of a package set  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_package_set_builds_with_http_info(package_set_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_set_name: The name of a package set (required)
    :return: InlineResponse200
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the named arguments, then fold validated kwargs into the
    # same dict so every option is reachable by name below.
    local_var_params = locals()
    extra_params = local_var_params.pop('kwargs')

    known_params = [
        'package_set_name',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key in extra_params:
        if key not in known_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_package_set_builds" % key
            )
        local_var_params[key] = extra_params[key]

    # verify the required parameter 'package_set_name' is set
    if local_var_params.get('package_set_name') is None:
        raise ValueError("Missing the required parameter `package_set_name` when calling `get_package_set_builds`")  # noqa: E501

    collection_formats = {}
    path_params = {'package_set_name': local_var_params['package_set_name']}  # noqa: E501
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/package-sets/{package_set_name}/builds', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='InlineResponse200',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_user(self, username, **kwargs):  # noqa: E501
    """Get a user represented by the provided username  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_user(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    # Sync and async branches both return the *_with_http_info result
    # verbatim once `_return_http_data_only` is forced on.
    kwargs['_return_http_data_only'] = True
    return self.get_user_with_http_info(username, **kwargs)  # noqa: E501
def get_user_with_http_info(self, username, **kwargs):  # noqa: E501
    """Get a user represented by the provided username  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_user_with_http_info(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the named arguments, then fold validated kwargs into the
    # same dict so every option is reachable by name below.
    local_var_params = locals()
    extra_params = local_var_params.pop('kwargs')

    known_params = [
        'username',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key in extra_params:
        if key not in known_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user" % key
            )
        local_var_params[key] = extra_params[key]

    # verify the required parameter 'username' is set
    if local_var_params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `get_user`")  # noqa: E501
    if len(local_var_params['username']) > 30:
        raise ValueError("Invalid value for parameter `username` when calling `get_user`, length must be less than or equal to `30`")  # noqa: E501
    if not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `get_user`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}
    path_params = {'username': local_var_params['username']}  # noqa: E501
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/users/{username}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='User',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_users(self, **kwargs):  # noqa: E501
    """Lists all managed users  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_users(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int limit: The number of results to return in a single page
    :param str marker: Encoded pagination cursor
    :return: PagedResultList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Sync and async branches both return the *_with_http_info result
    # verbatim once `_return_http_data_only` is forced on.
    kwargs['_return_http_data_only'] = True
    return self.get_users_with_http_info(**kwargs)  # noqa: E501
def get_users_with_http_info(self, **kwargs):  # noqa: E501
    """Lists all managed users  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_users_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int limit: The number of results to return in a single page
    :param str marker: Encoded pagination cursor
    :return: PagedResultList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the named arguments, then fold validated kwargs into the
    # same dict so every option is reachable by name below.
    local_var_params = locals()
    extra_params = local_var_params.pop('kwargs')

    known_params = [
        'limit',
        'marker',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key in extra_params:
        if key not in known_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_users" % key
            )
        local_var_params[key] = extra_params[key]

    if 'limit' in local_var_params and local_var_params['limit'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `limit` when calling `get_users`, must be a value greater than or equal to `1`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    # Pagination options are forwarded only when the caller supplied
    # them; order (limit, then marker) matches the generated client.
    query_params = []
    for option in ('limit', 'marker'):
        if option in local_var_params:
            query_params.append((option, local_var_params[option]))  # noqa: E501
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/users', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PagedResultList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_entities_for_label(self, username, label, **kwargs):  # noqa: E501
    """List the unique set of entities associated with the token associated to the connection established between the user and the SCM configuration (determined by label)  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_entities_for_label(username, label, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str label: The SCM configuration label (required)
    :return: ScmEntities
             If the method is called asynchronously,
             returns the request thread.
    """
    # Sync and async branches both return the *_with_http_info result
    # verbatim once `_return_http_data_only` is forced on.
    kwargs['_return_http_data_only'] = True
    return self.list_entities_for_label_with_http_info(username, label, **kwargs)  # noqa: E501
def list_entities_for_label_with_http_info(self, username, label, **kwargs):  # noqa: E501
    """List the unique set of entities associated with the token associated to the connection established between the user and the SCM configuration (determined by label)  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_entities_for_label_with_http_info(username, label, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str label: The SCM configuration label (required)
    :return: ScmEntities
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the named arguments, then fold validated kwargs into the
    # same dict so every option is reachable by name below.
    local_var_params = locals()
    extra_params = local_var_params.pop('kwargs')

    known_params = [
        'username',
        'label',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key in extra_params:
        if key not in known_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_entities_for_label" % key
            )
        local_var_params[key] = extra_params[key]

    # Validation order mirrors the generated client: both required
    # checks first, then username constraints, then label constraints.
    # verify the required parameter 'username' is set
    if local_var_params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `list_entities_for_label`")  # noqa: E501
    # verify the required parameter 'label' is set
    if local_var_params.get('label') is None:
        raise ValueError("Missing the required parameter `label` when calling `list_entities_for_label`")  # noqa: E501
    if len(local_var_params['username']) > 30:
        raise ValueError("Invalid value for parameter `username` when calling `list_entities_for_label`, length must be less than or equal to `30`")  # noqa: E501
    if not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `list_entities_for_label`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    if len(local_var_params['label']) > 60:
        raise ValueError("Invalid value for parameter `label` when calling `list_entities_for_label`, length must be less than or equal to `60`")  # noqa: E501

    collection_formats = {}
    path_params = {
        'username': local_var_params['username'],  # noqa: E501
        'label': local_var_params['label'],  # noqa: E501
    }
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/users/{username}/scm-entities/{label}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ScmEntities',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_members(self, org_name, **kwargs):  # noqa: E501
    """List of members with their roles in a given organization  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_members(org_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org_name: Organization name (required)
    :return: PagedResultList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Sync and async branches both return the *_with_http_info result
    # verbatim once `_return_http_data_only` is forced on.
    kwargs['_return_http_data_only'] = True
    return self.list_members_with_http_info(org_name, **kwargs)  # noqa: E501
def list_members_with_http_info(self, org_name, **kwargs):  # noqa: E501
    """List of members with their roles in a given organization  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_members_with_http_info(org_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org_name: Organization name (required)
    :return: PagedResultList
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the named arguments, then fold validated kwargs into the
    # same dict so every option is reachable by name below.
    local_var_params = locals()
    extra_params = local_var_params.pop('kwargs')

    known_params = [
        'org_name',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key in extra_params:
        if key not in known_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_members" % key
            )
        local_var_params[key] = extra_params[key]

    # verify the required parameter 'org_name' is set
    if local_var_params.get('org_name') is None:
        raise ValueError("Missing the required parameter `org_name` when calling `list_members`")  # noqa: E501
    if len(local_var_params['org_name']) > 30:
        raise ValueError("Invalid value for parameter `org_name` when calling `list_members`, length must be less than or equal to `30`")  # noqa: E501
    if not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['org_name']):  # noqa: E501
        raise ValueError("Invalid value for parameter `org_name` when calling `list_members`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}
    path_params = {'orgName': local_var_params['org_name']}  # noqa: E501
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/organizations/{orgName}/members', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PagedResultList',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_scm_connections(self, username, **kwargs):  # noqa: E501
    """List the SCM connections associated with this user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_scm_connections(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :return: ScmConnections
             If the method is called asynchronously,
             returns the request thread.
    """
    # Sync and async branches both return the *_with_http_info result
    # verbatim once `_return_http_data_only` is forced on.
    kwargs['_return_http_data_only'] = True
    return self.list_scm_connections_with_http_info(username, **kwargs)  # noqa: E501
def list_scm_connections_with_http_info(self, username, **kwargs):  # noqa: E501
    """List the SCM connections associated with this user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_scm_connections_with_http_info(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :return: ScmConnections
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the named arguments, then fold validated kwargs into the
    # same dict so every option is reachable by name below.
    local_var_params = locals()
    extra_params = local_var_params.pop('kwargs')

    known_params = [
        'username',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key in extra_params:
        if key not in known_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_scm_connections" % key
            )
        local_var_params[key] = extra_params[key]

    # verify the required parameter 'username' is set
    if local_var_params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `list_scm_connections`")  # noqa: E501
    if len(local_var_params['username']) > 30:
        raise ValueError("Invalid value for parameter `username` when calling `list_scm_connections`, length must be less than or equal to `30`")  # noqa: E501
    if not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `list_scm_connections`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}
    path_params = {'username': local_var_params['username']}  # noqa: E501
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/users/{username}/scm-connections', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ScmConnections',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_scm_integrations(self, **kwargs):  # noqa: E501
    """Get the list of SCM systems that have been configured with this cluster  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_scm_integrations(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: ScmIntegrations
             If the method is called asynchronously,
             returns the request thread.
    """
    # Sync and async branches both return the *_with_http_info result
    # verbatim once `_return_http_data_only` is forced on.
    kwargs['_return_http_data_only'] = True
    return self.list_scm_integrations_with_http_info(**kwargs)  # noqa: E501
def list_scm_integrations_with_http_info(self, **kwargs):  # noqa: E501
    """Get the list of SCM systems that have been configured with this cluster  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_scm_integrations_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: ScmIntegrations
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the named arguments, then fold validated kwargs into the
    # same dict so every option is reachable by name below.
    local_var_params = locals()
    extra_params = local_var_params.pop('kwargs')

    known_params = [
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key in extra_params:
        if key not in known_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_scm_integrations" % key
            )
        local_var_params[key] = extra_params[key]

    collection_formats = {}
    path_params = {}
    query_params = []
    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    return self.api_client.call_api(
        '/scm-integrations', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ScmIntegrations',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def publish_algorithm(self, username, algoname, version_request, **kwargs):  # noqa: E501
    """Publish Algorithm  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread is returned instead of the data.
    >>> thread = api.publish_algorithm(username, algoname, version_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param VersionRequest version_request: Publish Version Request (required)
    :return: VersionResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request unwrapped response data from the low-level helper.
    kwargs['_return_http_data_only'] = True
    # The helper already returns either the data (sync) or the request
    # thread (async_req=True), so a single return covers both cases.
    return self.publish_algorithm_with_http_info(
        username, algoname, version_request, **kwargs)  # noqa: E501
def publish_algorithm_with_http_info(self, username, algoname, version_request, **kwargs):  # noqa: E501
    """Publish Algorithm  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.publish_algorithm_with_http_info(username, algoname, version_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param VersionRequest version_request: Publish Version Request (required)
    :return: VersionResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the arguments first: locals() here holds exactly the
    # declared parameters, before any other local name exists.
    local_var_params = locals()
    all_params = ['username', 'algoname', 'version_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments and merge the known ones into the
    # snapshot dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method publish_algorithm" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `publish_algorithm`")  # noqa: E501
    # verify the required parameter 'algoname' is set
    if ('algoname' not in local_var_params or
            local_var_params['algoname'] is None):
        raise ValueError("Missing the required parameter `algoname` when calling `publish_algorithm`")  # noqa: E501
    # verify the required parameter 'version_request' is set
    if ('version_request' not in local_var_params or
            local_var_params['version_request'] is None):
        raise ValueError("Missing the required parameter `version_request` when calling `publish_algorithm`")  # noqa: E501
    # Client-side validation mirroring the API spec: max length 30,
    # identifier-style characters only.
    if ('username' in local_var_params and
            len(local_var_params['username']) > 30):
        raise ValueError("Invalid value for parameter `username` when calling `publish_algorithm`, length must be less than or equal to `30`")  # noqa: E501
    if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `publish_algorithm`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    collection_formats = {}
    # Both username and algoname are substituted into the URL template.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'algoname' in local_var_params:
        path_params['algoname'] = local_var_params['algoname']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The VersionRequest travels as the JSON request body.
    body_params = None
    if 'version_request' in local_var_params:
        body_params = local_var_params['version_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/algorithms/{username}/{algoname}/versions', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='VersionResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def remove_organization_member(self, org_name, username, **kwargs):  # noqa: E501
    """Remove a user from an organization  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread is returned instead of the data.
    >>> thread = api.remove_organization_member(org_name, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org_name: Organization name (required)
    :param str username: The user's username (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request unwrapped response data from the low-level helper.
    kwargs['_return_http_data_only'] = True
    # The helper already returns either the data (sync) or the request
    # thread (async_req=True), so a single return covers both cases.
    return self.remove_organization_member_with_http_info(
        org_name, username, **kwargs)  # noqa: E501
def remove_organization_member_with_http_info(self, org_name, username, **kwargs):  # noqa: E501
    """Remove a user from an organization  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.remove_organization_member_with_http_info(org_name, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org_name: Organization name (required)
    :param str username: The user's username (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the arguments first: locals() here holds exactly the
    # declared parameters, before any other local name exists.
    local_var_params = locals()
    all_params = ['org_name', 'username']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments and merge the known ones into the
    # snapshot dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_organization_member" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'org_name' is set
    if ('org_name' not in local_var_params or
            local_var_params['org_name'] is None):
        raise ValueError("Missing the required parameter `org_name` when calling `remove_organization_member`")  # noqa: E501
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `remove_organization_member`")  # noqa: E501
    # Client-side validation mirroring the API spec: max length 30,
    # identifier-style characters only, for both path parameters.
    if ('org_name' in local_var_params and
            len(local_var_params['org_name']) > 30):
        raise ValueError("Invalid value for parameter `org_name` when calling `remove_organization_member`, length must be less than or equal to `30`")  # noqa: E501
    if 'org_name' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['org_name']):  # noqa: E501
        raise ValueError("Invalid value for parameter `org_name` when calling `remove_organization_member`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    if ('username' in local_var_params and
            len(local_var_params['username']) > 30):
        raise ValueError("Invalid value for parameter `username` when calling `remove_organization_member`, length must be less than or equal to `30`")  # noqa: E501
    if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `remove_organization_member`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    collection_formats = {}
    # NOTE: the URL template uses camelCase `orgName`, unlike the
    # snake_case Python parameter name.
    path_params = {}
    if 'org_name' in local_var_params:
        path_params['orgName'] = local_var_params['org_name']  # noqa: E501
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/organizations/{orgName}/members/{username}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_algorithm(self, username, algoname, update_request, **kwargs):  # noqa: E501
    """Update Algorithm  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread is returned instead of the data.
    >>> thread = api.update_algorithm(username, algoname, update_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param UpdateRequest update_request: Algorithm Update Request (required)
    :return: HashResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request unwrapped response data from the low-level helper.
    kwargs['_return_http_data_only'] = True
    # The helper already returns either the data (sync) or the request
    # thread (async_req=True), so a single return covers both cases.
    return self.update_algorithm_with_http_info(
        username, algoname, update_request, **kwargs)  # noqa: E501
def update_algorithm_with_http_info(self, username, algoname, update_request, **kwargs):  # noqa: E501
    """Update Algorithm  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_algorithm_with_http_info(username, algoname, update_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str algoname: Algorithm name (required)
    :param UpdateRequest update_request: Algorithm Update Request (required)
    :return: HashResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the arguments first: locals() here holds exactly the
    # declared parameters, before any other local name exists.
    local_var_params = locals()
    all_params = ['username', 'algoname', 'update_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments and merge the known ones into the
    # snapshot dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_algorithm" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `update_algorithm`")  # noqa: E501
    # verify the required parameter 'algoname' is set
    if ('algoname' not in local_var_params or
            local_var_params['algoname'] is None):
        raise ValueError("Missing the required parameter `algoname` when calling `update_algorithm`")  # noqa: E501
    # verify the required parameter 'update_request' is set
    if ('update_request' not in local_var_params or
            local_var_params['update_request'] is None):
        raise ValueError("Missing the required parameter `update_request` when calling `update_algorithm`")  # noqa: E501
    # Client-side validation mirroring the API spec: max length 30,
    # identifier-style characters only.
    if ('username' in local_var_params and
            len(local_var_params['username']) > 30):
        raise ValueError("Invalid value for parameter `username` when calling `update_algorithm`, length must be less than or equal to `30`")  # noqa: E501
    if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `update_algorithm`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'algoname' in local_var_params:
        path_params['algoname'] = local_var_params['algoname']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The UpdateRequest travels as the JSON request body.
    body_params = None
    if 'update_request' in local_var_params:
        body_params = local_var_params['update_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/algorithms/{username}/{algoname}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='HashResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_build_status(self, build_id, image_type, status_update_request, **kwargs):  # noqa: E501
    """Update the status of a package set build  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread is returned instead of the data.
    >>> thread = api.update_build_status(build_id, image_type, status_update_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int build_id: The id of the package set build (required)
    :param str image_type: The image type dependency of the package set build (required)
    :param StatusUpdateRequest status_update_request: The body expected for a package set build status update (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request unwrapped response data from the low-level helper.
    kwargs['_return_http_data_only'] = True
    # The helper already returns either the data (sync) or the request
    # thread (async_req=True), so a single return covers both cases.
    return self.update_build_status_with_http_info(
        build_id, image_type, status_update_request, **kwargs)  # noqa: E501
def update_build_status_with_http_info(self, build_id, image_type, status_update_request, **kwargs):  # noqa: E501
    """Update the status of a package set build  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_build_status_with_http_info(build_id, image_type, status_update_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int build_id: The id of the package set build (required)
    :param str image_type: The image type dependency of the package set build (required)
    :param StatusUpdateRequest status_update_request: The body expected for a package set build status update (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the arguments first: locals() here holds exactly the
    # declared parameters, before any other local name exists.
    local_var_params = locals()
    all_params = ['build_id', 'image_type', 'status_update_request']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments and merge the known ones into the
    # snapshot dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_build_status" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'build_id' is set
    if ('build_id' not in local_var_params or
            local_var_params['build_id'] is None):
        raise ValueError("Missing the required parameter `build_id` when calling `update_build_status`")  # noqa: E501
    # verify the required parameter 'image_type' is set
    if ('image_type' not in local_var_params or
            local_var_params['image_type'] is None):
        raise ValueError("Missing the required parameter `image_type` when calling `update_build_status`")  # noqa: E501
    # verify the required parameter 'status_update_request' is set
    if ('status_update_request' not in local_var_params or
            local_var_params['status_update_request'] is None):
        raise ValueError("Missing the required parameter `status_update_request` when calling `update_build_status`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'build_id' in local_var_params:
        path_params['build_id'] = local_var_params['build_id']  # noqa: E501
    if 'image_type' in local_var_params:
        path_params['image_type'] = local_var_params['image_type']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The StatusUpdateRequest travels as the JSON request body.
    body_params = None
    if 'status_update_request' in local_var_params:
        body_params = local_var_params['status_update_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/package-sets/builds/{build_id}/{image_type}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_config_entry(self, keyname, frontend_configuration_entry, **kwargs):  # noqa: E501
    """Update or insert a config entry represented by the provided keyname  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread is returned instead of the data.
    >>> thread = api.update_config_entry(keyname, frontend_configuration_entry, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str keyname: The keyname for the config map entry (required)
    :param FrontendConfigurationEntry frontend_configuration_entry: Request with config entry (required)
    :return: FrontendConfigurationEntry
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request unwrapped response data from the low-level helper.
    kwargs['_return_http_data_only'] = True
    # The helper already returns either the data (sync) or the request
    # thread (async_req=True), so a single return covers both cases.
    return self.update_config_entry_with_http_info(
        keyname, frontend_configuration_entry, **kwargs)  # noqa: E501
def update_config_entry_with_http_info(self, keyname, frontend_configuration_entry, **kwargs):  # noqa: E501
    """Update or insert a config entry represented by the provided keyname  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_config_entry_with_http_info(keyname, frontend_configuration_entry, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str keyname: The keyname for the config map entry (required)
    :param FrontendConfigurationEntry frontend_configuration_entry: Request with config entry (required)
    :return: FrontendConfigurationEntry
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the arguments first: locals() here holds exactly the
    # declared parameters, before any other local name exists.
    local_var_params = locals()
    all_params = ['keyname', 'frontend_configuration_entry']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments and merge the known ones into the
    # snapshot dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_config_entry" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'keyname' is set
    if ('keyname' not in local_var_params or
            local_var_params['keyname'] is None):
        raise ValueError("Missing the required parameter `keyname` when calling `update_config_entry`")  # noqa: E501
    # verify the required parameter 'frontend_configuration_entry' is set
    if ('frontend_configuration_entry' not in local_var_params or
            local_var_params['frontend_configuration_entry'] is None):
        raise ValueError("Missing the required parameter `frontend_configuration_entry` when calling `update_config_entry`")  # noqa: E501
    # Client-side validation mirroring the API spec: max length 64 and
    # the documented pattern /^[\w][\w-.]*[\w]$/.
    if ('keyname' in local_var_params and
            len(local_var_params['keyname']) > 64):
        raise ValueError("Invalid value for parameter `keyname` when calling `update_config_entry`, length must be less than or equal to `64`")  # noqa: E501
    # BUGFIX: the generated pattern was r'^[\\w][\\w-.]*[\\w]$' — inside a
    # raw string the doubled backslash is a literal backslash, and the
    # resulting class contained the descending range `w-.`, so re.search
    # raised re.error on every call. Use the intended \w class, with the
    # hyphen placed last so it is unambiguously literal.
    if 'keyname' in local_var_params and not re.search(r'^[\w][\w.-]*[\w]$', local_var_params['keyname']):  # noqa: E501
        raise ValueError("Invalid value for parameter `keyname` when calling `update_config_entry`, must conform to the pattern `/^[\\w][\\w-.]*[\\w]$/`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'keyname' in local_var_params:
        path_params['keyname'] = local_var_params['keyname']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The FrontendConfigurationEntry travels as the JSON request body.
    body_params = None
    if 'frontend_configuration_entry' in local_var_params:
        body_params = local_var_params['frontend_configuration_entry']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/config/frontend/{keyname}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FrontendConfigurationEntry',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_favicon(self, **kwargs):  # noqa: E501
    """Update favicon for the Algorithmia instance  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread is returned instead of the data.
    >>> thread = api.update_favicon(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request unwrapped response data from the low-level helper.
    kwargs['_return_http_data_only'] = True
    # The helper already returns either the data (sync) or the request
    # thread (async_req=True), so a single return covers both cases.
    return self.update_favicon_with_http_info(**kwargs)  # noqa: E501
def update_favicon_with_http_info(self, **kwargs):  # noqa: E501
    """Update favicon for the Algorithmia instance  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_favicon_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the arguments first: locals() taken here contains only the
    # parameters (self, kwargs), before any other local name is bound.
    local_var_params = locals()
    # No operation parameters; only the generic client options.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments and merge the known ones into the
    # snapshot dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_favicon" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # NOTE(review): this POST sends no body or file parameters even though
    # it uploads a favicon — presumably the payload is handled elsewhere
    # or the generator omitted it; confirm against the API spec.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/config/favicon', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_logo(self, **kwargs):  # noqa: E501
    """Update logo for the Algorithmia instance  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread is returned instead of the data.
    >>> thread = api.update_logo(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request unwrapped response data from the low-level helper.
    kwargs['_return_http_data_only'] = True
    # The helper already returns either the data (sync) or the request
    # thread (async_req=True), so a single return covers both cases.
    return self.update_logo_with_http_info(**kwargs)  # noqa: E501
def update_logo_with_http_info(self, **kwargs):  # noqa: E501
    """Update logo for the Algorithmia instance  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_logo_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the arguments first: locals() taken here contains only the
    # parameters (self, kwargs), before any other local name is bound.
    local_var_params = locals()
    # No operation parameters; only the generic client options.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments and merge the known ones into the
    # snapshot dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_logo" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # NOTE(review): this POST sends no body or file parameters even though
    # it uploads a logo — presumably the payload is handled elsewhere or
    # the generator omitted it; confirm against the API spec.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/config/logo', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_package_set(self, package_set_name, package_set_update_request_body, **kwargs):  # noqa: E501
    """Update a package set  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request, in which case a thread is returned instead of the data.
    >>> thread = api.update_package_set(package_set_name, package_set_update_request_body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_set_name: The name of a package set (required)
    :param PackageSetUpdateRequestBody package_set_update_request_body: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request unwrapped response data from the low-level helper.
    kwargs['_return_http_data_only'] = True
    # The helper already returns either the data (sync) or the request
    # thread (async_req=True), so a single return covers both cases.
    return self.update_package_set_with_http_info(
        package_set_name, package_set_update_request_body, **kwargs)  # noqa: E501
def update_package_set_with_http_info(self, package_set_name, package_set_update_request_body, **kwargs):  # noqa: E501
    """Update a package set  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_package_set_with_http_info(package_set_name, package_set_update_request_body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str package_set_name: The name of a package set (required)
    :param PackageSetUpdateRequestBody package_set_update_request_body: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot the arguments first: locals() here holds exactly the
    # declared parameters, before any other local name exists.
    local_var_params = locals()
    all_params = ['package_set_name', 'package_set_update_request_body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments and merge the known ones into the
    # snapshot dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_package_set" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'package_set_name' is set
    if ('package_set_name' not in local_var_params or
            local_var_params['package_set_name'] is None):
        raise ValueError("Missing the required parameter `package_set_name` when calling `update_package_set`")  # noqa: E501
    # verify the required parameter 'package_set_update_request_body' is set
    if ('package_set_update_request_body' not in local_var_params or
            local_var_params['package_set_update_request_body'] is None):
        raise ValueError("Missing the required parameter `package_set_update_request_body` when calling `update_package_set`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'package_set_name' in local_var_params:
        path_params['package_set_name'] = local_var_params['package_set_name']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # The PackageSetUpdateRequestBody travels as the JSON request body.
    body_params = None
    if 'package_set_update_request_body' in local_var_params:
        body_params = local_var_params['package_set_update_request_body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501
    return self.api_client.call_api(
        '/package-sets/{package_set_name}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_scm_connection_owner(self, username, label, scm_connection_owner, **kwargs):  # noqa: E501
    """Update the owner of the SCM connection  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_scm_connection_owner(username, label, scm_connection_owner, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str label: The SCM configuration label (required)
    :param ScmConnectionOwner scm_connection_owner: Body containing the new owner of a connection (required)
    :return: ScmConnectionWithOauthUsername
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always asks the low-level call for the
    # deserialized body only (never the full HTTP response triple).
    kwargs['_return_http_data_only'] = True
    # With async_req=True the underlying call returns the request thread,
    # otherwise the deserialized response body; either way we hand the
    # result straight back to the caller.
    result = self.update_scm_connection_owner_with_http_info(
        username, label, scm_connection_owner, **kwargs)  # noqa: E501
    return result
def update_scm_connection_owner_with_http_info(self, username, label, scm_connection_owner, **kwargs):  # noqa: E501
    """Update the owner of the SCM connection  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_scm_connection_owner_with_http_info(username, label, scm_connection_owner, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param str label: The SCM configuration label (required)
    :param ScmConnectionOwner scm_connection_owner: Body containing the new owner of a connection (required)
    :return: ScmConnectionWithOauthUsername
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if a required parameter is missing or fails
        validation (length / pattern constraints below).
    """
    # Snapshot all call arguments (including `self` and `kwargs`) into a
    # dict so parameters can be looked up and validated uniformly by name.
    local_var_params = locals()

    all_params = ['username', 'label', 'scm_connection_owner']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the accepted ones into
    # the flat parameter dict; the nested `kwargs` entry is then dropped.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_scm_connection_owner" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `update_scm_connection_owner`")  # noqa: E501
    # verify the required parameter 'label' is set
    if ('label' not in local_var_params or
            local_var_params['label'] is None):
        raise ValueError("Missing the required parameter `label` when calling `update_scm_connection_owner`")  # noqa: E501
    # verify the required parameter 'scm_connection_owner' is set
    if ('scm_connection_owner' not in local_var_params or
            local_var_params['scm_connection_owner'] is None):
        raise ValueError("Missing the required parameter `scm_connection_owner` when calling `update_scm_connection_owner`")  # noqa: E501

    # Client-side validation mirroring the API spec: username is at most
    # 30 chars and must match the identifier pattern; label at most 60.
    if ('username' in local_var_params and
            len(local_var_params['username']) > 30):
        raise ValueError("Invalid value for parameter `username` when calling `update_scm_connection_owner`, length must be less than or equal to `30`")  # noqa: E501
    if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `update_scm_connection_owner`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501
    if ('label' in local_var_params and
            len(local_var_params['label']) > 60):
        raise ValueError("Invalid value for parameter `label` when calling `update_scm_connection_owner`, length must be less than or equal to `60`")  # noqa: E501

    collection_formats = {}

    # Path parameters are substituted into the URL template below.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'label' in local_var_params:
        path_params['label'] = local_var_params['label']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The ScmConnectionOwner object is sent as the JSON request body.
    body_params = None
    if 'scm_connection_owner' in local_var_params:
        body_params = local_var_params['scm_connection_owner']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    # Delegate the actual HTTP round-trip (and response deserialization
    # into ScmConnectionWithOauthUsername) to the shared API client.
    return self.api_client.call_api(
        '/users/{username}/scm-connections/{label}/owner', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ScmConnectionWithOauthUsername',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_user(self, username, user, **kwargs):  # noqa: E501
    """Update a user represented by the provided username  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_user(username, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param User user: Request with User (required)
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always receive the deserialized body only,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The low-level call yields the request thread when async_req is set
    # and the deserialized User otherwise; pass the result through as-is.
    result = self.update_user_with_http_info(username, user, **kwargs)  # noqa: E501
    return result
def update_user_with_http_info(self, username, user, **kwargs):  # noqa: E501
    """Update a user represented by the provided username  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_user_with_http_info(username, user, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param User user: Request with User (required)
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if a required parameter is missing or `username`
        fails the length / pattern validation below.
    """
    # Snapshot all call arguments (including `self` and `kwargs`) into a
    # dict so parameters can be looked up and validated uniformly by name.
    local_var_params = locals()

    all_params = ['username', 'user']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the accepted ones into
    # the flat parameter dict; the nested `kwargs` entry is then dropped.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_user" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `update_user`")  # noqa: E501
    # verify the required parameter 'user' is set
    if ('user' not in local_var_params or
            local_var_params['user'] is None):
        raise ValueError("Missing the required parameter `user` when calling `update_user`")  # noqa: E501

    # Client-side validation mirroring the API spec: username is at most
    # 30 chars and must match the identifier pattern.
    if ('username' in local_var_params and
            len(local_var_params['username']) > 30):
        raise ValueError("Invalid value for parameter `username` when calling `update_user`, length must be less than or equal to `30`")  # noqa: E501
    if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `update_user`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}

    # Path parameter substituted into the URL template below.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The User object is sent as the JSON request body.
    body_params = None
    if 'user' in local_var_params:
        body_params = local_var_params['user']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    # Delegate the actual HTTP round-trip (and deserialization into a
    # User object) to the shared API client.
    return self.api_client.call_api(
        '/users/{username}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='User',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def upload_profile_image(self, username, **kwargs):  # noqa: E501
    """Upload a profile picture  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.upload_profile_image(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param file avatar:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always receive the deserialized body only,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The low-level call yields the request thread when async_req is set
    # and None otherwise; pass the result through unchanged.
    result = self.upload_profile_image_with_http_info(username, **kwargs)  # noqa: E501
    return result
def upload_profile_image_with_http_info(self, username, **kwargs):  # noqa: E501
    """Upload a profile picture  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.upload_profile_image_with_http_info(username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The user's username (required)
    :param file avatar:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if `username` is missing or fails the length /
        pattern validation below.
    """
    # Snapshot all call arguments (including `self` and `kwargs`) into a
    # dict so parameters can be looked up and validated uniformly by name.
    local_var_params = locals()

    all_params = ['username', 'avatar']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then fold the accepted ones into
    # the flat parameter dict; the nested `kwargs` entry is then dropped.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method upload_profile_image" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `upload_profile_image`")  # noqa: E501

    # Client-side validation mirroring the API spec: username is at most
    # 30 chars and must match the identifier pattern.
    if ('username' in local_var_params and
            len(local_var_params['username']) > 30):
        raise ValueError("Invalid value for parameter `username` when calling `upload_profile_image`, length must be less than or equal to `30`")  # noqa: E501
    if 'username' in local_var_params and not re.search(r'^[a-zA-Z][a-zA-Z0-9_]*$', local_var_params['username']):  # noqa: E501
        raise ValueError("Invalid value for parameter `username` when calling `upload_profile_image`, must conform to the pattern `/^[a-zA-Z][a-zA-Z0-9_]*$/`")  # noqa: E501

    collection_formats = {}

    # Path parameter substituted into the URL template below.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    # The optional avatar is uploaded as a multipart file part, not as a
    # JSON body (note the multipart/form-data Content-Type below).
    local_var_files = {}
    if 'avatar' in local_var_params:
        local_var_files['avatar'] = local_var_params['avatar']  # noqa: E501

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['multipart/form-data'])  # noqa: E501

    # Authentication setting
    auth_settings = ['ApiKeyAuth']  # noqa: E501

    # Delegate the actual HTTP round-trip to the shared API client; the
    # endpoint returns no body (response_type=None).
    return self.api_client.call_api(
        '/users/{username}/avatar', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
| 44.524624 | 198 | 0.629869 | 23,241 | 195,285 | 5.018932 | 0.013209 | 0.056033 | 0.087856 | 0.027159 | 0.98318 | 0.979005 | 0.972969 | 0.964208 | 0.956895 | 0.949651 | 0 | 0.016941 | 0.279694 | 195,285 | 4,385 | 199 | 44.534778 | 0.812299 | 0.284594 | 0 | 0.795664 | 0 | 0.026094 | 0.237982 | 0.06177 | 0 | 0 | 0 | 0 | 0 | 1 | 0.034123 | false | 0 | 0.001606 | 0 | 0.086712 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
913067ebe4bffe0f2bd8bed627f3b5b1d69e4084 | 164 | py | Python | tests/donn/__init__.py | sharan-amutharasu/DONN | c14557e8ef57f3e1c1b73c1fa98cb6ba19a82904 | [
"MIT"
] | 3 | 2018-08-17T05:31:25.000Z | 2020-02-13T19:43:02.000Z | donn/__init__.py | sharan-amutharasu/DONN | c14557e8ef57f3e1c1b73c1fa98cb6ba19a82904 | [
"MIT"
] | 1 | 2018-11-19T06:16:50.000Z | 2018-11-19T06:17:53.000Z | donn/__init__.py | sharan-amutharasu/DONN | c14557e8ef57f3e1c1b73c1fa98cb6ba19a82904 | [
"MIT"
] | 2 | 2018-12-06T05:01:07.000Z | 2018-12-06T11:59:47.000Z | from . import main
from .main import Optimizer
from .main import predict
from .main import allowed_layers
from .base_model import run_base_model
from .main import * | 27.333333 | 38 | 0.817073 | 26 | 164 | 5 | 0.384615 | 0.246154 | 0.430769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.140244 | 164 | 6 | 39 | 27.333333 | 0.921986 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
914c3389ec5163dc853cc07c306abc1a2c2f3e42 | 28,711 | py | Python | base/bin/decode.py | HackX07164/whatsapp-X | 28846e16f0e7a6c5ea0f47a0dc1ef0a9d2ba0593 | [
"MIT"
] | 1 | 2021-10-12T12:31:12.000Z | 2021-10-12T12:31:12.000Z | base/bin/decode.py | HackX07164/HackerMode | 3e48fd742cb91916a62f3cf3044a8fc9f1a5ea30 | [
"MIT"
] | null | null | null | base/bin/decode.py | HackX07164/HackerMode | 3e48fd742cb91916a62f3cf3044a8fc9f1a5ea30 | [
"MIT"
] | null | null | null | # Encoded by HackerMode tool...
# Copyright: PSH-TEAM
# Follow us on telegram ( @psh_team )
import marshal
exec(marshal.loads(b'\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00@\x00\x00\x00sl\x00\x00\x00d\x00Z\x00e\x00r\x10d\x01d\x02\x84!Z\x01e\x02e\x03d\x03\x83\x02\x8f(Z\x04e\x04\xa0\x05\xa1\x00Z\x06d\x04e\x06v\x01r4e\x07d\x05\x83\x01\x01\x00W\x00d\x06\x04\x00\x04\x00\x83\x03\x01\x00n\x101\x00sH0\x00\x01\x00\x01\x00\x01\x00Y\x00\x01\x00d\x02d\x06l\x08Z\x08e\te\x08\xa0\nd\x07\xa1\x01\x83\x01\x01\x00d\x06S\x00)\x08F\xe9\x01\x00\x00\x00\xe9\x00\x00\x00\x00\xda\x01rz[# Encoded by HackerMode tool...\n# Copyright: PSH-TEAM\n# Follow us on telegram ( @psh_team )z\x89please add the encode copyright to your file:\n# Encoded by HackerMode tool...\n# Copyright: PSH-TEAM\n# Follow us on telegram ( @psh_team )NsO+\x00\x00\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00@\x00\x00\x00s8\x03\x00\x00U\x00d\x00Z\x00e\x00r\x12d\x01d\x02\x84!Z\x01e\x02e\x03d\x03\x83\x02\x8f(Z\x04e\x04\xa0\x05\xa1\x00Z\x06d\x04e\x06v\x01r6e\x07d\x05\x83\x01\x01\x00W\x00d\x06\x04\x00\x04\x00\x83\x03\x01\x00n\x101\x00sJ0\x00\x01\x00\x01\x00\x01\x00Y\x00\x01\x00d\x02d\x06l\x08Z\x08d\x02d\x06l\tZ\td\x02d\x06l\nZ\nd\x02d\x06l\x0bZ\x0bd\x02d\x06l\x0cZ\x0cd\x02d\x06l\rZ\rd\x02d\x06l\x0eZ\x0ed\x02d\x06l\x0fZ\x0fd\x02d\x06l\x10Z\x10d\x02d\x06l\x11Z\x11d\x02d\x06l\x12Z\x12d\x02d\x07l\x13m\x14Z\x14m\x15Z\x15\x01\x00d\x02d\x08l\x16m\x17Z\x17\x01\x00d\x02d\tl\x18m\x19Z\x19\x01\x00d\x02d\nl\x1am\x1bZ\x1b\x01\x00d\x02d\x0bl\x1cm\x1dZ\x1d\x01\x00e\x0bj\x1e\xa0\x1fe\x03\xa0 d\x0cd\r\xa1\x02d\x02\x19\x00\xa1\x01\x01\x00e\x11j!j"Z"e#e$d\x0e<\x00d\x0fd\x10g\x02Z%e\x14e#\x19\x00e$d\x11<\x00d\x12Z&e#e$d\x13<\x00e\'d\x14\x83\x01j(Z)e\'d\x15\x83\x01j*Z+d\x16Z,e-e$d\x17<\x00e.Z/e0Z1d\x18Z2e\x15e-\x19\x00e$d\x19<\x00d\x1a\xa03\xa1\x00Z4e-e$d\x1b<\x00G\x00d\x1cd\x1d\x84\x00d\x1d\x83\x02Z5G\x00d\x1ed\x1f\x84\x00d\x1f\x83\x02Z6d 
d!\x84\x00Z\x06d"d#\x84\x00Z7d$d%\x84\x00Z8e9d&k\x02\x90\x03r4e:e\x0bj;\x83\x01d\'k\x02\x90\x03r\x00e\x0bj;d(\x19\x00\xa0<\xa1\x00d)k\x02\x90\x03r\x00e\n\xa0=d*\xa1\x01\x01\x00d\x02Z>d\x00Z?e@d+e>\x9b\x00\x9d\x02\x83\x01\x01\x00e?\x90\x02s\x18e\x06e\x0bj;d\x01\x19\x00\x83\x01ZAd,Z?n\x0ee\x06e\x0bj;d\r\x19\x00\x83\x01ZAz\x14e6eAe\x0bj;d\r\x19\x00\x83\x02ZBW\x00n"\x04\x00eC\x90\x02y\\\x01\x00\x01\x00\x01\x00e@d-\x83\x01\x01\x00Y\x00\x90\x03q4Y\x00n\x020\x00e>d\x017\x00Z>eDeBjE\x83\x01e-k\x02\x90\x02r\x84e8e\x0bj;d\r\x19\x00\x83\x01\x01\x00e\x17\x83\x00ZFe\x12jGe7d.\x8d\x01ZHeH\xa0I\xa1\x00\x01\x00eH\xa0Jd\r\xa1\x01\x01\x00eH\xa0K\xa1\x00\x90\x02r\xc2eH\xa0L\xa1\x00\x01\x00e@d/\x83\x01\x01\x00e@d0\x83\x01\x01\x00eBjEeAk\x02\x90\x02r\xda\x90\x03q4eMd1\x83\x01\xa0<\xa1\x00d2k\x02\x90\x02r\xf0\x90\x03q4e\n\xa0=d*\xa1\x01\x01\x00\x90\x01q\xf0n4e:e\x0bj;\x83\x01d(k\x02\x90\x03r,e6e\x06e\x0bj;d\x01\x19\x00\x83\x01e\x0bj;d\r\x19\x00\x83\x02\x01\x00n\x08e@d3\x83\x01\x01\x00d\x06S\x00)4F\xe9\x01\x00\x00\x00\xe9\x00\x00\x00\x00\xda\x01rz[# Encoded by HackerMode tool...\n# Copyright: PSH-TEAM\n# Follow us on telegram ( @psh_team )z\x89please add the encode copyright to your file:\n# Encoded by HackerMode tool...\n# Copyright: PSH-TEAM\n# Follow us on telegram ( @psh_team )N)\x02\xda\x04List\xda\x05Tuple)\x01\xda\x07Console)\x01\xda\x06Syntax)\x01\xda\x0ePYTHON_VERSION)\x01\xda\tdecompile\xfa\x01/\xe9\x02\x00\x00\x00\xda\x0cMAGIC_NUMBERs\x04\x00\x00\x00U\r\r\ns\x04\x00\x00\x00a\r\r\n\xda\rMAGIC_NUMBERSs\x04\x00\x00\x00PK\x03\x04\xda\x10ZIP_MAGIC_NUMBERZ\x06config\xda\x04sizez\x05utf-8\xda\tENCODEING)\n\xda\x04zlib\xda\x07marshal\xda\x06base16\xda\x06base32\xda\x06base64\xda\x06base85z\x0cmachine-codez\x08zip-codez\x0beval-filterz\rexec-function\xda\x0bALGORITHOMSz]\n# Decoded by HackerMode tool...\n# Copyright: PSH-TEAM\n# Follow us on telegram ( @psh_team 
)\n\xda\tCOPYRIGHTc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00@\x00\x00\x00sD\x00\x00\x00e\x00Z\x01d\x00Z\x02e\x03e\x04e\x05d\x01\x9c\x02d\x02d\x03\x84\x04\x83\x01Z\x06e\x03e\x04d\x04\x9c\x01d\x05d\x06\x84\x04\x83\x01Z\x07e\x03e\x04d\x04\x9c\x01d\x07d\x08\x84\x04\x83\x01Z\x08d\tS\x00)\n\xda\x14CodeSearchAlgorithms)\x02\xda\x06string\xda\x06returnc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00s\\\x00\x00\x00d\x01}\x01d\x02}\x02d\x03}\x03t\x00\xa0\x01|\x01|\x00\xa1\x02D\x00](}\x00t\x02|\x00d\x04\x19\x00\x83\x01|\x02k\x04r\x18t\x02|\x00d\x04\x19\x00\x83\x01}\x02|\x00d\x04\x19\x00}\x03q\x18|\x03sLt\x03\x83\x00\x82\x01t\x04d\x05|\x03\x9b\x00d\x06\x9d\x03\x83\x01S\x00)\x07Nz\x1d(((b|bytes\\()["\'])(.+)(["\']))r\x02\x00\x00\x00\xda\x00\xe9\x03\x00\x00\x00z\x02b\'\xfa\x01\')\x05\xda\x02re\xda\x07findall\xda\x03len\xda\tException\xda\x04eval)\x04r\x1a\x00\x00\x00\xda\x07patternZ\x06lengthZ\x0bstring_data\xa9\x00r%\x00\x00\x00\xfa\x0b.decode.pyo\xda\x08bytecodeE\x00\x00\x00s\x14\x00\x00\x00\x00\x02\x04\x01\x04\x01\x04\x01\x10\x01\x10\x01\x0c\x01\n\x01\x04\x01\x06\x01z\x1dCodeSearchAlgorithms.bytecode)\x01r\x1a\x00\x00\x00c\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x06\x00\x00\x00c\x00\x00\x00s\xd2\x00\x00\x00d\x01|\x01\x17\x00d\x02\x17\x00}\x02t\x00t\x01\xa0\x02|\x02|\x00\xa1\x02\x04\x00}\x03\x83\x01d\x03k\x00r*t\x03\x83\x00\x82\x01|\x03D\x00]\x9e}\x04|\x00|\x00\xa0\x04|\x04\xa1\x01|\x00\xa0\x04|\x04\xa1\x01t\x00|\x04\x83\x01\x17\x00\x85\x02\x19\x00}\x05d\x04}\x06|\x00|\x00\xa0\x04|\x04\xa1\x01t\x00|\x04\x83\x01\x17\x00d\x00\x85\x02\x19\x00D\x00]:}\x07|\x07d\x01k\x02r\x86|\x06d\x047\x00}\x06n\x10|\x07d\x05k\x02r\x96|\x06d\x048\x00}\x06|\x05|\x077\x00}\x05|\x06d\x03k\x02rp\x01\x00q\xacqp|\x00|\x00\xa0\x04|\x05\xa1\x01t\x00|\x05\x83\x01\x17\x00d\x00\x85\x02\x19\x00}\x00|\x05V\x00\x01\x00q.d\x00S\x00)\x06N\xfa\x01(z\r(?:[\\s]+)?\\()r\x02\x00\x00\x00r\x01\x00\x00\x00\xfa\x01)
)\x05r!\x00\x00\x00r\x1f\x00\x00\x00r \x00\x00\x00r"\x00\x00\x00\xda\x04find)\x08r\x1a\x00\x00\x00Z\rfunction_namer$\x00\x00\x00Z\tfunc_possZ\x08func_posZ\rfunction_bodyZ\ropen_bracketsZ\x04_chrr%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\x08functionR\x00\x00\x00s \x00\x00\x00\x00\x02\x0c\x01\x18\x01\x06\x01\x08\x01 \x01\x04\x02\x1e\x01\x08\x01\n\x01\x08\x01\x08\x01\x08\x01\x08\x01\x06\x01\x1a\x01z\x1dCodeSearchAlgorithms.functionc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x05\x00\x00\x00c\x00\x00\x00s6\x00\x00\x00d\x01}\x01t\x00t\x01\xa0\x02|\x01|\x00\xa1\x02\x04\x00}\x02\x83\x01d\x02k\x00r"t\x03\x83\x00\x82\x01|\x02D\x00]\n}\x03|\x03V\x00\x01\x00q&d\x00S\x00)\x03Nz\x19(["\'](?:\\\\[\\w0-9]+)+["\'])r\x02\x00\x00\x00)\x04r!\x00\x00\x00r\x1f\x00\x00\x00r \x00\x00\x00r"\x00\x00\x00)\x04r\x1a\x00\x00\x00r$\x00\x00\x00Z\x07strings\xda\x04_strr%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\rstring_filterf\x00\x00\x00s\n\x00\x00\x00\x00\x02\x04\x01\x18\x01\x06\x01\x08\x01z"CodeSearchAlgorithms.string_filterN)\t\xda\x08__name__\xda\n__module__\xda\x0c__qualname__\xda\x0cstaticmethod\xda\x03str\xda\x05bytesr\'\x00\x00\x00r+\x00\x00\x00r-\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r\x19\x00\x00\x00D\x00\x00\x00s\x0c\x00\x00\x00\x08\x01\x02\x01\x12\x0c\x02\x01\x10\x13\x02\x01r\x19\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00@\x00\x00\x00s\xae\x00\x00\x00e\x00Z\x01d\x00Z\x02d\x01d\x02\x84\x00Z\x03e\x04d\x03\x9c\x01d\x04d\x05\x84\x04Z\x05e\x04d\x03\x9c\x01d\x06d\x07\x84\x04Z\x06e\x04d\x03\x9c\x01d\x08d\t\x84\x04Z\x07e\x04d\x03\x9c\x01d\nd\x0b\x84\x04Z\x08e\x04d\x03\x9c\x01d\x0cd\r\x84\x04Z\te\x04d\x03\x9c\x01d\x0ed\x0f\x84\x04Z\ne\x04d\x03\x9c\x01d\x10d\x11\x84\x04Z\x0be\x04d\x03\x9c\x01d\x12d\x13\x84\x04Z\x0ce\x04d\x03\x9c\x01d\x14d\x15\x84\x04Z\re\x04d\x03\x9c\x01d\x16d\x17\x84\x04Z\x0ee\x04d\x03\x9c\x01d\x18d\x19\x84\x04Z\x0fd\x1aS\x00)\x1b\xda\x12DecodingAlgorithmsc\x03\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\n\x00\x00\x00C\x00\x00\x00s\xda\x01\x00\x00|\x01|\x00_\x00d\x00|\x00_\x01d\x00|\x00_\x02d\x00|\x00_\x03g\x00|\x00_\x04g\x00|\x00_\x05g\x00|\x00_\x06d\x00|\x00_\x07t\x08j\td\x01d\x02t\nd\x03d\x04\x8d\x04rRt\x0bj\x0cd\x05\xa0\r\xa1\x00\x19\x00\x83\x00\x01\x00t\x0ed\x06\x83\x01\x01\x00t\x0fD\x00]\xec}\x03z.|\x00\xa0\x10|\x03\xa0\x11d\x07d\x08\xa1\x02\xa1\x01\x83\x00|\x00_\x00t\x0ed\t|\x03\x9b\x00d\n\x9d\x03d\x0bd\x0c\x8d\x02\x01\x00W\x00n&\x04\x00t\x12y\xb6\x01\x00\x01\x00\x01\x00t\x0ed\r|\x03\x9b\x00d\x0e\x9d\x03\x83\x01\x01\x00Y\x00q^Y\x00n\x020\x00d\x0f|\x03v\x00r\xcat\x0ed\x10\x83\x01\x01\x00q^d\x11}\x04zP|\x00\xa0\x10|\x03\xa1\x01\x83\x00|\x00_\x00|\x04d\x127\x00}\x04t\x0ed\t|\x03\x9b\x00d\x13|\x04\x9b\x00d\n\x9d\x05d\x0bd\x0c\x8d\x02\x01\x00t\x13\xa0\x14d\x14\xa1\x01\x01\x00|\x00j\x00\xa0\x15\xa1\x00\x90\x01s\x1ct\x12\x83\x00\x82\x01W\x00q\xce\x04\x00t\x12\x90\x01y@\x01\x00\x01\x00\x01\x00t\x0ed\x15\x83\x01\x01\x00Y\x00\x90\x01qDY\x00q\xce0\x00q\xce\x01\x00\x90\x01qLq^zZt\x16|\x02d\x16\x83\x02\x8f:}\x05t\x17|\x00j\x00v\x01\x90\x01rx|\x05\xa0\x18t\x17|\x00j\x00\x17\x00\xa1\x01\x01\x00n\x0c|\x05\xa0\x18|\x00j\x00\xa1\x01\x01\x00W\x00d\x00\x04\x00\x04\x00\x83\x03\x01\x00n\x121\x00\x90\x01s\x9a0\x00\x01\x00\x01\x00\x01\x00Y\x00\x01\x00W\x00n.\x04\x00t\x12\x90\x01y\xd4\x01\x00}\x06\x01\x00z\x14t\x0ed\x17\x83\x01\x01\x00W\x00Y\x00d\x00}\x06~\x06n\nd\x00}\x06~\x060\x000\x00d\x00S\x00)\x18NZ\x07actions\xda\x05DEBUGF)\x02\xda\x04cast\xda\x07defaultZ\x04EXITz\x1bFinding the best algorithm:\xfa\x01-\xda\x01_z\t# \x1b[1;32mu\x08\x00\x00\x00 \xe2\x9c\x93\x1b[0m\xfa\x01\r)\x01\xda\x03endz\t# \x1b[1;31m\xfa\x04\x1b[0m\xda\x06filterr\x1c\x00\x00\x00r\x02\x00\x00\x00r\x01\x00\x00\x00z\x08 layers g{\x14\xaeG\xe1z\x94?u\x16\x00\x00\x00\n# \x1b[1;32mDONE \xe2\x9c\x93\x1b[0m\xda\x01wz\'# \x1b[1;31mFailed to decode the 
file!\x1b[0m)\x19\xda\tfile_data\xda\x11_custom_exec_data\xda\x14_custom_compile_data\xda\x11_custom_eval_data\xda\x16_custom_exec_data_list\xda\x19_custom_compile_data_list\xda\x16_custom_eval_data_list\xda\x0c_custom_data\xda\x06CONFIG\xda\x03get\xda\x04bool\xda\x03sys\xda\x08__dict__\xda\x05lower\xda\x05printr\x17\x00\x00\x00\xda\x10__getattribute__\xda\x07replacer"\x00\x00\x00\xda\x04time\xda\x05sleep\xda\x05strip\xda\x04openr\x18\x00\x00\x00\xda\x05write)\x07\xda\x04selfr?\x00\x00\x00\xda\tsave_fileZ\nalgogithomZ\x06layers\xda\x04file\xda\x01er%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\x08__init__p\x00\x00\x00sP\x00\x00\x00\x00\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x02\x12\x01\x10\x02\x08\x01\x08\x01\x02\x01\x16\x01\x18\x01\x0c\x01\x10\x01\n\x02\x08\x01\x08\x01\x02\x02\x04\x02\x02\x01\x0e\x01\x08\x01\x1a\x01\n\x01\x0c\x01\n\x01\x0e\x01\x08\x01\x0e\x01\x08\x01\x02\x01\x0c\x01\x0c\x01\x12\x020\x01\x10\x01z\x1bDecodingAlgorithms.__init__)\x01r\x1b\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00sZ\x00\x00\x00t\x00\xa0\x01t\x02\xa0\x03|\x00j\x04\xa1\x01\xa1\x01}\x01t\x05\xa0\x06\xa1\x00}\x02t\x07d\x01k\x00r&t\x07n\x02d\x02}\x03t\x08|\x03|\x01|\x02d\x03d\x04\x8d\x04\x01\x00d\x05\xa0\t|\x02\xa0\n\xa1\x00\xa0\x0bd\x05\xa1\x01d\x06d\x00\x85\x02\x19\x00\xa1\x01d\x05\x17\x00S\x00)\x07N\xe7333333\x0f@\xe7ffffff\x0e@F\xa9\x01Z\x07showast\xda\x01\n\xe9\x04\x00\x00\x00)\x0cr\x12\x00\x00\x00\xda\x05loadsr\x19\x00\x00\x00r\'\x00\x00\x00r?\x00\x00\x00\xda\x02io\xda\x08StringIOr\x08\x00\x00\x00r\t\x00\x00\x00\xda\x04join\xda\x08getvalue\xda\x05split)\x04rU\x00\x00\x00r\'\x00\x00\x00\xda\x03out\xda\x07versionr%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r\x12\x00\x00\x00\xa0\x00\x00\x00s\n\x00\x00\x00\x00\x01\x12\x01\x08\x01\x10\x01\x10\x01z\x1aDecodingAlgorithms.marshalc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x18\x00\x00\x00t\x00\xa0\x01t\x02\x
a0\x03|\x00j\x04\xa1\x01\xa1\x01\xa0\x05t\x06\xa1\x01S\x00\xa9\x01N)\x07r\x11\x00\x00\x00\xda\ndecompressr\x19\x00\x00\x00r\'\x00\x00\x00r?\x00\x00\x00\xda\x06decoder\x10\x00\x00\x00\xa9\x01rU\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r\x11\x00\x00\x00\xa7\x00\x00\x00s\n\x00\x00\x00\x00\x01\x04\x01\n\xff\x04\x02\x02\xfez\x17DecodingAlgorithms.zlibc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x18\x00\x00\x00t\x00\xa0\x01t\x02\xa0\x03|\x00j\x04\xa1\x01\xa1\x01\xa0\x05t\x06\xa1\x01S\x00rg\x00\x00\x00)\x07r\x15\x00\x00\x00Z\tb16decoder\x19\x00\x00\x00r\'\x00\x00\x00r?\x00\x00\x00ri\x00\x00\x00r\x10\x00\x00\x00rj\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r\x13\x00\x00\x00\xac\x00\x00\x00s\n\x00\x00\x00\x00\x01\x04\x01\n\xff\x04\x02\x02\xfez\x19DecodingAlgorithms.base16c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x18\x00\x00\x00t\x00\xa0\x01t\x02\xa0\x03|\x00j\x04\xa1\x01\xa1\x01\xa0\x05t\x06\xa1\x01S\x00rg\x00\x00\x00)\x07r\x15\x00\x00\x00Z\tb32decoder\x19\x00\x00\x00r\'\x00\x00\x00r?\x00\x00\x00ri\x00\x00\x00r\x10\x00\x00\x00rj\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r\x14\x00\x00\x00\xb1\x00\x00\x00s\n\x00\x00\x00\x00\x01\x04\x01\n\xff\x04\x02\x02\xfez\x19DecodingAlgorithms.base32c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x18\x00\x00\x00t\x00\xa0\x01t\x02\xa0\x03|\x00j\x04\xa1\x01\xa1\x01\xa0\x05t\x06\xa1\x01S\x00rg\x00\x00\x00)\x07r\x15\x00\x00\x00Z\tb64decoder\x19\x00\x00\x00r\'\x00\x00\x00r?\x00\x00\x00ri\x00\x00\x00r\x10\x00\x00\x00rj\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r\x15\x00\x00\x00\xb6\x00\x00\x00s\n\x00\x00\x00\x00\x01\x04\x01\n\xff\x04\x02\x02\xfez\x19DecodingAlgorithms.base64c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x18\x00\x00\x00t\x00\xa0\x01t\x02\xa0\x03|\x00j\x04\xa1\x01\xa1\x01\xa0\x05t\x06\xa1\x
01S\x00rg\x00\x00\x00)\x07r\x15\x00\x00\x00Z\tb85decoder\x19\x00\x00\x00r\'\x00\x00\x00r?\x00\x00\x00ri\x00\x00\x00r\x10\x00\x00\x00rj\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r\x16\x00\x00\x00\xbb\x00\x00\x00s\n\x00\x00\x00\x00\x01\x04\x01\n\xff\x04\x02\x02\xfez\x19DecodingAlgorithms.base85c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x06\x00\x00\x00\x03\x00\x00\x00s\x16\x01\x00\x00\x87\x00f\x01d\x01d\x02\x84\x08}\x01\x87\x00f\x01d\x03d\x04\x84\x08}\x02\x87\x00f\x01d\x05d\x06\x84\x08}\x03t\x00\x88\x00j\x01\x83\x01\x01\x00t\x02t\x03d\x07\x9c\x02d\x08d\t\x84\x04}\x04\x88\x00j\x04rT|\x04\x88\x00j\x04d\n\x83\x02\x88\x00_\x05n*\x88\x00j\x06rj|\x04\x88\x00j\x06d\x0b\x83\x02\x88\x00_\x05n\x14\x88\x00j\x07r~|\x04\x88\x00j\x07d\x0c\x83\x02\x88\x00_\x05t\x08\x88\x00j\x05\x83\x01t\tk\x02r\x98\x88\x00j\x05\xa0\nt\x0b\xa1\x01S\x00t\x08\x88\x00j\x05\x83\x01t\x03k\x02r\xac\x88\x00j\x05S\x00\x88\x00j\x0cr\xba\x88\x00j\x0c}\x05n\x14\x88\x00j\rr\xc8\x88\x00j\r}\x05n\x06\x88\x00j\x0e}\x05t\x0f\xa0\x10\xa1\x00}\x06t\x11d\rk\x00r\xe2t\x11n\x02d\x0e}\x07t\x12|\x07|\x05|\x06d\x0fd\x10\x8d\x04\x01\x00d\x11\xa0\x13|\x06\xa0\x14\xa1\x00\xa0\x15d\x11\xa1\x01d\x12d\x00\x85\x02\x19\x00\xa1\x01d\x11\x17\x00S\x00)\x13Nc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x04\x00\x00\x00\x17\x00\x00\x00s&\x00\x00\x00|\x00d\x01\x19\x00r"\x88\x00j\x00\xa0\x01|\x00d\x01\x19\x00\xa1\x01\x01\x00|\x00d\x01\x19\x00\x88\x00_\x02d\x00S\x00\xa9\x02Nr\x02\x00\x00\x00)\x03rC\x00\x00\x00\xda\x06appendr@\x00\x00\x00\xa9\x01\xda\x04argsrj\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\x04exec\xc1\x00\x00\x00s\x06\x00\x00\x00\x00\x01\x08\x01\x10\x01z.DecodingAlgorithms.exec_function.<locals>.execc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x04\x00\x00\x00\x17\x00\x00\x00s&\x00\x00\x00|\x00d\x01\x19\x00r"\x88\x00j\x00\xa0\x01|\x00d\x01\x19\x00\xa1\x01\x01\x00|\x00d\x01\x19\x00\x88\x00_\x02d\x00S\x00rk\x00\x00\x00)\x03rD\x00\x00\x00rl\x00\x00\x00rA\x00\
x00\x00rm\x00\x00\x00rj\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\x07compile\xc6\x00\x00\x00s\x06\x00\x00\x00\x00\x01\x08\x01\x10\x01z1DecodingAlgorithms.exec_function.<locals>.compilec\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x04\x00\x00\x00\x17\x00\x00\x00s&\x00\x00\x00|\x00d\x01\x19\x00r"\x88\x00j\x00\xa0\x01|\x00d\x01\x19\x00\xa1\x01\x01\x00|\x00d\x01\x19\x00\x88\x00_\x02d\x00S\x00rk\x00\x00\x00)\x03rE\x00\x00\x00rl\x00\x00\x00rB\x00\x00\x00rm\x00\x00\x00rj\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r#\x00\x00\x00\xcb\x00\x00\x00s\x06\x00\x00\x00\x00\x01\x08\x01\x10\x01z.DecodingAlgorithms.exec_function.<locals>.eval)\x02\xda\x04data\xda\tfunc_namec\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x05\x00\x00\x00S\x00\x00\x00s^\x00\x00\x00d\x01}\x02|\x00D\x00]P}\x03t\x00|\x03\x83\x01t\x01k\x02r"|\x03\xa0\x02t\x03\xa1\x01}\x03t\x00|\x03\x83\x01t\x04k\x02rPt\x05\xa0\x06|\x02|\x03\xa1\x02}\x04t\x07|\x04\x83\x01d\x02k\x02rX|\x03\x02\x00\x01\x00S\x00q\x08|\x03\x02\x00\x01\x00S\x00q\x08d\x00S\x00)\x03Nz$^((?:(?:pass;)+)?\\(?b?\'?(?:\\s+)?#.+)r\x02\x00\x00\x00)\x08\xda\x04typer3\x00\x00\x00ri\x00\x00\x00r\x10\x00\x00\x00r2\x00\x00\x00r\x1f\x00\x00\x00r 
\x00\x00\x00r!\x00\x00\x00)\x05rq\x00\x00\x00rr\x00\x00\x00r$\x00\x00\x00\xda\x07contentZ\x07commandr%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\tfiltering\xd2\x00\x00\x00s\x12\x00\x00\x00\x00\x01\x04\x01\x08\x01\x0c\x01\n\x01\x0c\x01\x0c\x01\x0c\x01\n\x02z3DecodingAlgorithms.exec_function.<locals>.filteringro\x00\x00\x00rp\x00\x00\x00r#\x00\x00\x00rZ\x00\x00\x00r[\x00\x00\x00Fr\\\x00\x00\x00r]\x00\x00\x00r^\x00\x00\x00)\x16\xda\x08OLD_EXECr?\x00\x00\x00\xda\x04listr2\x00\x00\x00rC\x00\x00\x00rF\x00\x00\x00rD\x00\x00\x00rE\x00\x00\x00rs\x00\x00\x00r3\x00\x00\x00ri\x00\x00\x00r\x10\x00\x00\x00r@\x00\x00\x00rA\x00\x00\x00rB\x00\x00\x00r`\x00\x00\x00ra\x00\x00\x00r\x08\x00\x00\x00r\t\x00\x00\x00rb\x00\x00\x00rc\x00\x00\x00rd\x00\x00\x00)\x08rU\x00\x00\x00ro\x00\x00\x00rp\x00\x00\x00r#\x00\x00\x00ru\x00\x00\x00r\'\x00\x00\x00re\x00\x00\x00rf\x00\x00\x00r%\x00\x00\x00rj\x00\x00\x00r&\x00\x00\x00\xda\rexec_function\xc0\x00\x00\x00s0\x00\x00\x00\x00\x01\x0c\x05\x0c\x05\x0c\x05\n\x02\x10\x0c\x06\x01\x10\x01\x06\x01\x10\x01\x06\x01\x0e\x02\x0e\x01\x0c\x01\x0e\x01\x06\x02\x06\x01\x08\x01\x06\x01\x08\x02\x06\x02\x08\x01\x10\x01\x10\x01z 
DecodingAlgorithms.exec_functionc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\\\x00\x00\x00t\x00\xa0\x01|\x00j\x02d\x01d\x00\x85\x02\x19\x00\xa1\x01}\x01t\x03\xa0\x04\xa1\x00}\x02t\x05d\x02k\x00r(t\x05n\x02d\x03}\x03t\x06|\x03|\x01|\x02d\x04d\x05\x8d\x04\x01\x00|\x02\xa0\x07\xa1\x00d\x06\x17\x00}\x04|\x00j\x02|\x04k\x02rXt\x08\x83\x00\x82\x01|\x04S\x00)\x07N\xe9\x10\x00\x00\x00rZ\x00\x00\x00r[\x00\x00\x00Fr\\\x00\x00\x00r]\x00\x00\x00)\tr\x12\x00\x00\x00r_\x00\x00\x00r?\x00\x00\x00r`\x00\x00\x00ra\x00\x00\x00r\x08\x00\x00\x00r\t\x00\x00\x00rc\x00\x00\x00r"\x00\x00\x00)\x05rU\x00\x00\x00r\'\x00\x00\x00re\x00\x00\x00rf\x00\x00\x00rq\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\x0cmachine_code\xf6\x00\x00\x00s\x10\x00\x00\x00\x00\x01\x14\x01\x08\x01\x10\x01\x10\x01\x0c\x01\n\x01\x06\x01z\x1fDecodingAlgorithms.machine_codec\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\n\x00\x00\x00C\x00\x00\x00s\x9a\x00\x00\x00t\x00\xa0\x01|\x00j\x02d\x01\xa1\x02\x8fx}\x01|\x01\xa0\x03\xa1\x00D\x00]^}\x02|\x02j\x04d\x02k\x02r\x18|\x01\xa0\x05|\x02d\x03\xa1\x02\x01\x00t\x06|\x02j\x04d\x01\x83\x02\x8f\x18}\x03|\x03\xa0\x07\xa1\x00}\x04W\x00d\x00\x04\x00\x04\x00\x83\x03\x01\x00n\x101\x00s\\0\x00\x01\x00\x01\x00\x01\x00Y\x00\x01\x00t\x08\xa0\t|\x02j\x04\xa1\x01\x01\x00\x01\x00qxq\x18W\x00d\x00\x04\x00\x04\x00\x83\x03\x01\x00n\x101\x00s\x8c0\x00\x01\x00\x01\x00\x01\x00Y\x00\x01\x00|\x04S\x00)\x04Nr\x03\x00\x00\x00z\x0b__main__.py\xda\x01.)\n\xda\x07zipfileZ\x07ZipFiler?\x00\x00\x00Z\x08infolist\xda\x08filename\xda\x07extractrS\x00\x00\x00\xda\x04read\xda\x02os\xda\x06remove)\x05rU\x00\x00\x00Z\x07zip_refrW\x00\x00\x00\xda\x01f\xda\x06outputr%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\x08zip_code\x00\x01\x00\x00s\x12\x00\x00\x00\x00\x01\x10\x01\x0c\x01\n\x01\x0c\x01\x0e\x01&\x01\x0c\x01$\x01z\x1bDecodingAlgorithms.zip_codec\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x08\x00\x00\x00\x03\x00
\x00\x00s2\x00\x00\x00\x87\x00\x87\x01f\x02d\x01d\x02\x84\x08\x89\x00\x88\x00t\x00t\x01t\x00t\x02\xa0\x03\x88\x01j\x04d\x03\xa1\x02\x83\x01\x83\x01\x83\x01\x83\x01\x01\x00\x88\x01j\x04S\x00)\x04Nc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\t\x00\x00\x00\t\x00\x00\x00\x13\x00\x00\x00s4\x01\x00\x00|\x00D\x00]\x16}\x01|\x01\xa0\x00\xa1\x00s\x04|\x00\xa0\x01|\x01\xa1\x01\x01\x00q\x04d\x01}\x02|\x00D\x00]\xe4}\x03zbt\x02\xa0\x03d\x02|\x03\xa1\x02d\x01\x19\x00}\x04d\x03d\x04g\x02}\x05d\x05}\x06|\x05D\x00]\x10}\x07|\x07|\x04v\x00rJd\x06}\x06qJ|\x06r\x88\x88\x00t\x04t\x05t\x04t\x06\xa0\x07|\x04d\x03\xa1\x02\x83\x01\x83\x01\x83\x01\x83\x01\x01\x00|\x02d\x077\x00}\x02W\x00q$W\x00n\x16\x04\x00t\x08y\xa0\x01\x00\x01\x00\x01\x00Y\x00q$Y\x00n\x020\x00zHz\x18t\td\x08|\x04\x9b\x00\x9d\x02\x83\x01\xa0\nt\x0b\xa1\x01}\x08W\x00n\x1a\x04\x00t\x0cy\xd6\x01\x00\x01\x00\x01\x00t\t|\x04\x83\x01}\x08Y\x00n\x020\x00\x88\x01j\r\xa0\x0e|\x03|\x08\xa1\x02\x88\x01_\rW\x00q$\x04\x00t\x0c\x90\x01y\x06\x01\x00\x01\x00\x01\x00|\x02d\x077\x00}\x02Y\x00q$0\x00q$|\x02t\x0f|\x00\x83\x01k\x02\x90\x01r0t\x0cd\t|\x02\x9b\x00d\nt\x0f|\x00\x83\x01\x9b\x00\x9d\x04\x83\x01\x82\x01d\x00S\x00)\x0bNr\x02\x00\x00\x00z\x08\\((.+)\\)r#\x00\x00\x00ro\x00\x00\x00FTr\x01\x00\x00\x00\xda\x01bz\x16Exception: exceptions:z\x1c == len(all_eval_functions):)\x10rR\x00\x00\x00r\x81\x00\x00\x00r\x1f\x00\x00\x00r 
\x00\x00\x00rw\x00\x00\x00\xda\x03setr\x19\x00\x00\x00r+\x00\x00\x00\xda\nIndexErrorr#\x00\x00\x00ri\x00\x00\x00r\x10\x00\x00\x00r"\x00\x00\x00r?\x00\x00\x00rO\x00\x00\x00r!\x00\x00\x00)\tZ\x12all_eval_functions\xda\x04func\xda\nexceptionsZ\x06eval_fZ\teval_bodyZ\rbad_functionsZ\x05is_inr+\x00\x00\x00Z\teval_data\xa9\x02\xda\x0broot_searchrU\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r\x8b\x00\x00\x00\x0c\x01\x00\x00s<\x00\x00\x00\x00\x01\x08\x01\x08\x01\x0c\x02\x04\x01\x08\x01\x02\x01\x10\x01\x08\x01\x04\x01\x08\x01\x08\x01\x06\x01\x04\x01\x1c\x01\x08\x01\x08\x01\x0c\x01\n\x02\x02\x01\x02\x01\x18\x01\x0c\x01\x0e\x01\x14\x01\x0e\x01\x10\x01\x0e\x01\x02\x01\x12\xffz3DecodingAlgorithms.eval_filter.<locals>.root_searchr#\x00\x00\x00)\x05rw\x00\x00\x00r\x86\x00\x00\x00r\x19\x00\x00\x00r+\x00\x00\x00r?\x00\x00\x00rj\x00\x00\x00r%\x00\x00\x00r\x8a\x00\x00\x00r&\x00\x00\x00\xda\x0beval_filter\x0b\x01\x00\x00s\x06\x00\x00\x00\x00\x01\x0e!\x1e\x01z\x1eDecodingAlgorithms.eval_filterc\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\t\x00\x00\x00C\x00\x00\x00sz\x00\x00\x00t\x00t\x01t\x00t\x02\xa0\x03|\x00j\x04\xa1\x01\x83\x01\x83\x01\x83\x01}\x01d\x01}\x02|\x01D\x00]@}\x03z |\x00j\x04\xa0\x05|\x03d\x02t\x06|\x03\x83\x01\x9b\x00d\x02\x9d\x03\xa1\x02|\x00_\x04W\x00q \x04\x00t\x07y^\x01\x00\x01\x00\x01\x00|\x02d\x037\x00}\x02Y\x00q 0\x00q |\x02t\x08|\x01\x83\x01k\x02rtt\x07\x83\x00\x82\x01|\x00j\x04S\x00)\x04Nr\x02\x00\x00\x00r\x1e\x00\x00\x00r\x01\x00\x00\x00)\trw\x00\x00\x00r\x86\x00\x00\x00r\x19\x00\x00\x00r-\x00\x00\x00r?\x00\x00\x00rO\x00\x00\x00r#\x00\x00\x00r"\x00\x00\x00r!\x00\x00\x00)\x04rU\x00\x00\x00Z\x0ball_stringsr\x89\x00\x00\x00r,\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r-\x00\x00\x000\x01\x00\x00s\x14\x00\x00\x00\x00\x01\x18\x01\x04\x01\x08\x01\x02\x01 \x01\x0c\x01\x10\x01\x0c\x01\x06\x01z 
DecodingAlgorithms.string_filterN)\x10r.\x00\x00\x00r/\x00\x00\x00r0\x00\x00\x00rY\x00\x00\x00r2\x00\x00\x00r\x12\x00\x00\x00r\x11\x00\x00\x00r\x13\x00\x00\x00r\x14\x00\x00\x00r\x15\x00\x00\x00r\x16\x00\x00\x00rx\x00\x00\x00rz\x00\x00\x00r\x84\x00\x00\x00r\x8c\x00\x00\x00r-\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00r4\x00\x00\x00o\x00\x00\x00s\x18\x00\x00\x00\x08\x01\x080\x0e\x07\x0e\x05\x0e\x05\x0e\x05\x0e\x05\x0e\x05\x0e6\x0e\n\x0e\x0b\x0e%r4\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x08\x00\x00\x00C\x00\x00\x00s\xe2\x00\x00\x00t\x00j\x01\xa0\x02|\x00\xa1\x01s\x1at\x03d\x01|\x00\x9b\x00\x9d\x02\x83\x01\x01\x00t\x04|\x00d\x02\x83\x02\x8f\x18}\x01|\x01\xa0\x05\xa1\x00}\x02W\x00d\x00\x04\x00\x04\x00\x83\x03\x01\x00n\x101\x00sB0\x00\x01\x00\x01\x00\x01\x00Y\x00\x01\x00|\x02d\x00d\x03\x85\x02\x19\x00t\x06v\x00rfd\x04}\x03|\x02}\x04nht\x07|\x02v\x00rxd\x05}\x03|\x00}\x04nVz:t\x04|\x00d\x06\x83\x02\x8f\x1c}\x05d\x07}\x03|\x05\xa0\x05\xa1\x00}\x04W\x00d\x00\x04\x00\x04\x00\x83\x03\x01\x00n\x101\x00s\xa60\x00\x01\x00\x01\x00\x01\x00Y\x00\x01\x00W\x00n\x1a\x04\x00t\x08y\xcc\x01\x00\x01\x00\x01\x00d\x08}\x03d\x00}\x04Y\x00n\x020\x00t\td\t|\x03\x9b\x00d\n\x9d\x03\x83\x01\x01\x00|\x04S\x00)\x0bNz\x13# file not found!: \xda\x02rbr^\x00\x00\x00Z\x03pyc\xda\x03zipr\x03\x00\x00\x00\xda\x02py\xfa\x01?z\x0cFile type ( z\x02 
))\nr\x80\x00\x00\x00\xda\x04path\xda\x06isfile\xda\x04exitrS\x00\x00\x00r\x7f\x00\x00\x00r\r\x00\x00\x00r\x0e\x00\x00\x00\xda\x12UnicodeDecodeErrorrM\x00\x00\x00)\x06r}\x00\x00\x00Z\x05bfilert\x00\x00\x00Z\tfile_typer\x83\x00\x00\x00rW\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00rq\x00\x00\x00=\x01\x00\x00s&\x00\x00\x00\x00\x01\x0c\x01\x0e\x02\x0c\x01&\x02\x10\x01\x04\x01\x06\x02\x08\x01\x04\x01\x06\x03\x02\x01\x0c\x01\x04\x01*\x01\x0c\x01\x04\x01\n\x02\x10\x01rq\x00\x00\x00c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x03\x00\x00\x00C\x00\x00\x00s0\x00\x00\x00t\x00t\x01j\x02\x83\x01t\x03k\x02r,t\x04d\x01\x83\x01\x01\x00t\x05t\x01j\x02d\x02\x83\x02}\x00t\x06\xa0\x04|\x00\xa1\x01\x01\x00d\x00S\x00)\x03Nr\x1c\x00\x00\x00\xda\x06python)\x07rs\x00\x00\x00\xda\x13decoding_algorithmsr?\x00\x00\x00r2\x00\x00\x00rM\x00\x00\x00r\x07\x00\x00\x00\xda\x07console)\x01Z\x06syntaxr%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\tshow_codeY\x01\x00\x00s\x08\x00\x00\x00\x00\x01\x0e\x01\x08\x01\x0c\x01r\x98\x00\x00\x00c\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00s\x1a\x00\x00\x00t\x00d\x01t\x01|\x00\x83\x01j\x02\x9b\x00d\x02\x9d\x03\x83\x01\x01\x00d\x00S\x00)\x03Nz\x0f# \x1b[1;32msize: r<\x00\x00\x00)\x03rM\x00\x00\x00\xda\x04SIZEr\x0f\x00\x00\x00)\x01rW\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\x0eshow_file_size`\x01\x00\x00s\x02\x00\x00\x00\x00\x01r\x9a\x00\x00\x00\xda\x08__main__r^\x00\x00\x00r\x1d\x00\x00\x00z\t@psh_team\xda\x05clearz)@psh_team <developer mode> total layers: Tz\x16\r# \x1b[1;31mstopped!\x1b[0m)\x01\xda\x06targetz<# \x1b[1;33mcan\'t show the code because the file is to big!\x1b[0mr\x1c\x00\x00\x00z+Press [enter] to continue\nor [n] to stop\n: \xda\x01nz USAGE:\n decode file.py 
output.py)N\xda\x03foo\xda\x03barrS\x00\x00\x00\xda\x08__file__rW\x00\x00\x00r\x7f\x00\x00\x00rq\x00\x00\x00r\x93\x00\x00\x00r`\x00\x00\x00r\x1f\x00\x00\x00r\x80\x00\x00\x00rJ\x00\x00\x00rP\x00\x00\x00r|\x00\x00\x00r\x11\x00\x00\x00r\x15\x00\x00\x00r\x12\x00\x00\x00\xda\timportlibZ\x0fmultiprocessing\xda\x06typingr\x04\x00\x00\x00r\x05\x00\x00\x00Z\x0crich.consoler\x06\x00\x00\x00Z\x0brich.syntaxr\x07\x00\x00\x00Z\nuncompyle6r\x08\x00\x00\x00Z\x0funcompyle6.mainr\t\x00\x00\x00r\x91\x00\x00\x00rl\x00\x00\x00\xda\x06rsplit\xda\x04utilr\x0c\x00\x00\x00r3\x00\x00\x00\xda\x0f__annotations__r\r\x00\x00\x00r\x0e\x00\x00\x00\xda\n__import__Z\x06ConfigrG\x00\x00\x00\xda\x04Sizer\x99\x00\x00\x00r\x10\x00\x00\x00r2\x00\x00\x00ro\x00\x00\x00rv\x00\x00\x00r#\x00\x00\x00Z\x08OLD_EVALr\x17\x00\x00\x00\xda\x06lstripr\x18\x00\x00\x00r\x19\x00\x00\x00r4\x00\x00\x00r\x98\x00\x00\x00r\x9a\x00\x00\x00r.\x00\x00\x00r!\x00\x00\x00\xda\x04argvrL\x00\x00\x00\xda\x06systemZ\x0ctotal_layers\xda\x04copyrM\x00\x00\x00rt\x00\x00\x00r\x96\x00\x00\x00\xda\x11KeyboardInterruptrs\x00\x00\x00r?\x00\x00\x00r\x97\x00\x00\x00Z\x07Process\xda\x01p\xda\x05startrb\x00\x00\x00\xda\x08is_alive\xda\x04kill\xda\x05inputr%\x00\x00\x00r%\x00\x00\x00r%\x00\x00\x00r&\x00\x00\x00\xda\x08<module>\x02\x00\x00\x00s\x9a\x00\x00\x00\x06\x01\x04\x01\x08\x02\x0c\x01\x08\x01\x02\x02\x02\xfe\x04\x03&\x05\x08\x01\x08\x01\x08\x01\x08\x01\x08\x01\x08\x01\x08\x01\x08\x01\x08\x01\x08\x01\x08\x02\x10\x02\x0c\x01\x0c\x01\x0c\x01\x0c\x02\x18\x02\x10\x02\x02\x01\x02\xfe\x10\x04\x0c\x01\n\x01\n\x01\x0c\x01\x04\x01\x04\x01\x10\r\x10\x07\x0e+\x0e\x7f\x00O\x08\x1c\x08\x07\x08\x04\n\x01$\x01\n\x01\x04\x01\x04\x02\x0e\x01\x06\x01\x0e\x01\x06\x02\x0e\x02\x02\x01\x14\x01\x0e\x01\x08\x01\x0c\x02\x08\x01\x10\x01\x0e\x01\x06\x01\x0c\x01\x08\x01\n\x01\n\x01\x08\x01\x08\x01\x08\x01\x0c\x01\x04\x01\x12\x01\x04\x01\x10\x02\x10\x01\x1c\x03)\x0b\xda\x03foo\xda\x03bar\xda\x04open\xda\x08__file__\xda\x04file\xda\x04read\xda\x04data\xda\x04exit\xda\x07
marshal\xda\x04exec\xda\x05loads\xa9\x00r\x0f\x00\x00\x00r\x0f\x00\x00\x00\xfa\x0b.decode.pyo\xda\x08<module>\x02\x00\x00\x00s\x14\x00\x00\x00\x04\x01\x04\x01\x08\x02\x0c\x01\x08\x01\x02\x02\x02\xfe\x04\x03&\x07\x08\x01')) | 5,742.2 | 28,604 | 0.760127 | 6,171 | 28,711 | 3.5111 | 0.122022 | 0.290488 | 0.191905 | 0.151198 | 0.522361 | 0.451747 | 0.392163 | 0.345364 | 0.324641 | 0.301795 | 0 | 0.363614 | 0.005817 | 28,711 | 5 | 28,604 | 5,742.2 | 0.39546 | 0.002961 | 0 | 0 | 0 | 7 | 0.538308 | 0.511861 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.5 | 1 | 0 | 1 | 0.5 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 13 |
e68e1ef3d5e81b0aa0e8d6c2c08f487d9f058241 | 202 | py | Python | tests/unit/gui/widget/test_dropdown.py | matthewgdv/iohandler | 1df420dd3ae2e022d0aa4db864fd4af0dbd665cc | [
"MIT"
] | 1 | 2019-08-28T17:03:27.000Z | 2019-08-28T17:03:27.000Z | tests/unit/gui/widget/test_dropdown.py | matthewgdv/iohandler | 1df420dd3ae2e022d0aa4db864fd4af0dbd665cc | [
"MIT"
] | null | null | null | tests/unit/gui/widget/test_dropdown.py | matthewgdv/iohandler | 1df420dd3ae2e022d0aa4db864fd4af0dbd665cc | [
"MIT"
] | null | null | null | class TestDropDown:
def test_choices(self): # synced
assert True
def test__get_state(self): # synced
assert True
def test__set_state(self): # synced
assert True
| 20.2 | 40 | 0.633663 | 25 | 202 | 4.84 | 0.48 | 0.173554 | 0.396694 | 0.495868 | 0.694215 | 0.446281 | 0 | 0 | 0 | 0 | 0 | 0 | 0.30198 | 202 | 9 | 41 | 22.444444 | 0.858156 | 0.09901 | 0 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.428571 | 1 | 0.428571 | false | 0 | 0 | 0 | 0.571429 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
e6dc955b3e9ee90484dc559dc2b32a31ba0df4aa | 10,481 | py | Python | wagtail/admin/tests/pages/test_page_locking.py | samgans/wagtail | 48a8af71e5333fb701476702bd784fa407567e25 | [
"BSD-3-Clause"
] | 2 | 2019-05-23T01:31:18.000Z | 2020-06-27T21:19:10.000Z | wagtail/admin/tests/pages/test_page_locking.py | samgans/wagtail | 48a8af71e5333fb701476702bd784fa407567e25 | [
"BSD-3-Clause"
] | 6 | 2020-08-26T03:00:03.000Z | 2020-09-24T02:59:14.000Z | wagtail/admin/tests/pages/test_page_locking.py | samgans/wagtail | 48a8af71e5333fb701476702bd784fa407567e25 | [
"BSD-3-Clause"
] | 1 | 2020-04-10T03:21:10.000Z | 2020-04-10T03:21:10.000Z | from django.contrib.auth.models import Group, Permission
from django.test import TestCase
from django.urls import reverse
from django.utils import timezone
from wagtail.core.models import Page
from wagtail.tests.testapp.models import SimplePage
from wagtail.tests.utils import WagtailTestUtils
class TestLocking(TestCase, WagtailTestUtils):
    """Tests for the admin views that lock and unlock a page for editing."""

    def setUp(self):
        # Fetch the root page that the test page will live under
        self.root_page = Page.objects.get(id=2)

        # Log in; WagtailTestUtils.login() signs in a superuser by default
        self.user = self.login()

        # Create an unpublished page for the lock/unlock tests to act on
        self.child_page = SimplePage(
            title="Hello world!",
            slug='hello-world',
            content="hello",
            live=False,
        )
        self.root_page.add_child(instance=self.child_page)

    # -- helpers -----------------------------------------------------------

    def _lock_child_page(self, owned=True):
        """Lock the child page directly in the database.

        When ``owned`` is True the lock is attributed to ``self.user``.
        """
        self.child_page.locked = True
        if owned:
            self.child_page.locked_by = self.user
        self.child_page.locked_at = timezone.now()
        self.child_page.save()

    def _refreshed_child_page(self):
        """Return the child page re-read from the database."""
        return Page.objects.get(id=self.child_page.id)

    def _assert_locked_by_user(self):
        """Assert the child page is locked and the lock belongs to self.user."""
        page = self._refreshed_child_page()
        self.assertTrue(page.locked)
        self.assertEqual(page.locked_by, self.user)
        self.assertIsNotNone(page.locked_at)

    def _assert_unlocked(self):
        """Assert the child page carries no lock at all."""
        page = self._refreshed_child_page()
        self.assertFalse(page.locked)
        self.assertIsNone(page.locked_by)
        self.assertIsNone(page.locked_at)

    # -- lock view ---------------------------------------------------------

    def test_lock_post(self):
        response = self.client.post(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )))

        # A plain POST redirects back to the explorer for the parent page
        self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        # The page is now locked by the current user
        self._assert_locked_by_user()

    def test_lock_get(self):
        response = self.client.get(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )))

        # GET is not an allowed method for the lock view
        self.assertEqual(response.status_code, 405)

        # The page remains unlocked
        self._assert_unlocked()

    def test_lock_post_already_locked(self):
        self._lock_child_page()

        response = self.client.post(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )))

        # Still redirects to the explorer
        self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        # The existing lock is untouched
        self._assert_locked_by_user()

    def test_lock_post_with_good_redirect(self):
        response = self.client.post(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )), {
            'next': reverse('wagtailadmin_pages:edit', args=(self.child_page.id, ))
        })

        # An in-site 'next' URL is honoured
        self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))

        self._assert_locked_by_user()

    def test_lock_post_with_bad_redirect(self):
        response = self.client.post(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )), {
            'next': 'http://www.google.co.uk'
        })

        # An off-site 'next' URL is ignored in favour of the explorer
        self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        self._assert_locked_by_user()

    def test_lock_post_bad_page(self):
        response = self.client.post(reverse('wagtailadmin_pages:lock', args=(9999, )))

        # An unknown page id gives a 404
        self.assertEqual(response.status_code, 404)

        # Our page is untouched
        self._assert_unlocked()

    def test_lock_post_bad_permissions(self):
        # Strip the user down to bare admin access with no page permissions
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
        )
        self.user.save()

        response = self.client.post(reverse('wagtailadmin_pages:lock', args=(self.child_page.id, )))

        # Locking is forbidden without page permissions
        self.assertEqual(response.status_code, 403)

        self._assert_unlocked()

    # -- unlock view -------------------------------------------------------

    def test_unlock_post(self):
        self._lock_child_page()

        response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )))

        self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        # The lock has been cleared
        self._assert_unlocked()

    def test_unlock_get(self):
        self._lock_child_page()

        response = self.client.get(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )))

        # GET is not an allowed method for the unlock view
        self.assertEqual(response.status_code, 405)

        # The lock is still in place
        self._assert_locked_by_user()

    def test_unlock_post_already_unlocked(self):
        response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )))

        self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        self._assert_unlocked()

    def test_unlock_post_with_good_redirect(self):
        self._lock_child_page()

        response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )), {
            'next': reverse('wagtailadmin_pages:edit', args=(self.child_page.id, ))
        })

        # An in-site 'next' URL is honoured
        self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))

        self._assert_unlocked()

    def test_unlock_post_with_bad_redirect(self):
        self._lock_child_page()

        response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )), {
            'next': 'http://www.google.co.uk'
        })

        # An off-site 'next' URL is ignored in favour of the explorer
        self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        self._assert_unlocked()

    def test_unlock_post_bad_page(self):
        self._lock_child_page()

        response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(9999, )))

        self.assertEqual(response.status_code, 404)

        # The lock on our page is untouched
        self._assert_locked_by_user()

    def test_unlock_post_bad_permissions(self):
        # Demote the user to a plain editor
        self.user.is_superuser = False
        self.user.groups.add(Group.objects.get(name="Editors"))
        self.user.save()

        # Lock the page without attributing the lock to anyone
        self._lock_child_page(owned=False)

        response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )))

        # An editor may not unlock a page that is not locked by them
        self.assertEqual(response.status_code, 403)

        # The lock remains (locked_by was never set, so only the flag and
        # timestamp are checked)
        page = self._refreshed_child_page()
        self.assertTrue(page.locked)
        self.assertIsNotNone(page.locked_at)

    def test_unlock_post_own_page_with_bad_permissions(self):
        # Unlike the previous test, a user may unlock a page they locked themselves
        self.user.is_superuser = False
        self.user.groups.add(Group.objects.get(name="Editors"))
        self.user.save()

        # Lock the page, attributed to this user
        self._lock_child_page()

        response = self.client.post(reverse('wagtailadmin_pages:unlock', args=(self.child_page.id, )), {
            'next': reverse('wagtailadmin_pages:edit', args=(self.child_page.id, ))
        })

        # The in-site 'next' URL is honoured
        self.assertRedirects(response, reverse('wagtailadmin_pages:edit', args=(self.child_page.id, )))

        # The page has been unlocked
        self._assert_unlocked()
| 37.166667 | 104 | 0.663009 | 1,334 | 10,481 | 5.043478 | 0.086207 | 0.089625 | 0.129459 | 0.075803 | 0.889863 | 0.88481 | 0.88481 | 0.881243 | 0.879756 | 0.873216 | 0 | 0.003333 | 0.227078 | 10,481 | 281 | 105 | 37.298932 | 0.827182 | 0.102662 | 0 | 0.770115 | 0 | 0 | 0.080244 | 0.053318 | 0 | 0 | 0 | 0 | 0.33908 | 1 | 0.091954 | false | 0 | 0.04023 | 0 | 0.137931 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e6e937e9fd979949ce965f12d276365e6944ad6c | 360,478 | py | Python | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_policymgr_cfg.py | CiscoDevNet/ydk-py | 073731fea50694d0bc6cd8ebf10fec308dcc0aa9 | [
"ECL-2.0",
"Apache-2.0"
] | 177 | 2016-03-15T17:03:51.000Z | 2022-03-18T16:48:44.000Z | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_policymgr_cfg.py | CiscoDevNet/ydk-py | 073731fea50694d0bc6cd8ebf10fec308dcc0aa9 | [
"ECL-2.0",
"Apache-2.0"
] | 18 | 2016-03-30T10:45:22.000Z | 2020-07-14T16:28:13.000Z | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_policymgr_cfg.py | CiscoDevNet/ydk-py | 073731fea50694d0bc6cd8ebf10fec308dcc0aa9 | [
"ECL-2.0",
"Apache-2.0"
] | 85 | 2016-03-16T20:38:57.000Z | 2022-02-22T04:26:02.000Z | """ Cisco_IOS_XR_infra_policymgr_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR ASR9k policy manager configuration.
Copyright (c) 2013, 2015\-2019 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class AuthorizeIdentifier(Enum):
    """
    AuthorizeIdentifier (Enum Class)

    Authorize identifier.

    .. data:: circuit_id = 0

        Authorize circuit ID.

    .. data:: dhcp_client_id = 1

        Authorize dhcp client ID.

    .. data:: remote_id = 2

        Authorize remote ID.

    .. data:: source_address_ipv4 = 3

        Authorize source IPv4 address.

    .. data:: source_address_ipv6 = 4

        Authorize source IPv6 address.

    .. data:: source_address_mac = 5

        Authorize source MAC address.

    .. data:: username = 6

        Authorize username.

    """

    # Each member pairs the YANG enum's numeric value with its YANG name.
    # Auto-generated from the Cisco-IOS-XR-infra-policymgr-cfg YANG model;
    # do not edit values by hand.
    circuit_id = Enum.YLeaf(0, "circuit-id")

    dhcp_client_id = Enum.YLeaf(1, "dhcp-client-id")

    remote_id = Enum.YLeaf(2, "remote-id")

    source_address_ipv4 = Enum.YLeaf(3, "source-address-ipv4")

    source_address_ipv6 = Enum.YLeaf(4, "source-address-ipv6")

    source_address_mac = Enum.YLeaf(5, "source-address-mac")

    username = Enum.YLeaf(6, "username")

    @staticmethod
    def _meta_info():
        # NOTE(review): the meta-table module is imported lazily here,
        # presumably to avoid a circular import with the generated _meta
        # package — confirm before restructuring.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['AuthorizeIdentifier']
class ClassMapType(Enum):
    """
    ClassMapType (Enum Class)

    Policy manager class\-map type.

    .. data:: qos = 1

        QoS Classmap.

    .. data:: traffic = 3

        TRAFFIC Classmap.

    .. data:: control = 4

        Control Subscriber Classmap.

    """

    # Each member pairs the YANG enum's numeric value with its YANG name.
    # Note the values are non-contiguous (1, 3, 4) — they come straight
    # from the YANG model, so gaps are intentional.
    qos = Enum.YLeaf(1, "qos")

    traffic = Enum.YLeaf(3, "traffic")

    control = Enum.YLeaf(4, "control")

    @staticmethod
    def _meta_info():
        # NOTE(review): lazy import of the generated meta tables,
        # presumably to avoid a circular import — confirm before changing.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['ClassMapType']
class EventType(Enum):
    """
    EventType (Enum Class)

    Event type.

    .. data:: account_logoff = 0

        Account logoff event.

    .. data:: account_logon = 1

        Account logon event.

    .. data:: authentication_failure = 2

        Authentication failure event.

    .. data:: authentication_no_response = 3

        Authentication no response event.

    .. data:: authorization_failure = 4

        Authorization failure event.

    .. data:: authorization_no_response = 5

        Authorization no response event.

    .. data:: credit_exhausted = 6

        Credit exhaustion event.

    .. data:: exception = 7

        Exception event.

    .. data:: idle_timeout = 8

        Idle timeout event.

    .. data:: quota_depleted = 9

        Quota depletion event.

    .. data:: service_start = 10

        Service start event.

    .. data:: service_stop = 11

        Service stop event.

    .. data:: session_activate = 12

        Session activate event.

    .. data:: session_start = 13

        Session start event.

    .. data:: session_stop = 14

        Session stop event.

    .. data:: timer_expiry = 15

        Timer expiry event.

    """

    # Each member pairs the YANG enum's numeric value with its YANG name.
    # Auto-generated from the YANG model; do not edit values by hand.
    account_logoff = Enum.YLeaf(0, "account-logoff")

    account_logon = Enum.YLeaf(1, "account-logon")

    authentication_failure = Enum.YLeaf(2, "authentication-failure")

    authentication_no_response = Enum.YLeaf(3, "authentication-no-response")

    authorization_failure = Enum.YLeaf(4, "authorization-failure")

    authorization_no_response = Enum.YLeaf(5, "authorization-no-response")

    credit_exhausted = Enum.YLeaf(6, "credit-exhausted")

    exception = Enum.YLeaf(7, "exception")

    idle_timeout = Enum.YLeaf(8, "idle-timeout")

    quota_depleted = Enum.YLeaf(9, "quota-depleted")

    service_start = Enum.YLeaf(10, "service-start")

    service_stop = Enum.YLeaf(11, "service-stop")

    session_activate = Enum.YLeaf(12, "session-activate")

    session_start = Enum.YLeaf(13, "session-start")

    session_stop = Enum.YLeaf(14, "session-stop")

    timer_expiry = Enum.YLeaf(15, "timer-expiry")

    @staticmethod
    def _meta_info():
        # NOTE(review): lazy import of the generated meta tables,
        # presumably to avoid a circular import — confirm before changing.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['EventType']
class ExecutionStrategy(Enum):
    """
    ExecutionStrategy (Enum Class)

    Executuion strategy.

    .. data:: do_all = 0

        Do all actions.

    .. data:: do_until_failure = 1

        Do all actions until failure.

    .. data:: do_until_success = 2

        Do all actions until success.

    """

    # Each member pairs the YANG enum's numeric value with its YANG name.
    do_all = Enum.YLeaf(0, "do-all")

    do_until_failure = Enum.YLeaf(1, "do-until-failure")

    do_until_success = Enum.YLeaf(2, "do-until-success")

    @staticmethod
    def _meta_info():
        # NOTE(review): lazy import of the generated meta tables,
        # presumably to avoid a circular import — confirm before changing.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['ExecutionStrategy']
class PmapClassMapType(Enum):
    """
    PmapClassMapType (Enum Class)

    Policy manager class\-map type.

    .. data:: qos = 1

        QoS Classmap.

    .. data:: traffic = 2

        TRAFFIC Classmap.

    .. data:: subscriber_control = 3

        Subscriber Control Classmap.

    """

    # Each member pairs the YANG enum's numeric value with its YANG name.
    # NOTE(review): 'traffic' is 2 here but 3 in ClassMapType — the two
    # enums come from different YANG typedefs and are not interchangeable.
    qos = Enum.YLeaf(1, "qos")

    traffic = Enum.YLeaf(2, "traffic")

    subscriber_control = Enum.YLeaf(3, "subscriber-control")

    @staticmethod
    def _meta_info():
        # NOTE(review): lazy import of the generated meta tables,
        # presumably to avoid a circular import — confirm before changing.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PmapClassMapType']
class PolicyMapType(Enum):
    """
    PolicyMapType (Enum Class)

    Policy manager policy\-map type.

    .. data:: qos = 1

        QoS Policymap

    .. data:: pbr = 2

        PBR Policymap

    .. data:: traffic = 3

        TRAFFIC Policymap

    .. data:: subscriber_control = 4

        SUBSCRIBER-CONTROL Policymap

    .. data:: accounting = 5

        Accounting Policymap

    .. data:: redirect = 6

        REDIRECT Policy map

    .. data:: flow_monitor = 7

        FLOWMONITOR Policy map

    """

    # Each member pairs the YANG enum's numeric value with its YANG name.
    # Auto-generated from the YANG model; do not edit values by hand.
    qos = Enum.YLeaf(1, "qos")

    pbr = Enum.YLeaf(2, "pbr")

    traffic = Enum.YLeaf(3, "traffic")

    subscriber_control = Enum.YLeaf(4, "subscriber-control")

    accounting = Enum.YLeaf(5, "accounting")

    redirect = Enum.YLeaf(6, "redirect")

    flow_monitor = Enum.YLeaf(7, "flow-monitor")

    @staticmethod
    def _meta_info():
        # NOTE(review): lazy import of the generated meta tables,
        # presumably to avoid a circular import — confirm before changing.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyMapType']
class PolicyManager(_Entity_):
"""
Global Policy Manager configuration.
.. attribute:: class_maps
Class\-maps configuration
**type**\: :py:class:`ClassMaps <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps>`
.. attribute:: policy_maps
Policy\-maps configuration
**type**\: :py:class:`PolicyMaps <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager, self).__init__()
self._top_entity = None
self.yang_name = "policy-manager"
self.yang_parent_name = "Cisco-IOS-XR-infra-policymgr-cfg"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("class-maps", ("class_maps", PolicyManager.ClassMaps)), ("policy-maps", ("policy_maps", PolicyManager.PolicyMaps))])
self._leafs = OrderedDict()
self.class_maps = PolicyManager.ClassMaps()
self.class_maps.parent = self
self._children_name_map["class_maps"] = "class-maps"
self.policy_maps = PolicyManager.PolicyMaps()
self.policy_maps.parent = self
self._children_name_map["policy_maps"] = "policy-maps"
self._segment_path = lambda: "Cisco-IOS-XR-infra-policymgr-cfg:policy-manager"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager, [], name, value)
class ClassMaps(_Entity_):
"""
Class\-maps configuration.
.. attribute:: class_map
Class\-map configuration
**type**\: list of :py:class:`ClassMap <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.ClassMaps, self).__init__()
self.yang_name = "class-maps"
self.yang_parent_name = "policy-manager"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("class-map", ("class_map", PolicyManager.ClassMaps.ClassMap))])
self._leafs = OrderedDict()
self.class_map = YList(self)
self._segment_path = lambda: "class-maps"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-policymgr-cfg:policy-manager/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.ClassMaps, [], name, value)
class ClassMap(_Entity_):
"""
Class\-map configuration.
.. attribute:: type (key)
Type of class\-map
**type**\: :py:class:`ClassMapType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.ClassMapType>`
.. attribute:: name (key)
Name of class\-map
**type**\: str
**pattern:** [a\-zA\-Z0\-9][a\-zA\-Z0\-9\\.\_@$%+#\:=<>\\\-]{0,62}
.. attribute:: class_map_mode_match_any
Match all match criteria
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: class_map_mode_match_all
Match any match criteria
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: match
Match rules
**type**\: :py:class:`Match <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match>`
.. attribute:: match_not
Match not rules
**type**\: :py:class:`MatchNot <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.MatchNot>`
.. attribute:: description
Description for this policy\-map
**type**\: str
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.ClassMaps.ClassMap, self).__init__()
self.yang_name = "class-map"
self.yang_parent_name = "class-maps"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['type','name']
self._child_classes = OrderedDict([("match", ("match", PolicyManager.ClassMaps.ClassMap.Match)), ("match-not", ("match_not", PolicyManager.ClassMaps.ClassMap.MatchNot))])
self._leafs = OrderedDict([
('type', (YLeaf(YType.enumeration, 'type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg', 'ClassMapType', '')])),
('name', (YLeaf(YType.str, 'name'), ['str'])),
('class_map_mode_match_any', (YLeaf(YType.empty, 'class-map-mode-match-any'), ['Empty'])),
('class_map_mode_match_all', (YLeaf(YType.empty, 'class-map-mode-match-all'), ['Empty'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
])
self.type = None
self.name = None
self.class_map_mode_match_any = None
self.class_map_mode_match_all = None
self.description = None
self.match = PolicyManager.ClassMaps.ClassMap.Match()
self.match.parent = self
self._children_name_map["match"] = "match"
self.match_not = PolicyManager.ClassMaps.ClassMap.MatchNot()
self.match_not.parent = self
self._children_name_map["match_not"] = "match-not"
self._segment_path = lambda: "class-map" + "[type='" + str(self.type) + "']" + "[name='" + str(self.name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-policymgr-cfg:policy-manager/class-maps/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.ClassMaps.ClassMap, ['type', 'name', 'class_map_mode_match_any', 'class_map_mode_match_all', 'description'], name, value)
class Match(_Entity_):
"""
Match rules.
.. attribute:: ipv4_dscp
Match IPv4 DSCP
**type**\: list of str
**pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(([0\-9]\|[1\-5][0\-9]\|6[0\-3])\-([0\-9]\|[1\-5][0\-9]\|6[0\-3]))\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
.. attribute:: ipv6_dscp
Match IPv6 DSCP
**type**\: list of str
**pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(([0\-9]\|[1\-5][0\-9]\|6[0\-3])\-([0\-9]\|[1\-5][0\-9]\|6[0\-3]))\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
.. attribute:: dscp
Match DSCP
**type**\: list of str
**pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(([0\-9]\|[1\-5][0\-9]\|6[0\-3])\-([0\-9]\|[1\-5][0\-9]\|6[0\-3]))\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
.. attribute:: ipv4_precedence
Match IPv4 precedence
**type**\: union of the below types:
**type**\: list of int
**range:** 0..7
**type**\: list of str
**pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
.. attribute:: ipv6_precedence
Match IPv6 precedence
**type**\: union of the below types:
**type**\: list of int
**range:** 0..7
**type**\: list of str
**pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
.. attribute:: precedence
Match precedence
**type**\: union of the below types:
**type**\: list of int
**range:** 0..7
**type**\: list of str
**pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
.. attribute:: qos_group
Match QoS group. Should be value 0..512 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: traffic_class
Match Traffic Class. Should be value 0..63 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: cos
Match CoS
**type**\: list of int
**range:** 0..7
.. attribute:: inner_cos
Match inner CoS
**type**\: list of int
**range:** 0..7
.. attribute:: dei
Match DEI bit
**type**\: int
**range:** 0..1
.. attribute:: dei_inner
Match DEI INNER bit
**type**\: int
**range:** 0..1
.. attribute:: protocol
Match protocol
**type**\: list of str
**pattern:** ([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\|(([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\\-([1\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5]))\|((ahp)\|(dhcpv4)\|(dhcpv6)\|(eigrp)\|(esp)\|(gre)\|(icmp)\|(igmp)\|(igrp)\|(ipinip)\|(ipv4)\|(ipv6)\|(ipv6icmp)\|(mpls)\|(nos)\|(ospf)\|(pcp)\|(pim)\|(ppp)\|(sctp)\|(tcp)\|(udp))
.. attribute:: ipv4_acl
Match IPv4 ACL
**type**\: list of str
**length:** 1..64
.. attribute:: ipv6_acl
Match IPv6 ACL
**type**\: list of str
**length:** 1..64
.. attribute:: ethernet_services_acl
Match Ethernet Services
**type**\: list of str
**length:** 1..64
.. attribute:: mpls_experimental_topmost
Match MPLS experimental topmost label
**type**\: list of int
**range:** 0..7
.. attribute:: mpls_experimental_imposition
Match MPLS experimental imposition label
**type**\: list of int
**range:** 0..7
.. attribute:: discard_class
Match discard class
**type**\: list of int
**range:** 0..7
.. attribute:: ipv4_packet_length
Match IPv4 packet length. Should be value 0..65535 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: ipv6_packet_length
Match IPv6 packet length. Should be value 0..65535 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: packet_length
Match packet length. Should be value 0..65535 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: mpls_disposition_ipv4_access_list
Match MPLS Label Disposition IPv4 access list
**type**\: list of str
**length:** 1..32
.. attribute:: mpls_disposition_ipv6_access_list
Match MPLS Label Disposition IPv6 access list
**type**\: list of str
**length:** 1..32
.. attribute:: mpls_disp_class_map
Match MPLS Label Disposition class\-map
**type**\: list of str
**length:** 1..32
.. attribute:: vlan
Match VLAN ID
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: inner_vlan
Match inner VLAN ID
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: flow_tag
Match flow\-tag. Should be value 1..63 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: ethertype
Match Ethertype
**type**\: list of str
**pattern:** ((153[6\-9]\|15[4\-9][0\-9]\|1[6\-9][0\-9][0\-9]\|[2\-9][0\-9][0\-9][0\-9])\|([1\-5][0\-9][0\-9][0\-9][0\-9]\|6[0\-4][0\-9][0\-9][0\-9])\|(65[0\-4][0\-9][0\-9]\|655[0\-2][0\-9]\|6553[0\-5]))\|((arp)\|(ipv4)\|(ipv6))
.. attribute:: destination_address_ipv4
Match destination IPv4 address
**type**\: list of :py:class:`DestinationAddressIpv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match.DestinationAddressIpv4>`
.. attribute:: destination_address_ipv6
Match destination IPv6 address
**type**\: list of :py:class:`DestinationAddressIpv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match.DestinationAddressIpv6>`
.. attribute:: destination_port
Match destination port. Should be value 0..65535 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: fragment_type
Match fragment type for a packet
**type**\: list of str
**pattern:** (dont\-fragment)\|(first\-fragment)\|(is\-fragment)\|(last\-fragment)
.. attribute:: frame_relay_dlci
Match frame\-relay DLCI value. Should be value 16..1007 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: fr_de
Match FrameRelay DE bit
**type**\: list of int
**range:** 0..1
.. attribute:: icmpv4_code
Match IPv4 ICMP code. Should be value 0..255 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: icmpv4_type
Match IPv4 ICMP type. Should be value 0..255 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: icmpv6_code
Match IPv6 ICMP code. Should be value 0..255 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: icmpv6_type
Match IPv6 ICMP type. Should be value 0..255 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: source_address_ipv4
Match source IPv4 address
**type**\: list of :py:class:`SourceAddressIpv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match.SourceAddressIpv4>`
.. attribute:: source_address_ipv6
Match source IPv6 address
**type**\: list of :py:class:`SourceAddressIpv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match.SourceAddressIpv6>`
.. attribute:: source_port
Match source port. Should be value 0..65535 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: tcp_flag
Match TCP flags
**type**\: int
**range:** 0..4095
.. attribute:: authen_status
Match authentication status
**type**\: str
**pattern:** (authenticated)\|(unauthenticated)
.. attribute:: circuit_id
Match Circuit ID
**type**\: list of str
**length:** 1..32
.. attribute:: circuit_id_regex
Match Circuit id regex
**type**\: list of str
**length:** 1..32
.. attribute:: dhcp_client_id
Match dhcp client ID
**type**\: list of :py:class:`DhcpClientId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match.DhcpClientId>`
.. attribute:: dhcp_client_id_regex
Match dhcp client id regex
**type**\: list of :py:class:`DhcpClientIdRegex <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match.DhcpClientIdRegex>`
.. attribute:: domain_name
Match domain name
**type**\: list of :py:class:`DomainName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match.DomainName>`
.. attribute:: domain_name_regex
Match domain name
**type**\: list of :py:class:`DomainNameRegex <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match.DomainNameRegex>`
.. attribute:: remote_id
Match remote ID
**type**\: list of str
**length:** 1..32
.. attribute:: remote_id_regex
Match remote id regex
**type**\: list of str
**length:** 1..32
.. attribute:: service_name
Match servicve name
**type**\: list of str
**length:** 1..32
.. attribute:: service_name_regex
Match servicve name regular expression
**type**\: list of str
**length:** 1..32
.. attribute:: timer
Match timer
**type**\: list of str
**length:** 1..32
.. attribute:: timer_regex
Match timer regular expression
**type**\: list of str
**length:** 1..32
.. attribute:: user_name
Match user name
**type**\: list of str
**length:** 1..32
.. attribute:: user_name_regex
Match user name regular expression
**type**\: list of str
**length:** 1..32
.. attribute:: source_mac
Match source MAC address
**type**\: list of str
.. attribute:: destination_mac
Match destination MAC address
**type**\: list of str
.. attribute:: vpls_control
Match VPLS control
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vpls_broadcast
Match VPLS Broadcast
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vpls_multicast
Match VPLS Multicast
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vpls_known
Match VPLS Known
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vpls_unknown
Match VPLS Unknown
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: atm_clp
Match ATM CLP bit
**type**\: list of int
**range:** 0..1
.. attribute:: atm_oam
Match ATM OAM
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: cac_admit
Match CAC admitted
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: cac_unadmit
Match CAC unadmitted
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: flow
Match flow
**type**\: :py:class:`Flow <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match.Flow>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
    """Initialize the 'match' container: register child classes, leaf descriptors, and defaults."""
    # Python 2/3 compatible superclass initialization of the YDK _Entity_ base.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PolicyManager.ClassMaps.ClassMap.Match, self).__init__()

    # YANG element identity and position in the schema tree.
    self.yang_name = "match"
    self.yang_parent_name = "class-map"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Map YANG child element names to (python attribute name, child class) pairs.
    self._child_classes = OrderedDict([("destination-address-ipv4", ("destination_address_ipv4", PolicyManager.ClassMaps.ClassMap.Match.DestinationAddressIpv4)), ("destination-address-ipv6", ("destination_address_ipv6", PolicyManager.ClassMaps.ClassMap.Match.DestinationAddressIpv6)), ("source-address-ipv4", ("source_address_ipv4", PolicyManager.ClassMaps.ClassMap.Match.SourceAddressIpv4)), ("source-address-ipv6", ("source_address_ipv6", PolicyManager.ClassMaps.ClassMap.Match.SourceAddressIpv6)), ("dhcp-client-id", ("dhcp_client_id", PolicyManager.ClassMaps.ClassMap.Match.DhcpClientId)), ("dhcp-client-id-regex", ("dhcp_client_id_regex", PolicyManager.ClassMaps.ClassMap.Match.DhcpClientIdRegex)), ("domain-name", ("domain_name", PolicyManager.ClassMaps.ClassMap.Match.DomainName)), ("domain-name-regex", ("domain_name_regex", PolicyManager.ClassMaps.ClassMap.Match.DomainNameRegex)), ("flow", ("flow", PolicyManager.ClassMaps.ClassMap.Match.Flow))])
    # Leaf descriptors: python name -> (YLeaf/YLeafList with YANG name, accepted python types).
    self._leafs = OrderedDict([
        ('ipv4_dscp', (YLeafList(YType.str, 'ipv4-dscp'), ['str'])),
        ('ipv6_dscp', (YLeafList(YType.str, 'ipv6-dscp'), ['str'])),
        ('dscp', (YLeafList(YType.str, 'dscp'), ['str'])),
        ('ipv4_precedence', (YLeafList(YType.str, 'ipv4-precedence'), ['int','str'])),
        ('ipv6_precedence', (YLeafList(YType.str, 'ipv6-precedence'), ['int','str'])),
        ('precedence', (YLeafList(YType.str, 'precedence'), ['int','str'])),
        ('qos_group', (YLeafList(YType.str, 'qos-group'), ['str'])),
        ('traffic_class', (YLeafList(YType.str, 'traffic-class'), ['str'])),
        ('cos', (YLeafList(YType.uint8, 'cos'), ['int'])),
        ('inner_cos', (YLeafList(YType.uint8, 'inner-cos'), ['int'])),
        ('dei', (YLeaf(YType.uint8, 'dei'), ['int'])),
        ('dei_inner', (YLeaf(YType.uint8, 'dei-inner'), ['int'])),
        ('protocol', (YLeafList(YType.str, 'protocol'), ['str'])),
        ('ipv4_acl', (YLeafList(YType.str, 'ipv4-acl'), ['str'])),
        ('ipv6_acl', (YLeafList(YType.str, 'ipv6-acl'), ['str'])),
        ('ethernet_services_acl', (YLeafList(YType.str, 'ethernet-services-acl'), ['str'])),
        ('mpls_experimental_topmost', (YLeafList(YType.uint8, 'mpls-experimental-topmost'), ['int'])),
        ('mpls_experimental_imposition', (YLeafList(YType.uint8, 'mpls-experimental-imposition'), ['int'])),
        ('discard_class', (YLeafList(YType.uint8, 'discard-class'), ['int'])),
        ('ipv4_packet_length', (YLeafList(YType.str, 'ipv4-packet-length'), ['str'])),
        ('ipv6_packet_length', (YLeafList(YType.str, 'ipv6-packet-length'), ['str'])),
        ('packet_length', (YLeafList(YType.str, 'packet-length'), ['str'])),
        ('mpls_disposition_ipv4_access_list', (YLeafList(YType.str, 'mpls-disposition-ipv4-access-list'), ['str'])),
        ('mpls_disposition_ipv6_access_list', (YLeafList(YType.str, 'mpls-disposition-ipv6-access-list'), ['str'])),
        ('mpls_disp_class_map', (YLeafList(YType.str, 'mpls-disp-class-map'), ['str'])),
        ('vlan', (YLeafList(YType.str, 'vlan'), ['str'])),
        ('inner_vlan', (YLeafList(YType.str, 'inner-vlan'), ['str'])),
        ('flow_tag', (YLeafList(YType.str, 'flow-tag'), ['str'])),
        ('ethertype', (YLeafList(YType.str, 'ethertype'), ['str'])),
        ('destination_port', (YLeafList(YType.str, 'destination-port'), ['str'])),
        ('fragment_type', (YLeafList(YType.str, 'fragment-type'), ['str'])),
        ('frame_relay_dlci', (YLeafList(YType.str, 'frame-relay-dlci'), ['str'])),
        ('fr_de', (YLeafList(YType.uint8, 'fr-de'), ['int'])),
        ('icmpv4_code', (YLeafList(YType.str, 'icmpv4-code'), ['str'])),
        ('icmpv4_type', (YLeafList(YType.str, 'icmpv4-type'), ['str'])),
        ('icmpv6_code', (YLeafList(YType.str, 'icmpv6-code'), ['str'])),
        ('icmpv6_type', (YLeafList(YType.str, 'icmpv6-type'), ['str'])),
        ('source_port', (YLeafList(YType.str, 'source-port'), ['str'])),
        ('tcp_flag', (YLeaf(YType.uint16, 'tcp-flag'), ['int'])),
        ('authen_status', (YLeaf(YType.str, 'authen-status'), ['str'])),
        ('circuit_id', (YLeafList(YType.str, 'circuit-id'), ['str'])),
        ('circuit_id_regex', (YLeafList(YType.str, 'circuit-id-regex'), ['str'])),
        ('remote_id', (YLeafList(YType.str, 'remote-id'), ['str'])),
        ('remote_id_regex', (YLeafList(YType.str, 'remote-id-regex'), ['str'])),
        ('service_name', (YLeafList(YType.str, 'service-name'), ['str'])),
        ('service_name_regex', (YLeafList(YType.str, 'service-name-regex'), ['str'])),
        ('timer', (YLeafList(YType.str, 'timer'), ['str'])),
        ('timer_regex', (YLeafList(YType.str, 'timer-regex'), ['str'])),
        ('user_name', (YLeafList(YType.str, 'user-name'), ['str'])),
        ('user_name_regex', (YLeafList(YType.str, 'user-name-regex'), ['str'])),
        ('source_mac', (YLeafList(YType.str, 'source-mac'), ['str'])),
        ('destination_mac', (YLeafList(YType.str, 'destination-mac'), ['str'])),
        ('vpls_control', (YLeaf(YType.empty, 'vpls-control'), ['Empty'])),
        ('vpls_broadcast', (YLeaf(YType.empty, 'vpls-broadcast'), ['Empty'])),
        ('vpls_multicast', (YLeaf(YType.empty, 'vpls-multicast'), ['Empty'])),
        ('vpls_known', (YLeaf(YType.empty, 'vpls-known'), ['Empty'])),
        ('vpls_unknown', (YLeaf(YType.empty, 'vpls-unknown'), ['Empty'])),
        ('atm_clp', (YLeafList(YType.uint8, 'atm-clp'), ['int'])),
        ('atm_oam', (YLeaf(YType.empty, 'atm-oam'), ['Empty'])),
        ('cac_admit', (YLeaf(YType.empty, 'cac-admit'), ['Empty'])),
        ('cac_unadmit', (YLeaf(YType.empty, 'cac-unadmit'), ['Empty'])),
    ])
    # Default values: leaf-lists start empty, scalar leafs start unset (None).
    # NOTE: each assignment below is routed through the overridden __setattr__,
    # which registers the value with the leaf descriptors declared above.
    self.ipv4_dscp = []
    self.ipv6_dscp = []
    self.dscp = []
    self.ipv4_precedence = []
    self.ipv6_precedence = []
    self.precedence = []
    self.qos_group = []
    self.traffic_class = []
    self.cos = []
    self.inner_cos = []
    self.dei = None
    self.dei_inner = None
    self.protocol = []
    self.ipv4_acl = []
    self.ipv6_acl = []
    self.ethernet_services_acl = []
    self.mpls_experimental_topmost = []
    self.mpls_experimental_imposition = []
    self.discard_class = []
    self.ipv4_packet_length = []
    self.ipv6_packet_length = []
    self.packet_length = []
    self.mpls_disposition_ipv4_access_list = []
    self.mpls_disposition_ipv6_access_list = []
    self.mpls_disp_class_map = []
    self.vlan = []
    self.inner_vlan = []
    self.flow_tag = []
    self.ethertype = []
    self.destination_port = []
    self.fragment_type = []
    self.frame_relay_dlci = []
    self.fr_de = []
    self.icmpv4_code = []
    self.icmpv4_type = []
    self.icmpv6_code = []
    self.icmpv6_type = []
    self.source_port = []
    self.tcp_flag = None
    self.authen_status = None
    self.circuit_id = []
    self.circuit_id_regex = []
    self.remote_id = []
    self.remote_id_regex = []
    self.service_name = []
    self.service_name_regex = []
    self.timer = []
    self.timer_regex = []
    self.user_name = []
    self.user_name_regex = []
    self.source_mac = []
    self.destination_mac = []
    self.vpls_control = None
    self.vpls_broadcast = None
    self.vpls_multicast = None
    self.vpls_known = None
    self.vpls_unknown = None
    self.atm_clp = []
    self.atm_oam = None
    self.cac_admit = None
    self.cac_unadmit = None
    # Singleton child container 'flow'.
    self.flow = PolicyManager.ClassMaps.ClassMap.Match.Flow()
    self.flow.parent = self
    self._children_name_map["flow"] = "flow"
    # YANG list children are modeled as YList containers.
    self.destination_address_ipv4 = YList(self)
    self.destination_address_ipv6 = YList(self)
    self.source_address_ipv4 = YList(self)
    self.source_address_ipv6 = YList(self)
    self.dhcp_client_id = YList(self)
    self.dhcp_client_id_regex = YList(self)
    self.domain_name = YList(self)
    self.domain_name_regex = YList(self)
    self._segment_path = lambda: "match"
    # Must be the last assignment: freezes the attribute set of this entity.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute write through YDK's validation/registration hook,
    # which checks the name against the declared leafs and the frozen state.
    self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match, ['ipv4_dscp', 'ipv6_dscp', 'dscp', 'ipv4_precedence', 'ipv6_precedence', 'precedence', 'qos_group', 'traffic_class', 'cos', 'inner_cos', 'dei', 'dei_inner', 'protocol', 'ipv4_acl', 'ipv6_acl', 'ethernet_services_acl', 'mpls_experimental_topmost', 'mpls_experimental_imposition', 'discard_class', 'ipv4_packet_length', 'ipv6_packet_length', 'packet_length', 'mpls_disposition_ipv4_access_list', 'mpls_disposition_ipv6_access_list', 'mpls_disp_class_map', 'vlan', 'inner_vlan', 'flow_tag', 'ethertype', 'destination_port', 'fragment_type', 'frame_relay_dlci', 'fr_de', 'icmpv4_code', 'icmpv4_type', 'icmpv6_code', 'icmpv6_type', 'source_port', 'tcp_flag', 'authen_status', 'circuit_id', 'circuit_id_regex', 'remote_id', 'remote_id_regex', 'service_name', 'service_name_regex', 'timer', 'timer_regex', 'user_name', 'user_name_regex', 'source_mac', 'destination_mac', 'vpls_control', 'vpls_broadcast', 'vpls_multicast', 'vpls_known', 'vpls_unknown', 'atm_clp', 'atm_oam', 'cac_admit', 'cac_unadmit'], name, value)
class DestinationAddressIpv4(_Entity_):
    """
    Match destination IPv4 address.
    .. attribute:: address (key)
    IPv4 address
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    .. attribute:: netmask (key)
    IPv4 netmask
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.Match.DestinationAddressIpv4, self).__init__()
        self.yang_name = "destination-address-ipv4"
        self.yang_parent_name = "match"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # The YANG list key is the (address, netmask) pair.
        self.ylist_key_names = ['address','netmask']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('address', (YLeaf(YType.str, 'address'), ['str'])),
            ('netmask', (YLeaf(YType.str, 'netmask'), ['str'])),
        ])
        self.address = None
        self.netmask = None
        # XPath segment carries both key predicates.
        self._segment_path = lambda: "destination-address-ipv4" + "[address='" + str(self.address) + "']" + "[netmask='" + str(self.netmask) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate/register every attribute write via the YDK hook.
        self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match.DestinationAddressIpv4, ['address', 'netmask'], name, value)

    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table to avoid a circular import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match.DestinationAddressIpv4']['meta_info']
class DestinationAddressIpv6(_Entity_):
    """
    Match destination IPv6 address.
    .. attribute:: address (key)
    IPv6 address
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: prefix_length (key)
    IPv6 prefix length
    **type**\: int
    **range:** 0..128
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.Match.DestinationAddressIpv6, self).__init__()
        self.yang_name = "destination-address-ipv6"
        self.yang_parent_name = "match"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # The YANG list key is the (address, prefix_length) pair.
        self.ylist_key_names = ['address','prefix_length']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('address', (YLeaf(YType.str, 'address'), ['str'])),
            ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
        ])
        self.address = None
        self.prefix_length = None
        # XPath segment carries both key predicates.
        self._segment_path = lambda: "destination-address-ipv6" + "[address='" + str(self.address) + "']" + "[prefix-length='" + str(self.prefix_length) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate/register every attribute write via the YDK hook.
        self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match.DestinationAddressIpv6, ['address', 'prefix_length'], name, value)

    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table to avoid a circular import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match.DestinationAddressIpv6']['meta_info']
class SourceAddressIpv4(_Entity_):
    """
    Match source IPv4 address.
    .. attribute:: address (key)
    IPv4 address
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    .. attribute:: netmask (key)
    IPv4 netmask
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.Match.SourceAddressIpv4, self).__init__()
        self.yang_name = "source-address-ipv4"
        self.yang_parent_name = "match"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # The YANG list key is the (address, netmask) pair.
        self.ylist_key_names = ['address','netmask']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('address', (YLeaf(YType.str, 'address'), ['str'])),
            ('netmask', (YLeaf(YType.str, 'netmask'), ['str'])),
        ])
        self.address = None
        self.netmask = None
        # XPath segment carries both key predicates.
        self._segment_path = lambda: "source-address-ipv4" + "[address='" + str(self.address) + "']" + "[netmask='" + str(self.netmask) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate/register every attribute write via the YDK hook.
        self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match.SourceAddressIpv4, ['address', 'netmask'], name, value)

    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table to avoid a circular import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match.SourceAddressIpv4']['meta_info']
class SourceAddressIpv6(_Entity_):
    """
    Match source IPv6 address.
    .. attribute:: address (key)
    IPv6 address
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    .. attribute:: prefix_length (key)
    IPv6 prefix length
    **type**\: int
    **range:** 0..128
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.Match.SourceAddressIpv6, self).__init__()
        self.yang_name = "source-address-ipv6"
        self.yang_parent_name = "match"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # The YANG list key is the (address, prefix_length) pair.
        self.ylist_key_names = ['address','prefix_length']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('address', (YLeaf(YType.str, 'address'), ['str'])),
            ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
        ])
        self.address = None
        self.prefix_length = None
        # XPath segment carries both key predicates.
        self._segment_path = lambda: "source-address-ipv6" + "[address='" + str(self.address) + "']" + "[prefix-length='" + str(self.prefix_length) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate/register every attribute write via the YDK hook.
        self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match.SourceAddressIpv6, ['address', 'prefix_length'], name, value)

    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table to avoid a circular import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match.SourceAddressIpv6']['meta_info']
class DhcpClientId(_Entity_):
    """
    Match dhcp client ID.
    .. attribute:: value (key)
    Dhcp client Id
    **type**\: str
    **length:** 1..32
    .. attribute:: flag (key)
    Dhcp client id Ascii/Hex
    **type**\: str
    **pattern:** (none)\|(ascii)\|(hex)
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.Match.DhcpClientId, self).__init__()
        self.yang_name = "dhcp-client-id"
        self.yang_parent_name = "match"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # The YANG list key is the (value, flag) pair.
        self.ylist_key_names = ['value','flag']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('value', (YLeaf(YType.str, 'value'), ['str'])),
            ('flag', (YLeaf(YType.str, 'flag'), ['str'])),
        ])
        self.value = None
        self.flag = None
        # XPath segment carries both key predicates.
        self._segment_path = lambda: "dhcp-client-id" + "[value='" + str(self.value) + "']" + "[flag='" + str(self.flag) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate/register every attribute write via the YDK hook.
        self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match.DhcpClientId, ['value', 'flag'], name, value)

    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table to avoid a circular import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match.DhcpClientId']['meta_info']
class DhcpClientIdRegex(_Entity_):
    """
    Match dhcp client id regex.
    .. attribute:: value (key)
    Dhcp client id regular expression
    **type**\: str
    **length:** 1..32
    .. attribute:: flag (key)
    Dhcp client Id regex Ascii/Hex
    **type**\: str
    **pattern:** (none)\|(ascii)\|(hex)
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.Match.DhcpClientIdRegex, self).__init__()
        self.yang_name = "dhcp-client-id-regex"
        self.yang_parent_name = "match"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # The YANG list key is the (value, flag) pair.
        self.ylist_key_names = ['value','flag']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('value', (YLeaf(YType.str, 'value'), ['str'])),
            ('flag', (YLeaf(YType.str, 'flag'), ['str'])),
        ])
        self.value = None
        self.flag = None
        # XPath segment carries both key predicates.
        self._segment_path = lambda: "dhcp-client-id-regex" + "[value='" + str(self.value) + "']" + "[flag='" + str(self.flag) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate/register every attribute write via the YDK hook.
        self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match.DhcpClientIdRegex, ['value', 'flag'], name, value)

    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table to avoid a circular import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match.DhcpClientIdRegex']['meta_info']
class DomainName(_Entity_):
    """
    Match domain name.
    .. attribute:: name (key)
    Domain name or regular expression
    **type**\: str
    **length:** 1..32
    .. attribute:: format (key)
    Domain\-format name
    **type**\: str
    **length:** 1..32
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.Match.DomainName, self).__init__()
        self.yang_name = "domain-name"
        self.yang_parent_name = "match"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # The YANG list key is the (name, format) pair.
        self.ylist_key_names = ['name','format']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('name', (YLeaf(YType.str, 'name'), ['str'])),
            ('format', (YLeaf(YType.str, 'format'), ['str'])),
        ])
        self.name = None
        self.format = None
        # XPath segment carries both key predicates.
        self._segment_path = lambda: "domain-name" + "[name='" + str(self.name) + "']" + "[format='" + str(self.format) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate/register every attribute write via the YDK hook.
        self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match.DomainName, ['name', 'format'], name, value)

    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table to avoid a circular import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match.DomainName']['meta_info']
class DomainNameRegex(_Entity_):
    """
    Match domain name.
    .. attribute:: regex (key)
    Domain name or regular expression
    **type**\: str
    **length:** 1..32
    .. attribute:: format (key)
    Domain\-format name
    **type**\: str
    **length:** 1..32
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.Match.DomainNameRegex, self).__init__()
        self.yang_name = "domain-name-regex"
        self.yang_parent_name = "match"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # The YANG list key is the (regex, format) pair.
        self.ylist_key_names = ['regex','format']
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('regex', (YLeaf(YType.str, 'regex'), ['str'])),
            ('format', (YLeaf(YType.str, 'format'), ['str'])),
        ])
        self.regex = None
        self.format = None
        # XPath segment carries both key predicates.
        self._segment_path = lambda: "domain-name-regex" + "[regex='" + str(self.regex) + "']" + "[format='" + str(self.format) + "']"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate/register every attribute write via the YDK hook.
        self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match.DomainNameRegex, ['regex', 'format'], name, value)

    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table to avoid a circular import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match.DomainNameRegex']['meta_info']
class Flow(_Entity_):
    """
    Match flow.
    .. attribute:: flow_key
    Configure the flow\-key parameters
    **type**\: list of str
    **pattern:** (SourceIP)\|(DestinationIP)\|(5Tuple)
    .. attribute:: flow_cache
    Configure the flow\-cache parameters
    **type**\: :py:class:`FlowCache <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.Match.Flow.FlowCache>`
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible superclass initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.Match.Flow, self).__init__()
        self.yang_name = "flow"
        self.yang_parent_name = "match"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child container: 'flow-cache'.
        self._child_classes = OrderedDict([("flow-cache", ("flow_cache", PolicyManager.ClassMaps.ClassMap.Match.Flow.FlowCache))])
        self._leafs = OrderedDict([
            ('flow_key', (YLeafList(YType.str, 'flow-key'), ['str'])),
        ])
        self.flow_key = []
        self.flow_cache = PolicyManager.ClassMaps.ClassMap.Match.Flow.FlowCache()
        self.flow_cache.parent = self
        self._children_name_map["flow_cache"] = "flow-cache"
        self._segment_path = lambda: "flow"
        # Must be the last assignment: freezes the attribute set of this entity.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate/register every attribute write via the YDK hook.
        self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match.Flow, ['flow_key'], name, value)

    class FlowCache(_Entity_):
        """
        Configure the flow\-cache parameters
        .. attribute:: idle_timeout
        Maximum time of inactivity for a flow
        **type**\: union of the below types:
        **type**\: int
        **range:** 10..2550
        **type**\: str
        **pattern:** (None)\|(none)
        """
        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            # Python 2/3 compatible superclass initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.ClassMaps.ClassMap.Match.Flow.FlowCache, self).__init__()
            self.yang_name = "flow-cache"
            self.yang_parent_name = "flow"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # idle-timeout is a YANG union (int 10..2550 or the string 'none'),
            # so the leaf is stored as str but accepts int or str values.
            self._leafs = OrderedDict([
                ('idle_timeout', (YLeaf(YType.str, 'idle-timeout'), ['int','str'])),
            ])
            self.idle_timeout = None
            self._segment_path = lambda: "flow-cache"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Validate/register every attribute write via the YDK hook.
            self._perform_setattr(PolicyManager.ClassMaps.ClassMap.Match.Flow.FlowCache, ['idle_timeout'], name, value)

        @staticmethod
        def _meta_info():
            # Lazily import the generated meta table to avoid a circular import.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match.Flow.FlowCache']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table to avoid a circular import.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match.Flow']['meta_info']
@staticmethod
def _meta_info():
    # Lazily import the generated meta table to avoid a circular import.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    return meta._meta_table['PolicyManager.ClassMaps.ClassMap.Match']['meta_info']
class MatchNot(_Entity_):
"""
Match not rules.
.. attribute:: ipv4_dscp
Match IPv4 DSCP
**type**\: list of str
**pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(([0\-9]\|[1\-5][0\-9]\|6[0\-3])\-([0\-9]\|[1\-5][0\-9]\|6[0\-3]))\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
.. attribute:: ipv6_dscp
Match IPv6 DSCP
**type**\: list of str
**pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(([0\-9]\|[1\-5][0\-9]\|6[0\-3])\-([0\-9]\|[1\-5][0\-9]\|6[0\-3]))\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
.. attribute:: dscp
Match DSCP
**type**\: list of str
**pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(([0\-9]\|[1\-5][0\-9]\|6[0\-3])\-([0\-9]\|[1\-5][0\-9]\|6[0\-3]))\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
.. attribute:: ipv4_precedence
Match IPv4 precedence
**type**\: union of the below types:
**type**\: list of int
**range:** 0..7
**type**\: list of str
**pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
.. attribute:: ipv6_precedence
Match IPv6 precedence
**type**\: union of the below types:
**type**\: list of int
**range:** 0..7
**type**\: list of str
**pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
.. attribute:: precedence
Match precedence
**type**\: union of the below types:
**type**\: list of int
**range:** 0..7
**type**\: list of str
**pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
.. attribute:: qos_group
Match QoS group. Should be value 0..512 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: traffic_class
Match Traffic Class. Should be value 0..63 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: cos
Match CoS
**type**\: list of int
**range:** 0..7
.. attribute:: inner_cos
Match inner CoS
**type**\: list of int
**range:** 0..7
.. attribute:: dei
Match DEI bit
**type**\: int
**range:** 0..1
.. attribute:: dei_inner
Match DEI INNER bit
**type**\: int
**range:** 0..1
.. attribute:: protocol
Match protocol
**type**\: list of str
**pattern:** ([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\|(([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\\-([1\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5]))\|((ahp)\|(dhcpv4)\|(dhcpv6)\|(eigrp)\|(esp)\|(gre)\|(icmp)\|(igmp)\|(igrp)\|(ipinip)\|(ipv4)\|(ipv6)\|(ipv6icmp)\|(mpls)\|(nos)\|(ospf)\|(pcp)\|(pim)\|(ppp)\|(sctp)\|(tcp)\|(udp))
.. attribute:: ipv4_acl
Match IPv4 ACL
**type**\: list of str
**length:** 1..64
.. attribute:: ipv6_acl
Match IPv6 ACL
**type**\: list of str
**length:** 1..64
.. attribute:: ethernet_services_acl
Match Ethernet Services
**type**\: list of str
**length:** 1..64
.. attribute:: mpls_experimental_topmost
Match MPLS experimental topmost label
**type**\: list of int
**range:** 0..7
.. attribute:: mpls_experimental_imposition
Match MPLS experimental imposition label
**type**\: list of int
**range:** 0..7
.. attribute:: discard_class
Match discard class
**type**\: list of int
**range:** 0..7
.. attribute:: ipv4_packet_length
Match IPv4 packet length. Should be value 0..65535 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: ipv6_packet_length
Match IPv6 packet length. Should be value 0..65535 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: packet_length
Match packet length. Should be value 0..65535 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: mpls_disposition_ipv4_access_list
Match MPLS Label Disposition IPv4 access list
**type**\: list of str
**length:** 1..32
.. attribute:: mpls_disposition_ipv6_access_list
Match MPLS Label Disposition IPv6 access list
**type**\: list of str
**length:** 1..32
.. attribute:: mpls_disp_class_map
Match MPLS Label Disposition class\-map
**type**\: list of str
**length:** 1..32
.. attribute:: vlan
Match VLAN ID
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: inner_vlan
Match inner VLAN ID
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: flow_tag
Match flow\-tag. Should be value 1..63 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: ethertype
Match Ethertype
**type**\: list of str
**pattern:** ((153[6\-9]\|15[4\-9][0\-9]\|1[6\-9][0\-9][0\-9]\|[2\-9][0\-9][0\-9][0\-9])\|([1\-5][0\-9][0\-9][0\-9][0\-9]\|6[0\-4][0\-9][0\-9][0\-9])\|(65[0\-4][0\-9][0\-9]\|655[0\-2][0\-9]\|6553[0\-5]))\|((arp)\|(ipv4)\|(ipv6))
.. attribute:: destination_address_ipv4
Match destination IPv4 address
**type**\: list of :py:class:`DestinationAddressIpv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.MatchNot.DestinationAddressIpv4>`
.. attribute:: destination_address_ipv6
Match destination IPv6 address
**type**\: list of :py:class:`DestinationAddressIpv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.MatchNot.DestinationAddressIpv6>`
.. attribute:: destination_port
Match destination port. Should be value 0..65535 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: fragment_type
Match fragment type for a packet
**type**\: list of str
**pattern:** (dont\-fragment)\|(first\-fragment)\|(is\-fragment)\|(last\-fragment)
.. attribute:: frame_relay_dlci
Match frame\-relay DLCI value. Should be value 16..1007 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: fr_de
Match FrameRelay DE bit
**type**\: list of int
**range:** 0..1
.. attribute:: icmpv4_code
Match IPv4 ICMP code. Should be value 0..255 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: icmpv4_type
Match IPv4 ICMP type. Should be value 0..255 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: icmpv6_code
Match IPv6 ICMP code. Should be value 0..255 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: icmpv6_type
Match IPv6 ICMP type. Should be value 0..255 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: source_address_ipv4
Match source IPv4 address
**type**\: list of :py:class:`SourceAddressIpv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.MatchNot.SourceAddressIpv4>`
.. attribute:: source_address_ipv6
Match source IPv6 address
**type**\: list of :py:class:`SourceAddressIpv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.MatchNot.SourceAddressIpv6>`
.. attribute:: source_port
Match source port. Should be value 0..65535 or range
**type**\: list of str
**pattern:** (\\d+)\|(\\d+\\\-\\d+)
.. attribute:: tcp_flag
Match TCP flags
**type**\: int
**range:** 0..4095
.. attribute:: authen_status
Match authentication status
**type**\: str
**pattern:** (authenticated)\|(unauthenticated)
.. attribute:: circuit_id
Match Circuit ID
**type**\: list of str
**length:** 1..32
.. attribute:: circuit_id_regex
Match Circuit id regex
**type**\: list of str
**length:** 1..32
.. attribute:: dhcp_client_id
Match dhcp client ID
**type**\: list of :py:class:`DhcpClientId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.MatchNot.DhcpClientId>`
.. attribute:: dhcp_client_id_regex
Match dhcp client id regex
**type**\: list of :py:class:`DhcpClientIdRegex <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.MatchNot.DhcpClientIdRegex>`
.. attribute:: domain_name
Match domain name
**type**\: list of :py:class:`DomainName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.MatchNot.DomainName>`
.. attribute:: domain_name_regex
Match domain name
**type**\: list of :py:class:`DomainNameRegex <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.MatchNot.DomainNameRegex>`
.. attribute:: remote_id
Match remote ID
**type**\: list of str
**length:** 1..32
.. attribute:: remote_id_regex
Match remote id regex
**type**\: list of str
**length:** 1..32
.. attribute:: service_name
Match service name
**type**\: list of str
**length:** 1..32
.. attribute:: service_name_regex
Match service name regular expression
**type**\: list of str
**length:** 1..32
.. attribute:: timer
Match timer
**type**\: list of str
**length:** 1..32
.. attribute:: timer_regex
Match timer regular expression
**type**\: list of str
**length:** 1..32
.. attribute:: user_name
Match user name
**type**\: list of str
**length:** 1..32
.. attribute:: user_name_regex
Match user name regular expression
**type**\: list of str
**length:** 1..32
.. attribute:: source_mac
Match source MAC address
**type**\: list of str
.. attribute:: destination_mac
Match destination MAC address
**type**\: list of str
.. attribute:: vpls_control
Match VPLS control
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vpls_broadcast
Match VPLS Broadcast
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vpls_multicast
Match VPLS Multicast
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vpls_known
Match VPLS Known
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: vpls_unknown
Match VPLS Unknown
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: flow
Match flow
**type**\: :py:class:`Flow <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.ClassMaps.ClassMap.MatchNot.Flow>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
    """
    Initialize the generated ``match-not`` entity: set the YANG metadata
    used by the YDK runtime, register child container classes, declare
    every leaf, assign default (empty) values, and freeze the instance.
    """
    # Python 3 supports zero-argument super(); Python 2 needs the explicit form.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PolicyManager.ClassMaps.ClassMap.MatchNot, self).__init__()

    # YANG bookkeeping consumed by the YDK runtime for path resolution.
    self.yang_name = "match-not"
    self.yang_parent_name = "class-map"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Maps YANG child node names to (python attribute name, generated class).
    self._child_classes = OrderedDict([("destination-address-ipv4", ("destination_address_ipv4", PolicyManager.ClassMaps.ClassMap.MatchNot.DestinationAddressIpv4)), ("destination-address-ipv6", ("destination_address_ipv6", PolicyManager.ClassMaps.ClassMap.MatchNot.DestinationAddressIpv6)), ("source-address-ipv4", ("source_address_ipv4", PolicyManager.ClassMaps.ClassMap.MatchNot.SourceAddressIpv4)), ("source-address-ipv6", ("source_address_ipv6", PolicyManager.ClassMaps.ClassMap.MatchNot.SourceAddressIpv6)), ("dhcp-client-id", ("dhcp_client_id", PolicyManager.ClassMaps.ClassMap.MatchNot.DhcpClientId)), ("dhcp-client-id-regex", ("dhcp_client_id_regex", PolicyManager.ClassMaps.ClassMap.MatchNot.DhcpClientIdRegex)), ("domain-name", ("domain_name", PolicyManager.ClassMaps.ClassMap.MatchNot.DomainName)), ("domain-name-regex", ("domain_name_regex", PolicyManager.ClassMaps.ClassMap.MatchNot.DomainNameRegex)), ("flow", ("flow", PolicyManager.ClassMaps.ClassMap.MatchNot.Flow))])
    # Maps python leaf attribute names to (YLeaf/YLeafList descriptor, accepted
    # python types). YLeafList entries are leaf-lists; YLeaf entries are scalars.
    self._leafs = OrderedDict([
        ('ipv4_dscp', (YLeafList(YType.str, 'ipv4-dscp'), ['str'])),
        ('ipv6_dscp', (YLeafList(YType.str, 'ipv6-dscp'), ['str'])),
        ('dscp', (YLeafList(YType.str, 'dscp'), ['str'])),
        ('ipv4_precedence', (YLeafList(YType.str, 'ipv4-precedence'), ['int','str'])),
        ('ipv6_precedence', (YLeafList(YType.str, 'ipv6-precedence'), ['int','str'])),
        ('precedence', (YLeafList(YType.str, 'precedence'), ['int','str'])),
        ('qos_group', (YLeafList(YType.str, 'qos-group'), ['str'])),
        ('traffic_class', (YLeafList(YType.str, 'traffic-class'), ['str'])),
        ('cos', (YLeafList(YType.uint8, 'cos'), ['int'])),
        ('inner_cos', (YLeafList(YType.uint8, 'inner-cos'), ['int'])),
        ('dei', (YLeaf(YType.uint8, 'dei'), ['int'])),
        ('dei_inner', (YLeaf(YType.uint8, 'dei-inner'), ['int'])),
        ('protocol', (YLeafList(YType.str, 'protocol'), ['str'])),
        ('ipv4_acl', (YLeafList(YType.str, 'ipv4-acl'), ['str'])),
        ('ipv6_acl', (YLeafList(YType.str, 'ipv6-acl'), ['str'])),
        ('ethernet_services_acl', (YLeafList(YType.str, 'ethernet-services-acl'), ['str'])),
        ('mpls_experimental_topmost', (YLeafList(YType.uint8, 'mpls-experimental-topmost'), ['int'])),
        ('mpls_experimental_imposition', (YLeafList(YType.uint8, 'mpls-experimental-imposition'), ['int'])),
        ('discard_class', (YLeafList(YType.uint8, 'discard-class'), ['int'])),
        ('ipv4_packet_length', (YLeafList(YType.str, 'ipv4-packet-length'), ['str'])),
        ('ipv6_packet_length', (YLeafList(YType.str, 'ipv6-packet-length'), ['str'])),
        ('packet_length', (YLeafList(YType.str, 'packet-length'), ['str'])),
        ('mpls_disposition_ipv4_access_list', (YLeafList(YType.str, 'mpls-disposition-ipv4-access-list'), ['str'])),
        ('mpls_disposition_ipv6_access_list', (YLeafList(YType.str, 'mpls-disposition-ipv6-access-list'), ['str'])),
        ('mpls_disp_class_map', (YLeafList(YType.str, 'mpls-disp-class-map'), ['str'])),
        ('vlan', (YLeafList(YType.str, 'vlan'), ['str'])),
        ('inner_vlan', (YLeafList(YType.str, 'inner-vlan'), ['str'])),
        ('flow_tag', (YLeafList(YType.str, 'flow-tag'), ['str'])),
        ('ethertype', (YLeafList(YType.str, 'ethertype'), ['str'])),
        ('destination_port', (YLeafList(YType.str, 'destination-port'), ['str'])),
        ('fragment_type', (YLeafList(YType.str, 'fragment-type'), ['str'])),
        ('frame_relay_dlci', (YLeafList(YType.str, 'frame-relay-dlci'), ['str'])),
        ('fr_de', (YLeafList(YType.uint8, 'fr-de'), ['int'])),
        ('icmpv4_code', (YLeafList(YType.str, 'icmpv4-code'), ['str'])),
        ('icmpv4_type', (YLeafList(YType.str, 'icmpv4-type'), ['str'])),
        ('icmpv6_code', (YLeafList(YType.str, 'icmpv6-code'), ['str'])),
        ('icmpv6_type', (YLeafList(YType.str, 'icmpv6-type'), ['str'])),
        ('source_port', (YLeafList(YType.str, 'source-port'), ['str'])),
        ('tcp_flag', (YLeaf(YType.uint16, 'tcp-flag'), ['int'])),
        ('authen_status', (YLeaf(YType.str, 'authen-status'), ['str'])),
        ('circuit_id', (YLeafList(YType.str, 'circuit-id'), ['str'])),
        ('circuit_id_regex', (YLeafList(YType.str, 'circuit-id-regex'), ['str'])),
        ('remote_id', (YLeafList(YType.str, 'remote-id'), ['str'])),
        ('remote_id_regex', (YLeafList(YType.str, 'remote-id-regex'), ['str'])),
        ('service_name', (YLeafList(YType.str, 'service-name'), ['str'])),
        ('service_name_regex', (YLeafList(YType.str, 'service-name-regex'), ['str'])),
        ('timer', (YLeafList(YType.str, 'timer'), ['str'])),
        ('timer_regex', (YLeafList(YType.str, 'timer-regex'), ['str'])),
        ('user_name', (YLeafList(YType.str, 'user-name'), ['str'])),
        ('user_name_regex', (YLeafList(YType.str, 'user-name-regex'), ['str'])),
        ('source_mac', (YLeafList(YType.str, 'source-mac'), ['str'])),
        ('destination_mac', (YLeafList(YType.str, 'destination-mac'), ['str'])),
        ('vpls_control', (YLeaf(YType.empty, 'vpls-control'), ['Empty'])),
        ('vpls_broadcast', (YLeaf(YType.empty, 'vpls-broadcast'), ['Empty'])),
        ('vpls_multicast', (YLeaf(YType.empty, 'vpls-multicast'), ['Empty'])),
        ('vpls_known', (YLeaf(YType.empty, 'vpls-known'), ['Empty'])),
        ('vpls_unknown', (YLeaf(YType.empty, 'vpls-unknown'), ['Empty'])),
        ])
    # Defaults: leaf-lists start as empty python lists, scalar leafs as None.
    self.ipv4_dscp = []
    self.ipv6_dscp = []
    self.dscp = []
    self.ipv4_precedence = []
    self.ipv6_precedence = []
    self.precedence = []
    self.qos_group = []
    self.traffic_class = []
    self.cos = []
    self.inner_cos = []
    self.dei = None
    self.dei_inner = None
    self.protocol = []
    self.ipv4_acl = []
    self.ipv6_acl = []
    self.ethernet_services_acl = []
    self.mpls_experimental_topmost = []
    self.mpls_experimental_imposition = []
    self.discard_class = []
    self.ipv4_packet_length = []
    self.ipv6_packet_length = []
    self.packet_length = []
    self.mpls_disposition_ipv4_access_list = []
    self.mpls_disposition_ipv6_access_list = []
    self.mpls_disp_class_map = []
    self.vlan = []
    self.inner_vlan = []
    self.flow_tag = []
    self.ethertype = []
    self.destination_port = []
    self.fragment_type = []
    self.frame_relay_dlci = []
    self.fr_de = []
    self.icmpv4_code = []
    self.icmpv4_type = []
    self.icmpv6_code = []
    self.icmpv6_type = []
    self.source_port = []
    self.tcp_flag = None
    self.authen_status = None
    self.circuit_id = []
    self.circuit_id_regex = []
    self.remote_id = []
    self.remote_id_regex = []
    self.service_name = []
    self.service_name_regex = []
    self.timer = []
    self.timer_regex = []
    self.user_name = []
    self.user_name_regex = []
    self.source_mac = []
    self.destination_mac = []
    self.vpls_control = None
    self.vpls_broadcast = None
    self.vpls_multicast = None
    self.vpls_known = None
    self.vpls_unknown = None
    # "flow" is a singleton child container, instantiated eagerly.
    self.flow = PolicyManager.ClassMaps.ClassMap.MatchNot.Flow()
    self.flow.parent = self
    self._children_name_map["flow"] = "flow"
    # The remaining children are YANG lists, backed by YList containers.
    self.destination_address_ipv4 = YList(self)
    self.destination_address_ipv6 = YList(self)
    self.source_address_ipv4 = YList(self)
    self.source_address_ipv6 = YList(self)
    self.dhcp_client_id = YList(self)
    self.dhcp_client_id_regex = YList(self)
    self.domain_name = YList(self)
    self.domain_name_regex = YList(self)
    self._segment_path = lambda: "match-not"
    # Freeze last: subsequent attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute write through the YDK validation hook,
    # passing the full set of leaf attribute names for this entity.
    leaf_names = ['ipv4_dscp', 'ipv6_dscp', 'dscp', 'ipv4_precedence', 'ipv6_precedence', 'precedence', 'qos_group', 'traffic_class', 'cos', 'inner_cos', 'dei', 'dei_inner', 'protocol', 'ipv4_acl', 'ipv6_acl', 'ethernet_services_acl', 'mpls_experimental_topmost', 'mpls_experimental_imposition', 'discard_class', 'ipv4_packet_length', 'ipv6_packet_length', 'packet_length', 'mpls_disposition_ipv4_access_list', 'mpls_disposition_ipv6_access_list', 'mpls_disp_class_map', 'vlan', 'inner_vlan', 'flow_tag', 'ethertype', 'destination_port', 'fragment_type', 'frame_relay_dlci', 'fr_de', 'icmpv4_code', 'icmpv4_type', 'icmpv6_code', 'icmpv6_type', 'source_port', 'tcp_flag', 'authen_status', 'circuit_id', 'circuit_id_regex', 'remote_id', 'remote_id_regex', 'service_name', 'service_name_regex', 'timer', 'timer_regex', 'user_name', 'user_name_regex', 'source_mac', 'destination_mac', 'vpls_control', 'vpls_broadcast', 'vpls_multicast', 'vpls_known', 'vpls_unknown']
    self._perform_setattr(PolicyManager.ClassMaps.ClassMap.MatchNot, leaf_names, name, value)
class DestinationAddressIpv4(_Entity_):
    """
    Match destination IPv4 address.

    .. attribute:: address (key)
    	IPv4 address
    	**type**\: str
    	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

    .. attribute:: netmask (key)
    	IPv4 netmask
    	**type**\: str
    	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Explicit super() form keeps Python 2 compatibility.
        if sys.version_info[0] >= 3:
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.MatchNot.DestinationAddressIpv4, self).__init__()

        self.yang_name = "destination-address-ipv4"
        self.yang_parent_name = "match-not"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['address', 'netmask']
        self._child_classes = OrderedDict()

        # Both leafs are list keys, declared as plain string scalars.
        leafs = OrderedDict()
        leafs['address'] = (YLeaf(YType.str, 'address'), ['str'])
        leafs['netmask'] = (YLeaf(YType.str, 'netmask'), ['str'])
        self._leafs = leafs

        self.address = None
        self.netmask = None
        # Key predicates are embedded in the segment path.
        self._segment_path = lambda: (
            "destination-address-ipv4[address='%s'][netmask='%s']"
            % (self.address, self.netmask))
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PolicyManager.ClassMaps.ClassMap.MatchNot.DestinationAddressIpv4,
            ['address', 'netmask'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.MatchNot.DestinationAddressIpv4']['meta_info']
class DestinationAddressIpv6(_Entity_):
    """
    Match destination IPv6 address.

    .. attribute:: address (key)
    	IPv6 address
    	**type**\: str
    	**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

    .. attribute:: prefix_length (key)
    	IPv6 prefix length
    	**type**\: int
    	**range:** 0..128
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Explicit super() form keeps Python 2 compatibility.
        if sys.version_info[0] >= 3:
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.MatchNot.DestinationAddressIpv6, self).__init__()

        self.yang_name = "destination-address-ipv6"
        self.yang_parent_name = "match-not"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['address', 'prefix_length']
        self._child_classes = OrderedDict()

        # List keys: address string plus a uint8 prefix length (0..128).
        leafs = OrderedDict()
        leafs['address'] = (YLeaf(YType.str, 'address'), ['str'])
        leafs['prefix_length'] = (YLeaf(YType.uint8, 'prefix-length'), ['int'])
        self._leafs = leafs

        self.address = None
        self.prefix_length = None
        # Key predicates are embedded in the segment path.
        self._segment_path = lambda: (
            "destination-address-ipv6[address='%s'][prefix-length='%s']"
            % (self.address, self.prefix_length))
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PolicyManager.ClassMaps.ClassMap.MatchNot.DestinationAddressIpv6,
            ['address', 'prefix_length'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.MatchNot.DestinationAddressIpv6']['meta_info']
class SourceAddressIpv4(_Entity_):
    """
    Match source IPv4 address.

    .. attribute:: address (key)
    	IPv4 address
    	**type**\: str
    	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

    .. attribute:: netmask (key)
    	IPv4 netmask
    	**type**\: str
    	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Explicit super() form keeps Python 2 compatibility.
        if sys.version_info[0] >= 3:
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.MatchNot.SourceAddressIpv4, self).__init__()

        self.yang_name = "source-address-ipv4"
        self.yang_parent_name = "match-not"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['address', 'netmask']
        self._child_classes = OrderedDict()

        # Both leafs are list keys, declared as plain string scalars.
        leafs = OrderedDict()
        leafs['address'] = (YLeaf(YType.str, 'address'), ['str'])
        leafs['netmask'] = (YLeaf(YType.str, 'netmask'), ['str'])
        self._leafs = leafs

        self.address = None
        self.netmask = None
        # Key predicates are embedded in the segment path.
        self._segment_path = lambda: (
            "source-address-ipv4[address='%s'][netmask='%s']"
            % (self.address, self.netmask))
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PolicyManager.ClassMaps.ClassMap.MatchNot.SourceAddressIpv4,
            ['address', 'netmask'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.MatchNot.SourceAddressIpv4']['meta_info']
class SourceAddressIpv6(_Entity_):
    """
    Match source IPv6 address.

    .. attribute:: address (key)
    	IPv6 address
    	**type**\: str
    	**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

    .. attribute:: prefix_length (key)
    	IPv6 prefix length
    	**type**\: int
    	**range:** 0..128
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Explicit super() form keeps Python 2 compatibility.
        if sys.version_info[0] >= 3:
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.MatchNot.SourceAddressIpv6, self).__init__()

        self.yang_name = "source-address-ipv6"
        self.yang_parent_name = "match-not"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['address', 'prefix_length']
        self._child_classes = OrderedDict()

        # List keys: address string plus a uint8 prefix length (0..128).
        leafs = OrderedDict()
        leafs['address'] = (YLeaf(YType.str, 'address'), ['str'])
        leafs['prefix_length'] = (YLeaf(YType.uint8, 'prefix-length'), ['int'])
        self._leafs = leafs

        self.address = None
        self.prefix_length = None
        # Key predicates are embedded in the segment path.
        self._segment_path = lambda: (
            "source-address-ipv6[address='%s'][prefix-length='%s']"
            % (self.address, self.prefix_length))
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PolicyManager.ClassMaps.ClassMap.MatchNot.SourceAddressIpv6,
            ['address', 'prefix_length'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.MatchNot.SourceAddressIpv6']['meta_info']
class DhcpClientId(_Entity_):
    """
    Match dhcp client ID.

    .. attribute:: value (key)
    	Dhcp client Id
    	**type**\: str
    	**length:** 1..32

    .. attribute:: flag (key)
    	Dhcp client id Ascii/Hex
    	**type**\: str
    	**pattern:** (none)\|(ascii)\|(hex)
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Explicit super() form keeps Python 2 compatibility.
        if sys.version_info[0] >= 3:
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.MatchNot.DhcpClientId, self).__init__()

        self.yang_name = "dhcp-client-id"
        self.yang_parent_name = "match-not"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['value', 'flag']
        self._child_classes = OrderedDict()

        # List keys: the client id value and its ascii/hex flag.
        leafs = OrderedDict()
        leafs['value'] = (YLeaf(YType.str, 'value'), ['str'])
        leafs['flag'] = (YLeaf(YType.str, 'flag'), ['str'])
        self._leafs = leafs

        self.value = None
        self.flag = None
        # Key predicates are embedded in the segment path.
        self._segment_path = lambda: (
            "dhcp-client-id[value='%s'][flag='%s']"
            % (self.value, self.flag))
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PolicyManager.ClassMaps.ClassMap.MatchNot.DhcpClientId,
            ['value', 'flag'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.MatchNot.DhcpClientId']['meta_info']
class DhcpClientIdRegex(_Entity_):
    """
    Match dhcp client id regex.

    .. attribute:: value (key)
    	Dhcp client id regular expression
    	**type**\: str
    	**length:** 1..32

    .. attribute:: flag (key)
    	Dhcp client Id regex Ascii/Hex
    	**type**\: str
    	**pattern:** (none)\|(ascii)\|(hex)
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Explicit super() form keeps Python 2 compatibility.
        if sys.version_info[0] >= 3:
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.MatchNot.DhcpClientIdRegex, self).__init__()

        self.yang_name = "dhcp-client-id-regex"
        self.yang_parent_name = "match-not"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['value', 'flag']
        self._child_classes = OrderedDict()

        # List keys: the regex value and its ascii/hex flag.
        leafs = OrderedDict()
        leafs['value'] = (YLeaf(YType.str, 'value'), ['str'])
        leafs['flag'] = (YLeaf(YType.str, 'flag'), ['str'])
        self._leafs = leafs

        self.value = None
        self.flag = None
        # Key predicates are embedded in the segment path.
        self._segment_path = lambda: (
            "dhcp-client-id-regex[value='%s'][flag='%s']"
            % (self.value, self.flag))
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PolicyManager.ClassMaps.ClassMap.MatchNot.DhcpClientIdRegex,
            ['value', 'flag'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.MatchNot.DhcpClientIdRegex']['meta_info']
class DomainName(_Entity_):
    """
    Match domain name.

    .. attribute:: name (key)
    	Domain name or regular expression
    	**type**\: str
    	**length:** 1..32

    .. attribute:: format (key)
    	Domain\-format name
    	**type**\: str
    	**length:** 1..32
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Explicit super() form keeps Python 2 compatibility.
        if sys.version_info[0] >= 3:
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.MatchNot.DomainName, self).__init__()

        self.yang_name = "domain-name"
        self.yang_parent_name = "match-not"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['name', 'format']
        self._child_classes = OrderedDict()

        # List keys: domain name and domain-format name.
        leafs = OrderedDict()
        leafs['name'] = (YLeaf(YType.str, 'name'), ['str'])
        leafs['format'] = (YLeaf(YType.str, 'format'), ['str'])
        self._leafs = leafs

        self.name = None
        self.format = None
        # Key predicates are embedded in the segment path.
        self._segment_path = lambda: (
            "domain-name[name='%s'][format='%s']"
            % (self.name, self.format))
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PolicyManager.ClassMaps.ClassMap.MatchNot.DomainName,
            ['name', 'format'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.MatchNot.DomainName']['meta_info']
class DomainNameRegex(_Entity_):
    """
    Match domain name (regular expression form).

    .. attribute:: regex (key)
    	Domain name or regular expression
    	**type**\: str
    	**length:** 1..32

    .. attribute:: format (key)
    	Domain\-format name
    	**type**\: str
    	**length:** 1..32
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Explicit super() form keeps Python 2 compatibility.
        if sys.version_info[0] >= 3:
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.MatchNot.DomainNameRegex, self).__init__()

        self.yang_name = "domain-name-regex"
        self.yang_parent_name = "match-not"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = ['regex', 'format']
        self._child_classes = OrderedDict()

        # List keys: the regex and domain-format name.
        leafs = OrderedDict()
        leafs['regex'] = (YLeaf(YType.str, 'regex'), ['str'])
        leafs['format'] = (YLeaf(YType.str, 'format'), ['str'])
        self._leafs = leafs

        self.regex = None
        self.format = None
        # Key predicates are embedded in the segment path.
        self._segment_path = lambda: (
            "domain-name-regex[regex='%s'][format='%s']"
            % (self.regex, self.format))
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PolicyManager.ClassMaps.ClassMap.MatchNot.DomainNameRegex,
            ['regex', 'format'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.MatchNot.DomainNameRegex']['meta_info']
class Flow(_Entity_):
    """
    Match flow.

    .. attribute:: flow_tag
    	Configure the flow\-tag parameters
    	**type**\: list of int
    	**range:** 1..63
    """
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Explicit super() form keeps Python 2 compatibility.
        if sys.version_info[0] >= 3:
            super().__init__()
        else:
            super(PolicyManager.ClassMaps.ClassMap.MatchNot.Flow, self).__init__()

        self.yang_name = "flow"
        self.yang_parent_name = "match-not"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict()

        # Single leaf-list of uint16 flow tags.
        leafs = OrderedDict()
        leafs['flow_tag'] = (YLeafList(YType.uint16, 'flow-tag'), ['int'])
        self._leafs = leafs

        self.flow_tag = []
        self._segment_path = lambda: "flow"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(
            PolicyManager.ClassMaps.ClassMap.MatchNot.Flow,
            ['flow_tag'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.ClassMaps.ClassMap.MatchNot.Flow']['meta_info']
@staticmethod
def _meta_info():
    # Import lazily so the large meta module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta_module
    entry = meta_module._meta_table['PolicyManager.ClassMaps.ClassMap.MatchNot']
    return entry['meta_info']
@staticmethod
def _meta_info():
    # Import lazily so the large meta module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta_module
    entry = meta_module._meta_table['PolicyManager.ClassMaps.ClassMap']
    return entry['meta_info']
@staticmethod
def _meta_info():
    # Import lazily so the large meta module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta_module
    entry = meta_module._meta_table['PolicyManager.ClassMaps']
    return entry['meta_info']
class PolicyMaps(_Entity_):
"""
Policy\-maps configuration.
.. attribute:: policy_map
Policy\-map configuration
**type**\: list of :py:class:`PolicyMap <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap>`
"""
# YANG module prefix and revision this generated binding was derived from.
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
    """Initialize the policy-maps container and register its child list."""
    # Explicit super() form keeps Python 2 compatibility.
    if sys.version_info[0] >= 3:
        super().__init__()
    else:
        super(PolicyManager.PolicyMaps, self).__init__()

    # YANG bookkeeping used by the YDK runtime for path resolution.
    self.yang_name = "policy-maps"
    self.yang_parent_name = "policy-manager"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Only child: the keyed policy-map list.
    self._child_classes = OrderedDict(
        [("policy-map", ("policy_map", PolicyManager.PolicyMaps.PolicyMap))])
    self._leafs = OrderedDict()
    self.policy_map = YList(self)
    self._segment_path = lambda: "policy-maps"
    self._absolute_path = lambda: (
        "Cisco-IOS-XR-infra-policymgr-cfg:policy-manager/%s" % self._segment_path())
    self._is_frozen = True
def __setattr__(self, name, value):
    # No leafs on this container, so the validated leaf-name list is empty.
    leaf_names = []
    self._perform_setattr(PolicyManager.PolicyMaps, leaf_names, name, value)
class PolicyMap(_Entity_):
"""
Policy\-map configuration.
.. attribute:: type (key)
Type of policy\-map
**type**\: :py:class:`PolicyMapType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyMapType>`
.. attribute:: name (key)
Name of policy\-map
**type**\: str
**pattern:** [a\-zA\-Z0\-9][a\-zA\-Z0\-9\\.\_@$%+#\:=<>\\\-]{0,62}
.. attribute:: event
Policy event
**type**\: list of :py:class:`Event <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.Event>`
.. attribute:: policy_map_rule
Class\-map rule
**type**\: list of :py:class:`PolicyMapRule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule>`
.. attribute:: description
Description for this policy\-map
**type**\: str
"""
# YANG module prefix and revision this generated binding was derived from.
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
    """Initialize a policy-map list entry keyed by (type, name)."""
    # Explicit super() form keeps Python 2 compatibility.
    if sys.version_info[0] >= 3:
        super().__init__()
    else:
        super(PolicyManager.PolicyMaps.PolicyMap, self).__init__()

    # YANG bookkeeping used by the YDK runtime for path resolution.
    self.yang_name = "policy-map"
    self.yang_parent_name = "policy-maps"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = ['type', 'name']
    # Child lists: policy events and class-map rules.
    self._child_classes = OrderedDict(
        [("event", ("event", PolicyManager.PolicyMaps.PolicyMap.Event)),
         ("policy-map-rule", ("policy_map_rule", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule))])

    # Leafs: the two keys plus an optional free-form description.
    leafs = OrderedDict()
    leafs['type'] = (YLeaf(YType.enumeration, 'type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg', 'PolicyMapType', '')])
    leafs['name'] = (YLeaf(YType.str, 'name'), ['str'])
    leafs['description'] = (YLeaf(YType.str, 'description'), ['str'])
    self._leafs = leafs

    self.type = None
    self.name = None
    self.description = None
    self.event = YList(self)
    self.policy_map_rule = YList(self)
    # Key predicates are embedded in the segment path.
    self._segment_path = lambda: (
        "policy-map[type='%s'][name='%s']" % (self.type, self.name))
    self._absolute_path = lambda: (
        "Cisco-IOS-XR-infra-policymgr-cfg:policy-manager/policy-maps/%s"
        % self._segment_path())
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route writes through the YDK validation hook with this entry's leafs.
    leaf_names = ['type', 'name', 'description']
    self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap, leaf_names, name, value)
class Event(_Entity_):
"""
Policy event.
.. attribute:: event_type (key)
Event type
**type**\: :py:class:`EventType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.EventType>`
.. attribute:: event_mode_match_all
Execute all the matched classes
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: event_mode_match_first
Execute only the first matched class
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: class_
Class\-map rule
**type**\: list of :py:class:`Class <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.Event.Class>`
"""
# YANG module prefix and revision this generated binding was derived from.
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
    """Initialize a policy event list entry keyed by event-type."""
    # Explicit super() form keeps Python 2 compatibility.
    if sys.version_info[0] >= 3:
        super().__init__()
    else:
        super(PolicyManager.PolicyMaps.PolicyMap.Event, self).__init__()

    # YANG bookkeeping used by the YDK runtime for path resolution.
    self.yang_name = "event"
    self.yang_parent_name = "policy-map"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['event_type']
    # YANG node "class" maps to attribute "class_" (reserved word in Python).
    self._child_classes = OrderedDict(
        [("class", ("class_", PolicyManager.PolicyMaps.PolicyMap.Event.Class))])

    # Leafs: the key plus two mutually-alternative empty-typed mode flags.
    leafs = OrderedDict()
    leafs['event_type'] = (YLeaf(YType.enumeration, 'event-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg', 'EventType', '')])
    leafs['event_mode_match_all'] = (YLeaf(YType.empty, 'event-mode-match-all'), ['Empty'])
    leafs['event_mode_match_first'] = (YLeaf(YType.empty, 'event-mode-match-first'), ['Empty'])
    self._leafs = leafs

    self.event_type = None
    self.event_mode_match_all = None
    self.event_mode_match_first = None
    self.class_ = YList(self)
    # Key predicate is embedded in the segment path.
    self._segment_path = lambda: "event[event-type='%s']" % (self.event_type,)
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route writes through the YDK validation hook with this entry's leafs.
    leaf_names = ['event_type', 'event_mode_match_all', 'event_mode_match_first']
    self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.Event, leaf_names, name, value)
class Class(_Entity_):
"""
Class\-map rule.
.. attribute:: class_name (key)
Name of class
**type**\: str
**pattern:** [a\-zA\-Z0\-9][a\-zA\-Z0\-9\\.\_@$%+#\:=<>\\\-]{0,62}
.. attribute:: class_type (key)
Type of class
**type**\: :py:class:`PmapClassMapType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PmapClassMapType>`
.. attribute:: class_execution_strategy
Class execution strategy
**type**\: :py:class:`ExecutionStrategy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.ExecutionStrategy>`
.. attribute:: action_rule
Action rule
**type**\: list of :py:class:`ActionRule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule>`
"""
# YANG module prefix and revision this generated binding was derived from.
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
    """Initialize a class-map rule entry keyed by (class-name, class-type)."""
    # Explicit super() form keeps Python 2 compatibility.
    if sys.version_info[0] >= 3:
        super().__init__()
    else:
        super(PolicyManager.PolicyMaps.PolicyMap.Event.Class, self).__init__()

    # YANG bookkeeping used by the YDK runtime for path resolution.
    self.yang_name = "class"
    self.yang_parent_name = "event"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['class_name', 'class_type']
    # Only child: the keyed action-rule list.
    self._child_classes = OrderedDict(
        [("action-rule", ("action_rule", PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule))])

    # Leafs: the two keys plus the optional execution strategy.
    leafs = OrderedDict()
    leafs['class_name'] = (YLeaf(YType.str, 'class-name'), ['str'])
    leafs['class_type'] = (YLeaf(YType.enumeration, 'class-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg', 'PmapClassMapType', '')])
    leafs['class_execution_strategy'] = (YLeaf(YType.enumeration, 'class-execution-strategy'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg', 'ExecutionStrategy', '')])
    self._leafs = leafs

    self.class_name = None
    self.class_type = None
    self.class_execution_strategy = None
    self.action_rule = YList(self)
    # Key predicates are embedded in the segment path.
    self._segment_path = lambda: (
        "class[class-name='%s'][class-type='%s']"
        % (self.class_name, self.class_type))
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route writes through the YDK validation hook with this entry's leafs.
    leaf_names = ['class_name', 'class_type', 'class_execution_strategy']
    self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.Event.Class, leaf_names, name, value)
class ActionRule(_Entity_):
"""
Action rule.
.. attribute:: action_sequence_number (key)
Sequence number for this action
**type**\: int
**range:** 1..65535
.. attribute:: activate_dynamic_template
Activate dynamic templates
**type**\: :py:class:`ActivateDynamicTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.ActivateDynamicTemplate>`
**presence node**\: True
.. attribute:: authenticate
Authentication related configuration
**type**\: :py:class:`Authenticate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authenticate>`
.. attribute:: authorize
Authorize
**type**\: :py:class:`Authorize <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authorize>`
**presence node**\: True
.. attribute:: deactivate_dynamic_template
Deactivate dynamic templates
**type**\: :py:class:`DeactivateDynamicTemplate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.DeactivateDynamicTemplate>`
**presence node**\: True
.. attribute:: disconnect
Disconnect session
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: monitor
Monitor session
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: set_timer
Set a timer to execute a rule on its expiry
**type**\: :py:class:`SetTimer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.SetTimer>`
**presence node**\: True
.. attribute:: stop_timer
Disable timer before it expires
**type**\: :py:class:`StopTimer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.StopTimer>`
"""
# YANG module prefix and revision this generated binding was derived from.
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule, self).__init__()
self.yang_name = "action-rule"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['action_sequence_number']
self._child_classes = OrderedDict([("activate-dynamic-template", ("activate_dynamic_template", PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.ActivateDynamicTemplate)), ("authenticate", ("authenticate", PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authenticate)), ("authorize", ("authorize", PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authorize)), ("deactivate-dynamic-template", ("deactivate_dynamic_template", PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.DeactivateDynamicTemplate)), ("set-timer", ("set_timer", PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.SetTimer)), ("stop-timer", ("stop_timer", PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.StopTimer))])
self._leafs = OrderedDict([
('action_sequence_number', (YLeaf(YType.uint16, 'action-sequence-number'), ['int'])),
('disconnect', (YLeaf(YType.empty, 'disconnect'), ['Empty'])),
('monitor', (YLeaf(YType.empty, 'monitor'), ['Empty'])),
])
self.action_sequence_number = None
self.disconnect = None
self.monitor = None
self.activate_dynamic_template = None
self._children_name_map["activate_dynamic_template"] = "activate-dynamic-template"
self.authenticate = PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authenticate()
self.authenticate.parent = self
self._children_name_map["authenticate"] = "authenticate"
self.authorize = None
self._children_name_map["authorize"] = "authorize"
self.deactivate_dynamic_template = None
self._children_name_map["deactivate_dynamic_template"] = "deactivate-dynamic-template"
self.set_timer = None
self._children_name_map["set_timer"] = "set-timer"
self.stop_timer = PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.StopTimer()
self.stop_timer.parent = self
self._children_name_map["stop_timer"] = "stop-timer"
self._segment_path = lambda: "action-rule" + "[action-sequence-number='" + str(self.action_sequence_number) + "']"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule, ['action_sequence_number', 'disconnect', 'monitor'], name, value)
class ActivateDynamicTemplate(_Entity_):
"""
Activate dynamic templates.
.. attribute:: name
Dynamic template name
**type**\: str
**mandatory**\: True
.. attribute:: aaa_list
Name of the AAA method list
**type**\: str
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.ActivateDynamicTemplate, self).__init__()
self.yang_name = "activate-dynamic-template"
self.yang_parent_name = "action-rule"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self.is_presence_container = True
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('aaa_list', (YLeaf(YType.str, 'aaa-list'), ['str'])),
])
self.name = None
self.aaa_list = None
self._segment_path = lambda: "activate-dynamic-template"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.ActivateDynamicTemplate, ['name', 'aaa_list'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.ActivateDynamicTemplate']['meta_info']
class Authenticate(_Entity_):
"""
Authentication related configuration.
.. attribute:: aaa_list
Name of the AAA method list
**type**\: str
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authenticate, self).__init__()
self.yang_name = "authenticate"
self.yang_parent_name = "action-rule"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('aaa_list', (YLeaf(YType.str, 'aaa-list'), ['str'])),
])
self.aaa_list = None
self._segment_path = lambda: "authenticate"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authenticate, ['aaa_list'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authenticate']['meta_info']
class Authorize(_Entity_):
"""
Authorize.
.. attribute:: aaa_list
Name of the AAA method list
**type**\: str
**mandatory**\: True
.. attribute:: format
Specify an Authorize format name
**type**\: str
.. attribute:: identifier
Specify an Authorize format name
**type**\: :py:class:`AuthorizeIdentifier <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.AuthorizeIdentifier>`
.. attribute:: password
Specify a password to be used for AAA request
**type**\: str
**mandatory**\: True
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authorize, self).__init__()
self.yang_name = "authorize"
self.yang_parent_name = "action-rule"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self.is_presence_container = True
self._leafs = OrderedDict([
('aaa_list', (YLeaf(YType.str, 'aaa-list'), ['str'])),
('format', (YLeaf(YType.str, 'format'), ['str'])),
('identifier', (YLeaf(YType.enumeration, 'identifier'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg', 'AuthorizeIdentifier', '')])),
('password', (YLeaf(YType.str, 'password'), ['str'])),
])
self.aaa_list = None
self.format = None
self.identifier = None
self.password = None
self._segment_path = lambda: "authorize"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authorize, ['aaa_list', 'format', 'identifier', 'password'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.Authorize']['meta_info']
class DeactivateDynamicTemplate(_Entity_):
"""
Deactivate dynamic templates.
.. attribute:: name
Dynamic template name
**type**\: str
**mandatory**\: True
.. attribute:: aaa_list
Name of the AAA method list
**type**\: str
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.DeactivateDynamicTemplate, self).__init__()
self.yang_name = "deactivate-dynamic-template"
self.yang_parent_name = "action-rule"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self.is_presence_container = True
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('aaa_list', (YLeaf(YType.str, 'aaa-list'), ['str'])),
])
self.name = None
self.aaa_list = None
self._segment_path = lambda: "deactivate-dynamic-template"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.DeactivateDynamicTemplate, ['name', 'aaa_list'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.DeactivateDynamicTemplate']['meta_info']
class SetTimer(_Entity_):
"""
Set a timer to execute a rule on its
expiry
.. attribute:: timer_name
Name of the timer
**type**\: str
**mandatory**\: True
.. attribute:: timer_value
Timer value in minutes
**type**\: int
**range:** 0..4294967295
**mandatory**\: True
**units**\: minutes
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.SetTimer, self).__init__()
self.yang_name = "set-timer"
self.yang_parent_name = "action-rule"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self.is_presence_container = True
self._leafs = OrderedDict([
('timer_name', (YLeaf(YType.str, 'timer-name'), ['str'])),
('timer_value', (YLeaf(YType.uint32, 'timer-value'), ['int'])),
])
self.timer_name = None
self.timer_value = None
self._segment_path = lambda: "set-timer"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.SetTimer, ['timer_name', 'timer_value'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.SetTimer']['meta_info']
class StopTimer(_Entity_):
"""
Disable timer before it expires.
.. attribute:: timer_name
Name of the timer
**type**\: str
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.StopTimer, self).__init__()
self.yang_name = "stop-timer"
self.yang_parent_name = "action-rule"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('timer_name', (YLeaf(YType.str, 'timer-name'), ['str'])),
])
self.timer_name = None
self._segment_path = lambda: "stop-timer"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.StopTimer, ['timer_name'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule.StopTimer']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.Event.Class.ActionRule']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information entry for the Class entity."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    table_entry = meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.Event.Class']
    return table_entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information entry for the Event entity."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    table_entry = meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.Event']
    return table_entry['meta_info']
class PolicyMapRule(_Entity_):
"""
Class\-map rule.
.. attribute:: class_name (key)
Name of class\-map
**type**\: str
**pattern:** [a\-zA\-Z0\-9][a\-zA\-Z0\-9\\.\_@$%+#\:=<>\\\-]{0,62}
.. attribute:: class_type (key)
Type of class\-map
**type**\: :py:class:`PmapClassMapType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PmapClassMapType>`
.. attribute:: shape
Policy action shape
**type**\: :py:class:`Shape <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape>`
.. attribute:: min_bandwidth
Policy action minimum bandwidth queue
**type**\: :py:class:`MinBandwidth <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MinBandwidth>`
.. attribute:: bandwidth_remaining
Policy action bandwidth remaining queue
**type**\: :py:class:`BandwidthRemaining <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.BandwidthRemaining>`
.. attribute:: queue_limit
Policy action queue limit
**type**\: :py:class:`QueueLimit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.QueueLimit>`
.. attribute:: pfc
Policy action pfc
**type**\: :py:class:`Pfc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc>`
.. attribute:: priority_level
Priority level
**type**\: int
**range:** 1..7
.. attribute:: default_red
Default random early detection
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: ecn_red
ECN based random early detection
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: random_detect
Random early detection. All RED profiles in a class must be based on the same field
**type**\: list of :py:class:`RandomDetect <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.RandomDetect>`
.. attribute:: set
Policy action packet marking
**type**\: :py:class:`Set <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Set>`
.. attribute:: police
Configures traffic policing action
**type**\: :py:class:`Police <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police>`
.. attribute:: service_policy
Configure a child service policy
**type**\: :py:class:`ServicePolicy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServicePolicy>`
.. attribute:: cac_local
Policy action CAC
**type**\: :py:class:`CacLocal <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal>`
.. attribute:: flow_params
Policy flow monitoring action
**type**\: :py:class:`FlowParams <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.FlowParams>`
.. attribute:: metrics_ipcbr
Policy IP\-CBR metric action
**type**\: :py:class:`MetricsIpcbr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr>`
.. attribute:: react
Policy action react
**type**\: :py:class:`React <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React>`
.. attribute:: http_redirect
Policy action http redirect. Redirect to this url
**type**\: str
.. attribute:: pbr_transmit
Policy action PBR transmit
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: drop
Policy action drop
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: decap_gre
Policy action DECAP GRE
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: pbr_redirect
Policy action redirect
**type**\: :py:class:`PbrRedirect <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect>`
.. attribute:: pbr_forward
Policy action PBR forward
**type**\: :py:class:`PbrForward <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward>`
.. attribute:: service_fragment
Policy action service fragment. Service fragment name
**type**\: str
.. attribute:: fragment
Policy action fragment. Fragment name
**type**\: str
.. attribute:: service_function_path
Policy action service function path
**type**\: :py:class:`ServiceFunctionPath <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServiceFunctionPath>`
**presence node**\: True
.. attribute:: http_enrichment
HTTP Enrichment action
**type**\: :py:class:`HttpEnrichment <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.HttpEnrichment>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
    # Build the binding for one policy-map-rule list entry (keyed by
    # class-name + class-type). Statement order matters: leaf/child metadata
    # must be in place before attributes are assigned, and _is_frozen must be
    # set last so __setattr__ validation only kicks in afterwards.

    # super() dispatch: plain form on Python 3, explicit-class form on Python 2.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule, self).__init__()

    self.yang_name = "policy-map-rule"
    self.yang_parent_name = "policy-map"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # YANG list keys identifying an entry of this list node.
    self.ylist_key_names = ['class_name','class_type']
    # Maps YANG child container names to (python attribute name, binding class).
    self._child_classes = OrderedDict([("shape", ("shape", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape)), ("min-bandwidth", ("min_bandwidth", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MinBandwidth)), ("bandwidth-remaining", ("bandwidth_remaining", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.BandwidthRemaining)), ("queue-limit", ("queue_limit", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.QueueLimit)), ("pfc", ("pfc", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc)), ("random-detect", ("random_detect", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.RandomDetect)), ("set", ("set", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Set)), ("police", ("police", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police)), ("service-policy", ("service_policy", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServicePolicy)), ("cac-local", ("cac_local", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal)), ("flow-params", ("flow_params", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.FlowParams)), ("metrics-ipcbr", ("metrics_ipcbr", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr)), ("react", ("react", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React)), ("pbr-redirect", ("pbr_redirect", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect)), ("pbr-forward", ("pbr_forward", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward)), ("service-function-path", ("service_function_path", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServiceFunctionPath)), ("http-enrichment", ("http_enrichment", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.HttpEnrichment))])
    # Leaf metadata: python name -> (YLeaf(yang type, yang name), accepted python types).
    self._leafs = OrderedDict([
        ('class_name', (YLeaf(YType.str, 'class-name'), ['str'])),
        ('class_type', (YLeaf(YType.enumeration, 'class-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg', 'PmapClassMapType', '')])),
        ('priority_level', (YLeaf(YType.uint8, 'priority-level'), ['int'])),
        ('default_red', (YLeaf(YType.empty, 'default-red'), ['Empty'])),
        ('ecn_red', (YLeaf(YType.empty, 'ecn-red'), ['Empty'])),
        ('http_redirect', (YLeaf(YType.str, 'http-redirect'), ['str'])),
        ('pbr_transmit', (YLeaf(YType.empty, 'pbr-transmit'), ['Empty'])),
        ('drop', (YLeaf(YType.empty, 'drop'), ['Empty'])),
        ('decap_gre', (YLeaf(YType.empty, 'decap-gre'), ['Empty'])),
        ('service_fragment', (YLeaf(YType.str, 'service-fragment'), ['str'])),
        ('fragment', (YLeaf(YType.str, 'fragment'), ['str'])),
    ])
    self.class_name = None
    self.class_type = None
    self.priority_level = None
    self.default_red = None
    self.ecn_red = None
    self.http_redirect = None
    self.pbr_transmit = None
    self.drop = None
    self.decap_gre = None
    self.service_fragment = None
    self.fragment = None

    # Non-presence child containers are instantiated eagerly and parented here.
    self.shape = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape()
    self.shape.parent = self
    self._children_name_map["shape"] = "shape"
    self.min_bandwidth = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MinBandwidth()
    self.min_bandwidth.parent = self
    self._children_name_map["min_bandwidth"] = "min-bandwidth"
    self.bandwidth_remaining = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.BandwidthRemaining()
    self.bandwidth_remaining.parent = self
    self._children_name_map["bandwidth_remaining"] = "bandwidth-remaining"
    self.queue_limit = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.QueueLimit()
    self.queue_limit.parent = self
    self._children_name_map["queue_limit"] = "queue-limit"
    self.pfc = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc()
    self.pfc.parent = self
    self._children_name_map["pfc"] = "pfc"
    self.set = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Set()
    self.set.parent = self
    self._children_name_map["set"] = "set"
    self.police = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police()
    self.police.parent = self
    self._children_name_map["police"] = "police"
    self.service_policy = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServicePolicy()
    self.service_policy.parent = self
    self._children_name_map["service_policy"] = "service-policy"
    self.cac_local = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal()
    self.cac_local.parent = self
    self._children_name_map["cac_local"] = "cac-local"
    self.flow_params = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.FlowParams()
    self.flow_params.parent = self
    self._children_name_map["flow_params"] = "flow-params"
    self.metrics_ipcbr = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr()
    self.metrics_ipcbr.parent = self
    self._children_name_map["metrics_ipcbr"] = "metrics-ipcbr"
    self.react = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React()
    self.react.parent = self
    self._children_name_map["react"] = "react"
    self.pbr_redirect = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect()
    self.pbr_redirect.parent = self
    self._children_name_map["pbr_redirect"] = "pbr-redirect"
    self.pbr_forward = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward()
    self.pbr_forward.parent = self
    self._children_name_map["pbr_forward"] = "pbr-forward"
    # service-function-path is a presence container: None until configured.
    self.service_function_path = None
    self._children_name_map["service_function_path"] = "service-function-path"
    self.http_enrichment = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.HttpEnrichment()
    self.http_enrichment.parent = self
    self._children_name_map["http_enrichment"] = "http-enrichment"
    # random-detect is a YANG list; YList keeps entries parented to self.
    self.random_detect = YList(self)
    # XPath segment includes both list-key predicates, evaluated lazily.
    self._segment_path = lambda: "policy-map-rule" + "[class-name='" + str(self.class_name) + "']" + "[class-type='" + str(self.class_type) + "']"
    # Freeze last: subsequent writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK's frozen-entity validation."""
    leaf_names = ['class_name', 'class_type', 'priority_level', 'default_red', 'ecn_red', 'http_redirect', 'pbr_transmit', 'drop', 'decap_gre', 'service_fragment', 'fragment']
    self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule, leaf_names, name, value)
class Shape(_Entity_):
    """
    Policy action shape.

    .. attribute:: rate
    	Rate configuration
    	**type**\: :py:class:`Rate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Rate>`
    .. attribute:: burst
    	Burst size configuration
    	**type**\: :py:class:`Burst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Burst>`
    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # super() dispatch: plain form on Python 3, explicit-class form on Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape, self).__init__()

        self.yang_name = "shape"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("rate", ("rate", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Rate)), ("burst", ("burst", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Burst))])
        # No leaves of its own: shape only aggregates the rate/burst containers.
        self._leafs = OrderedDict()

        self.rate = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Rate()
        self.rate.parent = self
        self._children_name_map["rate"] = "rate"
        self.burst = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Burst()
        self.burst.parent = self
        self._children_name_map["burst"] = "burst"
        self._segment_path = lambda: "shape"
        # Freeze last: subsequent writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape, [], name, value)


    class Rate(_Entity_):
        """
        Rate configuration.

        .. attribute:: value
        	Shape bandwidth value
        	**type**\: union of the below types:
        	**type**\: str
        	**pattern:** [$][a\-zA\-Z0\-9][a\-zA\-Z0\-9\\.\_@%+\:\\\-]{0,32}[=]\\d{1,10}
        	**type**\: int
        	**range:** 1..4294967295
        .. attribute:: unit
        	Shape bandwidth units
        	**pattern:** (bps)\|(kbps)\|(mbps)\|(gbps)\|(percent)\|(per\-million)\|(per\-thousand)
        	**type**\: str
        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            # super() dispatch: plain form on Python 3, explicit-class form on Python 2.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Rate, self).__init__()

            self.yang_name = "rate"
            self.yang_parent_name = "shape"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # 'value' is a YANG union: accepts either a str ($variable=NNN) or an int.
            self._leafs = OrderedDict([
                ('value', (YLeaf(YType.str, 'value'), ['str','int'])),
                ('unit', (YLeaf(YType.str, 'unit'), ['str'])),
            ])
            self.value = None
            self.unit = None
            self._segment_path = lambda: "rate"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Rate, ['value', 'unit'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Rate']['meta_info']


    class Burst(_Entity_):
        """
        Burst size configuration.

        .. attribute:: value
        	Burst size value
        	**type**\: int
        	**range:** 0..4294967295
        .. attribute:: units
        	Burst size units
        	**type**\: str
        	**pattern:** (bytes)\|(kbytes)\|(mbytes)\|(gbytes)\|(us)\|(ms)\|(packets)\|(cells)
        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            # super() dispatch: plain form on Python 3, explicit-class form on Python 2.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Burst, self).__init__()

            self.yang_name = "burst"
            self.yang_parent_name = "shape"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                ('units', (YLeaf(YType.str, 'units'), ['str'])),
            ])
            self.value = None
            self.units = None
            self._segment_path = lambda: "burst"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Burst, ['value', 'units'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape.Burst']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Shape']['meta_info']
class MinBandwidth(_Entity_):
    """
    Policy action minimum bandwidth queue.

    .. attribute:: value
    	Minimum bandwidth value
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: unit
    	Minimum bandwidth units
    	**type**\: str
    	**pattern:** (bps)\|(kbps)\|(mbps)\|(gbps)\|(percent)\|(per\-million)\|(per\-thousand)
    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # super() dispatch: plain form on Python 3, explicit-class form on Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MinBandwidth, self).__init__()

        self.yang_name = "min-bandwidth"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf metadata: python name -> (YLeaf(yang type, yang name), accepted python types).
        self._leafs = OrderedDict([
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('unit', (YLeaf(YType.str, 'unit'), ['str'])),
        ])
        self.value = None
        self.unit = None
        self._segment_path = lambda: "min-bandwidth"
        # Freeze last: subsequent writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MinBandwidth, ['value', 'unit'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MinBandwidth']['meta_info']
class BandwidthRemaining(_Entity_):
    """
    Policy action bandwidth remaining queue.

    .. attribute:: value
    	Remaining bandwidth value
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: unit
    	Remaining bandwidth units
    	**type**\: str
    	**pattern:** (percent)\|(ratio)
    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # super() dispatch: plain form on Python 3, explicit-class form on Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.BandwidthRemaining, self).__init__()

        self.yang_name = "bandwidth-remaining"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf metadata: python name -> (YLeaf(yang type, yang name), accepted python types).
        self._leafs = OrderedDict([
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('unit', (YLeaf(YType.str, 'unit'), ['str'])),
        ])
        self.value = None
        self.unit = None
        self._segment_path = lambda: "bandwidth-remaining"
        # Freeze last: subsequent writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.BandwidthRemaining, ['value', 'unit'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.BandwidthRemaining']['meta_info']
class QueueLimit(_Entity_):
    """
    Policy action queue limit.

    .. attribute:: value
    	Remaining bandwidth value
    	**type**\: int
    	**range:** 0..4294967295
    .. attribute:: unit
    	Remaining bandwidth units
    	**type**\: str
    	**pattern:** (bytes)\|(kbytes)\|(mbytes)\|(gbytes)\|(us)\|(ms)\|(packets)\|(cells)\|(percent)
    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # super() dispatch: plain form on Python 3, explicit-class form on Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.QueueLimit, self).__init__()

        self.yang_name = "queue-limit"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf metadata: python name -> (YLeaf(yang type, yang name), accepted python types).
        self._leafs = OrderedDict([
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('unit', (YLeaf(YType.str, 'unit'), ['str'])),
        ])
        self.value = None
        self.unit = None
        self._segment_path = lambda: "queue-limit"
        # Freeze last: subsequent writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.QueueLimit, ['value', 'unit'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.QueueLimit']['meta_info']
class Pfc(_Entity_):
    """
    Policy action pfc.

    .. attribute:: pfc_pause_set

        Pfc Pause set value
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: pfc_buffer_size

        **type**\: :py:class:`PfcBufferSize <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcBufferSize>`

    .. attribute:: pfc_pause_threshold

        **type**\: :py:class:`PfcPauseThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcPauseThreshold>`

    .. attribute:: pfc_resume_threshold

        **type**\: :py:class:`PfcResumeThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcResumeThreshold>`

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 3 supports zero-argument super(); Python 2 needs the
        # explicit class reference.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc, self).__init__()

        # YANG bookkeeping consumed by the ydk runtime.
        self.yang_name = "pfc"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Maps YANG child-container names to (python attribute, class) pairs.
        self._child_classes = OrderedDict([("pfc-buffer-size", ("pfc_buffer_size", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcBufferSize)), ("pfc-pause-threshold", ("pfc_pause_threshold", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcPauseThreshold)), ("pfc-resume-threshold", ("pfc_resume_threshold", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcResumeThreshold))])
        self._leafs = OrderedDict([
            ('pfc_pause_set', (YLeaf(YType.empty, 'pfc-pause-set'), ['Empty'])),
        ])
        self.pfc_pause_set = None

        # Instantiate the three child containers and parent-link each one.
        self.pfc_buffer_size = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcBufferSize()
        self.pfc_buffer_size.parent = self
        self._children_name_map["pfc_buffer_size"] = "pfc-buffer-size"

        self.pfc_pause_threshold = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcPauseThreshold()
        self.pfc_pause_threshold.parent = self
        self._children_name_map["pfc_pause_threshold"] = "pfc-pause-threshold"

        self.pfc_resume_threshold = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcResumeThreshold()
        self.pfc_resume_threshold.parent = self
        self._children_name_map["pfc_resume_threshold"] = "pfc-resume-threshold"

        self._segment_path = lambda: "pfc"
        # Set last: construction complete; consulted by _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # All writes go through ydk validation; 'pfc_pause_set' is the only
        # user-settable leaf on this container.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc, ['pfc_pause_set'], name, value)

    class PfcBufferSize(_Entity_):
        """
        .. attribute:: value

            Pfc buffer size value
            **type**\: int
            **range:** 0..4294967295

        .. attribute:: unit

            Pfc buffer size units
            **type**\: str
            **pattern:** (bytes)\|(kbytes)\|(mbytes)\|(gbytes)\|(us)\|(ms)\|(packets)\|(cells)

        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            # Python 2/3 compatible base-class initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcBufferSize, self).__init__()

            self.yang_name = "pfc-buffer-size"
            self.yang_parent_name = "pfc"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container
            self._leafs = OrderedDict([
                ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                ('unit', (YLeaf(YType.str, 'unit'), ['str'])),
            ])
            self.value = None
            self.unit = None
            self._segment_path = lambda: "pfc-buffer-size"
            self._is_frozen = True  # construction done; writes now validated

        def __setattr__(self, name, value):
            # Only 'value' and 'unit' leafs are writable on this node.
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcBufferSize, ['value', 'unit'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import keeps the large generated meta module off the
            # regular import path.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcBufferSize']['meta_info']

    class PfcPauseThreshold(_Entity_):
        """
        .. attribute:: value

            Pfc pause threshold value
            **type**\: int
            **range:** 0..4294967295

        .. attribute:: unit

            Pfc pause threshold units
            **type**\: str
            **pattern:** (bytes)\|(kbytes)\|(mbytes)\|(gbytes)\|(us)\|(ms)\|(packets)\|(cells)

        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            # Python 2/3 compatible base-class initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcPauseThreshold, self).__init__()

            self.yang_name = "pfc-pause-threshold"
            self.yang_parent_name = "pfc"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container
            self._leafs = OrderedDict([
                ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                ('unit', (YLeaf(YType.str, 'unit'), ['str'])),
            ])
            self.value = None
            self.unit = None
            self._segment_path = lambda: "pfc-pause-threshold"
            self._is_frozen = True  # construction done; writes now validated

        def __setattr__(self, name, value):
            # Only 'value' and 'unit' leafs are writable on this node.
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcPauseThreshold, ['value', 'unit'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import keeps the large generated meta module off the
            # regular import path.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcPauseThreshold']['meta_info']

    class PfcResumeThreshold(_Entity_):
        """
        .. attribute:: value

            Pfc resume threshold value
            **type**\: int
            **range:** 0..4294967295

        .. attribute:: unit

            Pfc resume threshold units
            **type**\: str
            **pattern:** (bytes)\|(kbytes)\|(mbytes)\|(gbytes)\|(us)\|(ms)\|(packets)\|(cells)

        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            # Python 2/3 compatible base-class initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcResumeThreshold, self).__init__()

            self.yang_name = "pfc-resume-threshold"
            self.yang_parent_name = "pfc"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container
            self._leafs = OrderedDict([
                ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                ('unit', (YLeaf(YType.str, 'unit'), ['str'])),
            ])
            self.value = None
            self.unit = None
            self._segment_path = lambda: "pfc-resume-threshold"
            self._is_frozen = True  # construction done; writes now validated

        def __setattr__(self, name, value):
            # Only 'value' and 'unit' leafs are writable on this node.
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcResumeThreshold, ['value', 'unit'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import keeps the large generated meta module off the
            # regular import path.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc.PfcResumeThreshold']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated meta module off the regular
        # import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Pfc']['meta_info']
class RandomDetect(_Entity_):
    """
    Random early detection.
    All RED profiles in a class must be based
    on the same field.

    .. attribute:: threshold_min_value  (key)

        Minimum RED threshold value
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: threshold_min_units  (key)

        Minimum RED threshold units
        **type**\: str
        **pattern:** (bytes)\|(kbytes)\|(mbytes)\|(gbytes)\|(us)\|(ms)\|(packets)\|(cells)

    .. attribute:: threshold_max_value  (key)

        Maximum RED threshold value
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: threshold_max_units  (key)

        Maximum RED threshold units
        **type**\: str
        **pattern:** (bytes)\|(kbytes)\|(mbytes)\|(gbytes)\|(us)\|(ms)\|(packets)\|(cells)

    .. attribute:: cos

        WRED based on CoS
        **type**\: list of int
        **range:** 0..7

    .. attribute:: discard_class

        WRED based on discard class
        **type**\: list of int
        **range:** 0..7

    .. attribute:: dscp

        WRED based on DSCP
        **type**\: list of str
        **pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(([0\-9]\|[1\-5][0\-9]\|6[0\-3])\-([0\-9]\|[1\-5][0\-9]\|6[0\-3]))\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)

    .. attribute:: mpls_exp

        MPLS Experimental value based WRED
        **type**\: list of int
        **range:** 0..7

    .. attribute:: precedence

        WRED based on precedence
        **type**\: union of the below types:

            **type**\: list of int
            **range:** 0..7

            **type**\: list of str
            **pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)

    .. attribute:: dei

        DEI based WRED. Should be value 0..1
        **type**\: list of int
        **range:** 0..1

    .. attribute:: ecn

        ECN based WRED
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible base-class initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.RandomDetect, self).__init__()

        # YANG bookkeeping consumed by the ydk runtime.
        self.yang_name = "random-detect"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        # This node is a YANG list entry keyed by the four threshold leafs.
        self.ylist_key_names = ['threshold_min_value','threshold_min_units','threshold_max_value','threshold_max_units']
        self._child_classes = OrderedDict([])  # leaf-only list entry
        # Leaf table; YLeafList entries correspond to leaf-lists in the model.
        self._leafs = OrderedDict([
            ('threshold_min_value', (YLeaf(YType.uint32, 'threshold-min-value'), ['int'])),
            ('threshold_min_units', (YLeaf(YType.str, 'threshold-min-units'), ['str'])),
            ('threshold_max_value', (YLeaf(YType.uint32, 'threshold-max-value'), ['int'])),
            ('threshold_max_units', (YLeaf(YType.str, 'threshold-max-units'), ['str'])),
            ('cos', (YLeafList(YType.uint8, 'cos'), ['int'])),
            ('discard_class', (YLeafList(YType.uint8, 'discard-class'), ['int'])),
            ('dscp', (YLeafList(YType.str, 'dscp'), ['str'])),
            ('mpls_exp', (YLeafList(YType.uint8, 'mpls-exp'), ['int'])),
            ('precedence', (YLeafList(YType.str, 'precedence'), ['int','str'])),
            ('dei', (YLeafList(YType.uint8, 'dei'), ['int'])),
            ('ecn', (YLeaf(YType.empty, 'ecn'), ['Empty'])),
        ])
        self.threshold_min_value = None
        self.threshold_min_units = None
        self.threshold_max_value = None
        self.threshold_max_units = None
        self.cos = []
        self.discard_class = []
        self.dscp = []
        self.mpls_exp = []
        self.precedence = []
        self.dei = []
        self.ecn = None
        # Segment path embeds the current values of all four list keys,
        # so it is a lambda (evaluated on demand) rather than a constant.
        self._segment_path = lambda: "random-detect" + "[threshold-min-value='" + str(self.threshold_min_value) + "']" + "[threshold-min-units='" + str(self.threshold_min_units) + "']" + "[threshold-max-value='" + str(self.threshold_max_value) + "']" + "[threshold-max-units='" + str(self.threshold_max_units) + "']"
        # Set last: construction complete; consulted by _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every assignment through ydk validation against the full
        # leaf list of this node.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.RandomDetect, ['threshold_min_value', 'threshold_min_units', 'threshold_max_value', 'threshold_max_units', 'cos', 'discard_class', 'dscp', 'mpls_exp', 'precedence', 'dei', 'ecn'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated meta module off the regular
        # import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.RandomDetect']['meta_info']
class Set(_Entity_):
    """
    Policy action packet marking.

    .. attribute:: dscp

        Marks a packet by setting the DSCP in the ToS byte
        **type**\: str
        **pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)

    .. attribute:: qos_group

        Sets the QoS group identifiers on IPv4 or MPLS packets. The set qos\-group is supported only on an ingress policy
        **type**\: int
        **range:** 0..512

    .. attribute:: traffic_class

        Sets the Traffic class identifiers on IPv4 or MPLS packets
        **type**\: int
        **range:** 0..63

    .. attribute:: discard_class

        Sets the discard class on IPv4 or MPLS packets. The discard\-class can be used only in service policies that are attached in the ingress policy
        **type**\: int
        **range:** 0..7

    .. attribute:: forward_class

        Sets the forward class
        **type**\: int
        **range:** 0..7

    .. attribute:: df

        Set DF bit
        **type**\: int
        **range:** 0..1

    .. attribute:: cos

        Sets the specific IEEE 802.1Q Layer 2 CoS value of an outgoing packet. This command should be used by a router if a user wants to mark a packet that is being sent to a switch. Switches can leverage Layer 2 header information, including a CoS value marking. Packets entering an interface cannot be set with a CoS value
        **type**\: int
        **range:** 0..7

    .. attribute:: inner_cos

        Set inner cos
        **type**\: int
        **range:** 0..7

    .. attribute:: precedence

        Sets the precedence value in the IP header
        **type**\: union of the below types:

            **type**\: int
            **range:** 0..7

            **type**\: str
            **pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)

    .. attribute:: precedence_tunnel

        Sets the precedence tunnel value for ipsec
        **type**\: union of the below types:

            **type**\: int
            **range:** 0..7

            **type**\: str
            **pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)

    .. attribute:: mpls_experimental_top_most

        Sets the experimental value of the MPLS packet top\-most labels
        **type**\: int
        **range:** 0..7

    .. attribute:: mpls_experimental_imposition

        Sets the experimental value of the MPLS packet imposition labels. Imposition can be used only in service policies that are attached in the ingress policy
        **type**\: int
        **range:** 0..7

    .. attribute:: srp_priority

        Sets the spatial reuse protocol priority value of an outgoing packet
        **type**\: int
        **range:** 0..7

    .. attribute:: fr_de

        Set FrameRelay DE bit
        **type**\: int
        **range:** 0..1

    .. attribute:: dei

        Set DEI bit
        **type**\: int
        **range:** 0..1

    .. attribute:: dei_imposition

        Set DEI imposition bit
        **type**\: int
        **range:** 0..1

    .. attribute:: source_address

        Source IPv4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

    .. attribute:: destination_address

        Destination IPv4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

    .. attribute:: atm_clp

        Set atm cell\-loss\-priority bit
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: dscp_tunnel

        Marks a packet by setting DSCP in the tunnel header. This is specific to ipsec tunnels
        **type**\: str
        **pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible base-class initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Set, self).__init__()

        # YANG bookkeeping consumed by the ydk runtime.
        self.yang_name = "set"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        # Leaf table: python attribute -> (YLeaf descriptor, accepted python
        # types). 'precedence'/'precedence_tunnel' accept int or str (union).
        self._leafs = OrderedDict([
            ('dscp', (YLeaf(YType.str, 'dscp'), ['str'])),
            ('qos_group', (YLeaf(YType.uint16, 'qos-group'), ['int'])),
            ('traffic_class', (YLeaf(YType.uint8, 'traffic-class'), ['int'])),
            ('discard_class', (YLeaf(YType.uint8, 'discard-class'), ['int'])),
            ('forward_class', (YLeaf(YType.uint8, 'forward-class'), ['int'])),
            ('df', (YLeaf(YType.uint8, 'df'), ['int'])),
            ('cos', (YLeaf(YType.uint8, 'cos'), ['int'])),
            ('inner_cos', (YLeaf(YType.uint8, 'inner-cos'), ['int'])),
            ('precedence', (YLeaf(YType.str, 'precedence'), ['int','str'])),
            ('precedence_tunnel', (YLeaf(YType.str, 'precedence-tunnel'), ['int','str'])),
            ('mpls_experimental_top_most', (YLeaf(YType.uint8, 'mpls-experimental-top-most'), ['int'])),
            ('mpls_experimental_imposition', (YLeaf(YType.uint8, 'mpls-experimental-imposition'), ['int'])),
            ('srp_priority', (YLeaf(YType.uint8, 'srp-priority'), ['int'])),
            ('fr_de', (YLeaf(YType.uint8, 'fr-de'), ['int'])),
            ('dei', (YLeaf(YType.uint8, 'dei'), ['int'])),
            ('dei_imposition', (YLeaf(YType.uint8, 'dei-imposition'), ['int'])),
            ('source_address', (YLeaf(YType.str, 'source-address'), ['str'])),
            ('destination_address', (YLeaf(YType.str, 'destination-address'), ['str'])),
            ('atm_clp', (YLeaf(YType.empty, 'atm-clp'), ['Empty'])),
            ('dscp_tunnel', (YLeaf(YType.str, 'dscp-tunnel'), ['str'])),
        ])
        # All marking leafs start unset.
        self.dscp = None
        self.qos_group = None
        self.traffic_class = None
        self.discard_class = None
        self.forward_class = None
        self.df = None
        self.cos = None
        self.inner_cos = None
        self.precedence = None
        self.precedence_tunnel = None
        self.mpls_experimental_top_most = None
        self.mpls_experimental_imposition = None
        self.srp_priority = None
        self.fr_de = None
        self.dei = None
        self.dei_imposition = None
        self.source_address = None
        self.destination_address = None
        self.atm_clp = None
        self.dscp_tunnel = None
        self._segment_path = lambda: "set"
        # Set last: construction complete; consulted by _perform_setattr.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route every assignment through ydk validation against the full
        # leaf list of this node.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Set, ['dscp', 'qos_group', 'traffic_class', 'discard_class', 'forward_class', 'df', 'cos', 'inner_cos', 'precedence', 'precedence_tunnel', 'mpls_experimental_top_most', 'mpls_experimental_imposition', 'srp_priority', 'fr_de', 'dei', 'dei_imposition', 'source_address', 'destination_address', 'atm_clp', 'dscp_tunnel'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated meta module off the regular
        # import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Set']['meta_info']
class Police(_Entity_):
"""
Configures traffic policing action.
.. attribute:: rate
Rate configuration
**type**\: :py:class:`Rate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Rate>`
.. attribute:: peak_rate
Peak rate configuration
**type**\: :py:class:`PeakRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakRate>`
.. attribute:: burst
Burst configuration
**type**\: :py:class:`Burst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Burst>`
.. attribute:: peak_burst
Peak burst configuration
**type**\: :py:class:`PeakBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakBurst>`
.. attribute:: conform_action
Configures the action to take on packets that conform to the rate limit
**type**\: :py:class:`ConformAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction>`
.. attribute:: exceed_action
Configures the action to take on packets that exceed the rate limit
**type**\: :py:class:`ExceedAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction>`
.. attribute:: violate_action
Configures the action to take on packets that violate the rate limit
**type**\: :py:class:`ViolateAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
    # Python 2/3 compatible base-class initialization.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police, self).__init__()

    # YANG bookkeeping consumed by the ydk runtime.
    self.yang_name = "police"
    self.yang_parent_name = "policy-map-rule"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Maps YANG child-container names to (python attribute, class) pairs.
    self._child_classes = OrderedDict([("rate", ("rate", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Rate)), ("peak-rate", ("peak_rate", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakRate)), ("burst", ("burst", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Burst)), ("peak-burst", ("peak_burst", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakBurst)), ("conform-action", ("conform_action", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction)), ("exceed-action", ("exceed_action", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction)), ("violate-action", ("violate_action", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction))])
    self._leafs = OrderedDict()  # no direct leafs; all data lives in children

    # Instantiate each child container and parent-link it.
    self.rate = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Rate()
    self.rate.parent = self
    self._children_name_map["rate"] = "rate"

    self.peak_rate = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakRate()
    self.peak_rate.parent = self
    self._children_name_map["peak_rate"] = "peak-rate"

    self.burst = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Burst()
    self.burst.parent = self
    self._children_name_map["burst"] = "burst"

    self.peak_burst = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakBurst()
    self.peak_burst.parent = self
    self._children_name_map["peak_burst"] = "peak-burst"

    self.conform_action = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction()
    self.conform_action.parent = self
    self._children_name_map["conform_action"] = "conform-action"

    self.exceed_action = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction()
    self.exceed_action.parent = self
    self._children_name_map["exceed_action"] = "exceed-action"

    self.violate_action = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction()
    self.violate_action.parent = self
    self._children_name_map["violate_action"] = "violate-action"

    self._segment_path = lambda: "police"
    # Set last: construction complete; consulted by _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No user-settable leafs on this container (empty leaf-name list);
    # all assignments are still validated by ydk.
    self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police, [], name, value)
class Rate(_Entity_):
    """
    Rate configuration.

    .. attribute:: value

        Rate value
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: units

        Rate units
        **type**\: str
        **pattern:** (bps)\|(kbps)\|(mbps)\|(gbps)\|(pps)\|(percent)\|(cellsps)\|(per\-thousand)\|(per\-million)

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible base-class initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Rate, self).__init__()

        self.yang_name = "rate"
        self.yang_parent_name = "police"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('units', (YLeaf(YType.str, 'units'), ['str'])),
        ])
        self.value = None
        self.units = None
        self._segment_path = lambda: "rate"
        self._is_frozen = True  # construction done; writes now validated

    def __setattr__(self, name, value):
        # Only 'value' and 'units' leafs are writable on this node.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Rate, ['value', 'units'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated meta module off the
        # regular import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Rate']['meta_info']
class PeakRate(_Entity_):
    """
    Peak rate configuration.

    .. attribute:: value

        Peak rate value
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: units

        Peak rate units
        **type**\: str
        **pattern:** (bps)\|(kbps)\|(mbps)\|(gbps)\|(pps)\|(percent)\|(cellsps)

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible base-class initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakRate, self).__init__()

        self.yang_name = "peak-rate"
        self.yang_parent_name = "police"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('units', (YLeaf(YType.str, 'units'), ['str'])),
        ])
        self.value = None
        self.units = None
        self._segment_path = lambda: "peak-rate"
        self._is_frozen = True  # construction done; writes now validated

    def __setattr__(self, name, value):
        # Only 'value' and 'units' leafs are writable on this node.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakRate, ['value', 'units'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated meta module off the
        # regular import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakRate']['meta_info']
class Burst(_Entity_):
    """
    Burst configuration.

    .. attribute:: value

        Burst value
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: units

        Burst units
        **type**\: str
        **pattern:** (bytes)\|(kbytes)\|(mbytes)\|(gbytes)\|(us)\|(ms)\|(packets)\|(cells)

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible base-class initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Burst, self).__init__()

        self.yang_name = "burst"
        self.yang_parent_name = "police"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('units', (YLeaf(YType.str, 'units'), ['str'])),
        ])
        self.value = None
        self.units = None
        self._segment_path = lambda: "burst"
        self._is_frozen = True  # construction done; writes now validated

    def __setattr__(self, name, value):
        # Only 'value' and 'units' leafs are writable on this node.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Burst, ['value', 'units'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated meta module off the
        # regular import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.Burst']['meta_info']
class PeakBurst(_Entity_):
    """
    Peak burst configuration.

    .. attribute:: value

        Peak burst value
        **type**\: int
        **range:** 0..4294967295

    .. attribute:: units

        Peak burst units
        **type**\: str
        **pattern:** (bytes)\|(kbytes)\|(mbytes)\|(gbytes)\|(us)\|(ms)\|(packets)\|(cells)

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible base-class initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakBurst, self).__init__()

        self.yang_name = "peak-burst"
        self.yang_parent_name = "police"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
            ('units', (YLeaf(YType.str, 'units'), ['str'])),
        ])
        self.value = None
        self.units = None
        self._segment_path = lambda: "peak-burst"
        self._is_frozen = True  # construction done; writes now validated

    def __setattr__(self, name, value):
        # Only 'value' and 'units' leafs are writable on this node.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakBurst, ['value', 'units'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import keeps the large generated meta module off the
        # regular import path.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.PeakBurst']['meta_info']
class ConformAction(_Entity_):
"""
Configures the action to take on packets that conform
to the rate limit.
.. attribute:: transmit
Police action transmit
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: drop
Police action drop
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: set
Police action packet marking
**type**\: :py:class:`Set <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction.Set>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
    # Python 2/3 compatible base-class initialization.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction, self).__init__()

    # YANG bookkeeping consumed by the ydk runtime.
    self.yang_name = "conform-action"
    self.yang_parent_name = "police"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("set", ("set", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction.Set))])
    # NOTE(review): the YANG arg 'Transmit' is capitalized, unlike every
    # sibling leaf name here (e.g. 'drop'). This is generator output —
    # confirm against the Cisco-IOS-XR-infra-policymgr-cfg YANG model
    # before assuming it is a typo.
    self._leafs = OrderedDict([
        ('transmit', (YLeaf(YType.empty, 'Transmit'), ['Empty'])),
        ('drop', (YLeaf(YType.empty, 'drop'), ['Empty'])),
    ])
    self.transmit = None
    self.drop = None

    # Child container for marking actions, parent-linked to this node.
    self.set = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction.Set()
    self.set.parent = self
    self._children_name_map["set"] = "set"

    self._segment_path = lambda: "conform-action"
    # Set last: construction complete; consulted by _perform_setattr.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route every assignment through ydk validation; 'transmit' and
    # 'drop' are the only user-settable leafs on this container.
    self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction, ['transmit', 'drop'], name, value)
class Set(_Entity_):
"""
Police action packet marking.
.. attribute:: dscp
Marks a packet by setting the DSCP in the ToS byte
**type**\: str
**pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
.. attribute:: qos_group
Sets the QoS group identifiers on IPv4 or MPLS packets. The set qos\-group is supported only on an ingress policy
**type**\: int
**range:** 0..512
.. attribute:: traffic_class
Sets the Traffic class identifiers on IPv4 or MPLS packets
**type**\: int
**range:** 0..63
.. attribute:: discard_class
Sets the discard class on IPv4 or MPLS packets. The discard\-class can be used only in service policies that are attached in the ingress policy
**type**\: int
**range:** 0..7
.. attribute:: forward_class
Sets the forward class
**type**\: int
**range:** 0..7
.. attribute:: df
Set DF bit
**type**\: int
**range:** 0..1
.. attribute:: cos
Sets the specific IEEE 802.1Q Layer 2 CoS value of an outgoing packet. This command should be used by a router if a user wants to mark a packet that is being sent to a switch. Switches can leverage Layer 2 header information, including a CoS value marking. Packets entering an interface cannot be set with a CoS value
**type**\: int
**range:** 0..7
.. attribute:: inner_cos
Set inner cos
**type**\: int
**range:** 0..7
.. attribute:: precedence
Sets the precedence value in the IP header
**type**\: union of the below types:
**type**\: int
**range:** 0..7
**type**\: str
**pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
.. attribute:: precedence_tunnel
Sets the precedence tunnel value for ipsec
**type**\: union of the below types:
**type**\: int
**range:** 0..7
**type**\: str
**pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
.. attribute:: mpls_experimental_top_most
Sets the experimental value of the MPLS packet top\-most labels
**type**\: int
**range:** 0..7
.. attribute:: mpls_experimental_imposition
Sets the experimental value of the MPLS packet imposition labels. Imposition can be used only in service policies that are attached in the ingress policy
**type**\: int
**range:** 0..7
.. attribute:: srp_priority
Sets the spatial reuse protocol priority value of an outgoing packet
**type**\: int
**range:** 0..7
.. attribute:: fr_de
Set FrameRelay DE bit
**type**\: int
**range:** 0..1
.. attribute:: dei
Set DEI bit
**type**\: int
**range:** 0..1
.. attribute:: dei_imposition
Set DEI imposition bit
**type**\: int
**range:** 0..1
.. attribute:: source_address
Source IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: destination_address
Destination IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: atm_clp
Set atm cell\-loss\-priority bit
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: dscp_tunnel
Marks a packet by setting DSCP in the tunnel header. This is specific to ipsec tunnels
**type**\: str
**pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
    # Generated initializer: registers this node's YANG metadata and creates
    # one attribute per leaf of the conform-action "set" marking container.
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 requires the explicit class argument to super().
        super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction.Set, self).__init__()

    self.yang_name = "set"
    self.yang_parent_name = "conform-action"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # No child containers under this node.
    self._child_classes = OrderedDict([])
    # Leaf registry: python attribute -> (YLeaf(YANG type, YANG leaf name),
    # accepted python value types).
    self._leafs = OrderedDict([
        ('dscp', (YLeaf(YType.str, 'dscp'), ['str'])),
        ('qos_group', (YLeaf(YType.uint16, 'qos-group'), ['int'])),
        ('traffic_class', (YLeaf(YType.uint8, 'traffic-class'), ['int'])),
        ('discard_class', (YLeaf(YType.uint8, 'discard-class'), ['int'])),
        ('forward_class', (YLeaf(YType.uint8, 'forward-class'), ['int'])),
        ('df', (YLeaf(YType.uint8, 'df'), ['int'])),
        ('cos', (YLeaf(YType.uint8, 'cos'), ['int'])),
        ('inner_cos', (YLeaf(YType.uint8, 'inner-cos'), ['int'])),
        ('precedence', (YLeaf(YType.str, 'precedence'), ['int','str'])),
        ('precedence_tunnel', (YLeaf(YType.str, 'precedence-tunnel'), ['int','str'])),
        ('mpls_experimental_top_most', (YLeaf(YType.uint8, 'mpls-experimental-top-most'), ['int'])),
        ('mpls_experimental_imposition', (YLeaf(YType.uint8, 'mpls-experimental-imposition'), ['int'])),
        ('srp_priority', (YLeaf(YType.uint8, 'srp-priority'), ['int'])),
        ('fr_de', (YLeaf(YType.uint8, 'fr-de'), ['int'])),
        ('dei', (YLeaf(YType.uint8, 'dei'), ['int'])),
        ('dei_imposition', (YLeaf(YType.uint8, 'dei-imposition'), ['int'])),
        ('source_address', (YLeaf(YType.str, 'source-address'), ['str'])),
        ('destination_address', (YLeaf(YType.str, 'destination-address'), ['str'])),
        ('atm_clp', (YLeaf(YType.empty, 'atm-clp'), ['Empty'])),
        ('dscp_tunnel', (YLeaf(YType.str, 'dscp-tunnel'), ['str'])),
    ])
    # All leaves start unset; later writes are routed through __setattr__.
    self.dscp = None
    self.qos_group = None
    self.traffic_class = None
    self.discard_class = None
    self.forward_class = None
    self.df = None
    self.cos = None
    self.inner_cos = None
    self.precedence = None
    self.precedence_tunnel = None
    self.mpls_experimental_top_most = None
    self.mpls_experimental_imposition = None
    self.srp_priority = None
    self.fr_de = None
    self.dei = None
    self.dei_imposition = None
    self.source_address = None
    self.destination_address = None
    # Relative XPath segment of this container.
    self._segment_path = lambda: "set"
    self._is_frozen = True
def __setattr__(self, name, value):
    """Funnel every attribute write through YDK's leaf-validation hook."""
    leaf_names = [
        'dscp', 'qos_group', 'traffic_class', 'discard_class',
        'forward_class', 'df', 'cos', 'inner_cos', 'precedence',
        'precedence_tunnel', 'mpls_experimental_top_most',
        'mpls_experimental_imposition', 'srp_priority', 'fr_de', 'dei',
        'dei_imposition', 'source_address', 'destination_address',
        'atm_clp', 'dscp_tunnel',
    ]
    self._perform_setattr(
        PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction.Set,
        leaf_names, name, value)
@staticmethod
def _meta_info():
    """Look up the generated meta-info record for this class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    table_entry = meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction.Set']
    return table_entry['meta_info']
@staticmethod
def _meta_info():
    """Look up the generated meta-info record for this class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    table_entry = meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ConformAction']
    return table_entry['meta_info']
class ExceedAction(_Entity_):
    """
    Configures the action to take on packets that exceed
    the rate limit.
    .. attribute:: transmit
    Police action transmit
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: drop
    Police action drop
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: set
    Police action packet marking
    **type**\: :py:class:`Set <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction.Set>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Generated initializer: registers YANG metadata, the nested "set"
        # marking container and the two empty-typed action leaves.
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 requires the explicit class argument to super().
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction, self).__init__()

        self.yang_name = "exceed-action"
        self.yang_parent_name = "police"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("set", ("set", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction.Set))])
        # NOTE(review): the YANG identifier here is capitalized 'Transmit'
        # (unlike lowercase 'drop'); this matches the generated model --
        # confirm against the YANG source before changing.
        self._leafs = OrderedDict([
            ('transmit', (YLeaf(YType.empty, 'Transmit'), ['Empty'])),
            ('drop', (YLeaf(YType.empty, 'drop'), ['Empty'])),
        ])
        self.transmit = None
        self.drop = None

        # Child container is created eagerly and parented to this node.
        self.set = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction.Set()
        self.set.parent = self
        self._children_name_map["set"] = "set"
        self._segment_path = lambda: "exceed-action"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK's validating setter.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction, ['transmit', 'drop'], name, value)


    class Set(_Entity_):
        """
        Police action packet marking.
        .. attribute:: dscp
        Marks a packet by setting the DSCP in the ToS byte
        **type**\: str
        **pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
        .. attribute:: qos_group
        Sets the QoS group identifiers on IPv4 or MPLS packets. The set qos\-group is supported only on an ingress policy
        **type**\: int
        **range:** 0..512
        .. attribute:: traffic_class
        Sets the Traffic class identifiers on IPv4 or MPLS packets
        **type**\: int
        **range:** 0..63
        .. attribute:: discard_class
        Sets the discard class on IPv4 or MPLS packets. The discard\-class can be used only in service policies that are attached in the ingress policy
        **type**\: int
        **range:** 0..7
        .. attribute:: forward_class
        Sets the forward class
        **type**\: int
        **range:** 0..7
        .. attribute:: df
        Set DF bit
        **type**\: int
        **range:** 0..1
        .. attribute:: cos
        Sets the specific IEEE 802.1Q Layer 2 CoS value of an outgoing packet. This command should be used by a router if a user wants to mark a packet that is being sent to a switch. Switches can leverage Layer 2 header information, including a CoS value marking. Packets entering an interface cannot be set with a CoS value
        **type**\: int
        **range:** 0..7
        .. attribute:: inner_cos
        Set inner cos
        **type**\: int
        **range:** 0..7
        .. attribute:: precedence
        Sets the precedence value in the IP header
        **type**\: union of the below types:
        **type**\: int
        **range:** 0..7
        **type**\: str
        **pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
        .. attribute:: precedence_tunnel
        Sets the precedence tunnel value for ipsec
        **type**\: union of the below types:
        **type**\: int
        **range:** 0..7
        **type**\: str
        **pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
        .. attribute:: mpls_experimental_top_most
        Sets the experimental value of the MPLS packet top\-most labels
        **type**\: int
        **range:** 0..7
        .. attribute:: mpls_experimental_imposition
        Sets the experimental value of the MPLS packet imposition labels. Imposition can be used only in service policies that are attached in the ingress policy
        **type**\: int
        **range:** 0..7
        .. attribute:: srp_priority
        Sets the spatial reuse protocol priority value of an outgoing packet
        **type**\: int
        **range:** 0..7
        .. attribute:: fr_de
        Set FrameRelay DE bit
        **type**\: int
        **range:** 0..1
        .. attribute:: dei
        Set DEI bit
        **type**\: int
        **range:** 0..1
        .. attribute:: dei_imposition
        Set DEI imposition bit
        **type**\: int
        **range:** 0..1
        .. attribute:: source_address
        Source IPv4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: destination_address
        Destination IPv4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: atm_clp
        Set atm cell\-loss\-priority bit
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        .. attribute:: dscp_tunnel
        Marks a packet by setting DSCP in the tunnel header. This is specific to ipsec tunnels
        **type**\: str
        **pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            # Generated initializer: registers this node's YANG metadata and
            # creates one attribute per leaf of the exceed-action "set" container.
            if sys.version_info > (3,):
                super().__init__()
            else:
                # Python 2 requires the explicit class argument to super().
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction.Set, self).__init__()

            self.yang_name = "set"
            self.yang_parent_name = "exceed-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # No child containers under this node.
            self._child_classes = OrderedDict([])
            # Leaf registry: python attribute -> (YLeaf(YANG type, YANG leaf
            # name), accepted python value types).
            self._leafs = OrderedDict([
                ('dscp', (YLeaf(YType.str, 'dscp'), ['str'])),
                ('qos_group', (YLeaf(YType.uint16, 'qos-group'), ['int'])),
                ('traffic_class', (YLeaf(YType.uint8, 'traffic-class'), ['int'])),
                ('discard_class', (YLeaf(YType.uint8, 'discard-class'), ['int'])),
                ('forward_class', (YLeaf(YType.uint8, 'forward-class'), ['int'])),
                ('df', (YLeaf(YType.uint8, 'df'), ['int'])),
                ('cos', (YLeaf(YType.uint8, 'cos'), ['int'])),
                ('inner_cos', (YLeaf(YType.uint8, 'inner-cos'), ['int'])),
                ('precedence', (YLeaf(YType.str, 'precedence'), ['int','str'])),
                ('precedence_tunnel', (YLeaf(YType.str, 'precedence-tunnel'), ['int','str'])),
                ('mpls_experimental_top_most', (YLeaf(YType.uint8, 'mpls-experimental-top-most'), ['int'])),
                ('mpls_experimental_imposition', (YLeaf(YType.uint8, 'mpls-experimental-imposition'), ['int'])),
                ('srp_priority', (YLeaf(YType.uint8, 'srp-priority'), ['int'])),
                ('fr_de', (YLeaf(YType.uint8, 'fr-de'), ['int'])),
                ('dei', (YLeaf(YType.uint8, 'dei'), ['int'])),
                ('dei_imposition', (YLeaf(YType.uint8, 'dei-imposition'), ['int'])),
                ('source_address', (YLeaf(YType.str, 'source-address'), ['str'])),
                ('destination_address', (YLeaf(YType.str, 'destination-address'), ['str'])),
                ('atm_clp', (YLeaf(YType.empty, 'atm-clp'), ['Empty'])),
                ('dscp_tunnel', (YLeaf(YType.str, 'dscp-tunnel'), ['str'])),
            ])
            # All leaves start unset; later writes go through __setattr__.
            self.dscp = None
            self.qos_group = None
            self.traffic_class = None
            self.discard_class = None
            self.forward_class = None
            self.df = None
            self.cos = None
            self.inner_cos = None
            self.precedence = None
            self.precedence_tunnel = None
            self.mpls_experimental_top_most = None
            self.mpls_experimental_imposition = None
            self.srp_priority = None
            self.fr_de = None
            self.dei = None
            self.dei_imposition = None
            self.source_address = None
            self.destination_address = None
            self.atm_clp = None
            self.dscp_tunnel = None
            # Relative XPath segment of this container.
            self._segment_path = lambda: "set"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route attribute writes through YDK's validating setter.
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction.Set, ['dscp', 'qos_group', 'traffic_class', 'discard_class', 'forward_class', 'df', 'cos', 'inner_cos', 'precedence', 'precedence_tunnel', 'mpls_experimental_top_most', 'mpls_experimental_imposition', 'srp_priority', 'fr_de', 'dei', 'dei_imposition', 'source_address', 'destination_address', 'atm_clp', 'dscp_tunnel'], name, value)

        @staticmethod
        def _meta_info():
            # Look up the generated meta-info record for Set.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction.Set']['meta_info']

    @staticmethod
    def _meta_info():
        # Look up the generated meta-info record for ExceedAction.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ExceedAction']['meta_info']
class ViolateAction(_Entity_):
    """
    Configures the action to take on packets that violate
    the rate limit.
    .. attribute:: transmit
    Police action transmit
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: drop
    Police action drop
    **type**\: :py:class:`Empty<ydk.types.Empty>`
    .. attribute:: set
    Police action packet marking
    **type**\: :py:class:`Set <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction.Set>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Generated initializer: registers YANG metadata, the nested "set"
        # marking container and the two empty-typed action leaves.
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 requires the explicit class argument to super().
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction, self).__init__()

        self.yang_name = "violate-action"
        self.yang_parent_name = "police"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("set", ("set", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction.Set))])
        # NOTE(review): the YANG identifier here is capitalized 'Transmit'
        # (unlike lowercase 'drop'); this matches the generated model --
        # confirm against the YANG source before changing.
        self._leafs = OrderedDict([
            ('transmit', (YLeaf(YType.empty, 'Transmit'), ['Empty'])),
            ('drop', (YLeaf(YType.empty, 'drop'), ['Empty'])),
        ])
        self.transmit = None
        self.drop = None

        # Child container is created eagerly and parented to this node.
        self.set = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction.Set()
        self.set.parent = self
        self._children_name_map["set"] = "set"
        self._segment_path = lambda: "violate-action"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK's validating setter.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction, ['transmit', 'drop'], name, value)


    class Set(_Entity_):
        """
        Police action packet marking.
        .. attribute:: dscp
        Marks a packet by setting the DSCP in the ToS byte
        **type**\: str
        **pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
        .. attribute:: qos_group
        Sets the QoS group identifiers on IPv4 or MPLS packets. The set qos\-group is supported only on an ingress policy
        **type**\: int
        **range:** 0..512
        .. attribute:: traffic_class
        Sets the Traffic class identifiers on IPv4 or MPLS packets
        **type**\: int
        **range:** 0..63
        .. attribute:: discard_class
        Sets the discard class on IPv4 or MPLS packets. The discard\-class can be used only in service policies that are attached in the ingress policy
        **type**\: int
        **range:** 0..7
        .. attribute:: forward_class
        Sets the forward class
        **type**\: int
        **range:** 0..7
        .. attribute:: df
        Set DF bit
        **type**\: int
        **range:** 0..1
        .. attribute:: cos
        Sets the specific IEEE 802.1Q Layer 2 CoS value of an outgoing packet. This command should be used by a router if a user wants to mark a packet that is being sent to a switch. Switches can leverage Layer 2 header information, including a CoS value marking. Packets entering an interface cannot be set with a CoS value
        **type**\: int
        **range:** 0..7
        .. attribute:: inner_cos
        Set inner cos
        **type**\: int
        **range:** 0..7
        .. attribute:: precedence
        Sets the precedence value in the IP header
        **type**\: union of the below types:
        **type**\: int
        **range:** 0..7
        **type**\: str
        **pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
        .. attribute:: precedence_tunnel
        Sets the precedence tunnel value for ipsec
        **type**\: union of the below types:
        **type**\: int
        **range:** 0..7
        **type**\: str
        **pattern:** (critical)\|(flash)\|(flash\-override)\|(immediate)\|(internet)\|(network)\|(priority)\|(routine)
        .. attribute:: mpls_experimental_top_most
        Sets the experimental value of the MPLS packet top\-most labels
        **type**\: int
        **range:** 0..7
        .. attribute:: mpls_experimental_imposition
        Sets the experimental value of the MPLS packet imposition labels. Imposition can be used only in service policies that are attached in the ingress policy
        **type**\: int
        **range:** 0..7
        .. attribute:: srp_priority
        Sets the spatial reuse protocol priority value of an outgoing packet
        **type**\: int
        **range:** 0..7
        .. attribute:: fr_de
        Set FrameRelay DE bit
        **type**\: int
        **range:** 0..1
        .. attribute:: dei
        Set DEI bit
        **type**\: int
        **range:** 0..1
        .. attribute:: dei_imposition
        Set DEI imposition bit
        **type**\: int
        **range:** 0..1
        .. attribute:: source_address
        Source IPv4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: destination_address
        Destination IPv4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        .. attribute:: atm_clp
        Set atm cell\-loss\-priority bit
        **type**\: :py:class:`Empty<ydk.types.Empty>`
        .. attribute:: dscp_tunnel
        Marks a packet by setting DSCP in the tunnel header. This is specific to ipsec tunnels
        **type**\: str
        **pattern:** ([0\-9]\|[1\-5][0\-9]\|6[0\-3])\|(af11)\|(af12)\|(af13)\|(af21)\|(af22)\|(af23)\|(af31)\|(af32)\|(af33)\|(af41)\|(af42)\|(af43)\|(ef)\|(default)\|(cs1)\|(cs2)\|(cs3)\|(cs4)\|(cs5)\|(cs6)\|(cs7)
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            # Generated initializer: registers this node's YANG metadata and
            # creates one attribute per leaf of the violate-action "set" container.
            if sys.version_info > (3,):
                super().__init__()
            else:
                # Python 2 requires the explicit class argument to super().
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction.Set, self).__init__()

            self.yang_name = "set"
            self.yang_parent_name = "violate-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # No child containers under this node.
            self._child_classes = OrderedDict([])
            # Leaf registry: python attribute -> (YLeaf(YANG type, YANG leaf
            # name), accepted python value types).
            self._leafs = OrderedDict([
                ('dscp', (YLeaf(YType.str, 'dscp'), ['str'])),
                ('qos_group', (YLeaf(YType.uint16, 'qos-group'), ['int'])),
                ('traffic_class', (YLeaf(YType.uint8, 'traffic-class'), ['int'])),
                ('discard_class', (YLeaf(YType.uint8, 'discard-class'), ['int'])),
                ('forward_class', (YLeaf(YType.uint8, 'forward-class'), ['int'])),
                ('df', (YLeaf(YType.uint8, 'df'), ['int'])),
                ('cos', (YLeaf(YType.uint8, 'cos'), ['int'])),
                ('inner_cos', (YLeaf(YType.uint8, 'inner-cos'), ['int'])),
                ('precedence', (YLeaf(YType.str, 'precedence'), ['int','str'])),
                ('precedence_tunnel', (YLeaf(YType.str, 'precedence-tunnel'), ['int','str'])),
                ('mpls_experimental_top_most', (YLeaf(YType.uint8, 'mpls-experimental-top-most'), ['int'])),
                ('mpls_experimental_imposition', (YLeaf(YType.uint8, 'mpls-experimental-imposition'), ['int'])),
                ('srp_priority', (YLeaf(YType.uint8, 'srp-priority'), ['int'])),
                ('fr_de', (YLeaf(YType.uint8, 'fr-de'), ['int'])),
                ('dei', (YLeaf(YType.uint8, 'dei'), ['int'])),
                ('dei_imposition', (YLeaf(YType.uint8, 'dei-imposition'), ['int'])),
                ('source_address', (YLeaf(YType.str, 'source-address'), ['str'])),
                ('destination_address', (YLeaf(YType.str, 'destination-address'), ['str'])),
                ('atm_clp', (YLeaf(YType.empty, 'atm-clp'), ['Empty'])),
                ('dscp_tunnel', (YLeaf(YType.str, 'dscp-tunnel'), ['str'])),
            ])
            # All leaves start unset; later writes go through __setattr__.
            self.dscp = None
            self.qos_group = None
            self.traffic_class = None
            self.discard_class = None
            self.forward_class = None
            self.df = None
            self.cos = None
            self.inner_cos = None
            self.precedence = None
            self.precedence_tunnel = None
            self.mpls_experimental_top_most = None
            self.mpls_experimental_imposition = None
            self.srp_priority = None
            self.fr_de = None
            self.dei = None
            self.dei_imposition = None
            self.source_address = None
            self.destination_address = None
            self.atm_clp = None
            self.dscp_tunnel = None
            # Relative XPath segment of this container.
            self._segment_path = lambda: "set"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route attribute writes through YDK's validating setter.
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction.Set, ['dscp', 'qos_group', 'traffic_class', 'discard_class', 'forward_class', 'df', 'cos', 'inner_cos', 'precedence', 'precedence_tunnel', 'mpls_experimental_top_most', 'mpls_experimental_imposition', 'srp_priority', 'fr_de', 'dei', 'dei_imposition', 'source_address', 'destination_address', 'atm_clp', 'dscp_tunnel'], name, value)

        @staticmethod
        def _meta_info():
            # Look up the generated meta-info record for Set.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction.Set']['meta_info']

    @staticmethod
    def _meta_info():
        # Look up the generated meta-info record for ViolateAction.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police.ViolateAction']['meta_info']
@staticmethod
def _meta_info():
    """Look up the generated meta-info record for this class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    table_entry = meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.Police']
    return table_entry['meta_info']
class ServicePolicy(_Entity_):
    """
    Configure a child service policy.
    .. attribute:: policy_name
    Name of service\-policy
    **type**\: str
    **pattern:** [a\-zA\-Z0\-9][a\-zA\-Z0\-9\\.\_@$%+#\:=<>\\\-]{0,62}
    .. attribute:: type
    Type of service\-policy
    **type**\: str
    **pattern:** (PBR)\|(QOS)\|(REDIRECT)\|(TRAFFIC)\|(pbr)\|(qos)\|(redirect)\|(traffic)
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Generated initializer: registers YANG metadata and the two
        # string leaves of the service-policy reference.
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 requires the explicit class argument to super().
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServicePolicy, self).__init__()

        self.yang_name = "service-policy"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('policy_name', (YLeaf(YType.str, 'policy-name'), ['str'])),
            ('type', (YLeaf(YType.str, 'type'), ['str'])),
        ])
        self.policy_name = None
        # 'type' mirrors the YANG leaf name; it intentionally shadows the builtin.
        self.type = None
        self._segment_path = lambda: "service-policy"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK's validating setter.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServicePolicy, ['policy_name', 'type'], name, value)

    @staticmethod
    def _meta_info():
        # Look up the generated meta-info record for ServicePolicy.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServicePolicy']['meta_info']
class CacLocal(_Entity_):
    """
    Policy action CAC.
    .. attribute:: rate
    The rate allocated for all flows
    **type**\: :py:class:`Rate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.Rate>`
    .. attribute:: flow_rate
    The rate allocated per flow
    **type**\: :py:class:`FlowRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.FlowRate>`
    .. attribute:: flow_idle_timeout
    The interval after which a flow is removed, if there is no activity. If timeout is 0 this flow does not expire
    **type**\: union of the below types:
    **type**\: int
    **range:** 10..2550
    **type**\: str
    **pattern:** (None)\|(none)
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Generated initializer: registers YANG metadata, one union leaf and
        # the two nested rate containers.
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 requires the explicit class argument to super().
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal, self).__init__()

        self.yang_name = "cac-local"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("rate", ("rate", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.Rate)), ("flow-rate", ("flow_rate", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.FlowRate))])
        # Union leaf (int 10..2550 or 'None'/'none' per the docstring above).
        self._leafs = OrderedDict([
            ('flow_idle_timeout', (YLeaf(YType.str, 'flow-idle-timeout'), ['int','str'])),
        ])
        self.flow_idle_timeout = None

        # Child containers are created eagerly and parented to this node.
        self.rate = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.Rate()
        self.rate.parent = self
        self._children_name_map["rate"] = "rate"

        self.flow_rate = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.FlowRate()
        self.flow_rate.parent = self
        self._children_name_map["flow_rate"] = "flow-rate"
        self._segment_path = lambda: "cac-local"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK's validating setter.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal, ['flow_idle_timeout'], name, value)


    class Rate(_Entity_):
        """
        The rate allocated for all flows.
        .. attribute:: value
        Rate value
        **type**\: int
        **range:** 1..4294967295
        .. attribute:: units
        Rate units
        **type**\: str
        **pattern:** (bps)\|(kbps)\|(mbps)\|(gbps)\|(cellsps)
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            # Generated initializer for the aggregate-rate container.
            if sys.version_info > (3,):
                super().__init__()
            else:
                # Python 2 requires the explicit class argument to super().
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.Rate, self).__init__()

            self.yang_name = "rate"
            self.yang_parent_name = "cac-local"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                ('units', (YLeaf(YType.str, 'units'), ['str'])),
            ])
            self.value = None
            self.units = None
            self._segment_path = lambda: "rate"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route attribute writes through YDK's validating setter.
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.Rate, ['value', 'units'], name, value)

        @staticmethod
        def _meta_info():
            # Look up the generated meta-info record for Rate.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.Rate']['meta_info']


    class FlowRate(_Entity_):
        """
        The rate allocated per flow.
        .. attribute:: value
        Rate value
        **type**\: int
        **range:** 1..4294967295
        .. attribute:: units
        Rate units
        **type**\: str
        **pattern:** (bps)\|(kbps)\|(mbps)\|(gbps)\|(cellsps)
        """

        # YANG module prefix and revision this binding was generated from.
        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            # Generated initializer for the per-flow-rate container.
            if sys.version_info > (3,):
                super().__init__()
            else:
                # Python 2 requires the explicit class argument to super().
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.FlowRate, self).__init__()

            self.yang_name = "flow-rate"
            self.yang_parent_name = "cac-local"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('value', (YLeaf(YType.uint32, 'value'), ['int'])),
                ('units', (YLeaf(YType.str, 'units'), ['str'])),
            ])
            self.value = None
            self.units = None
            self._segment_path = lambda: "flow-rate"
            self._is_frozen = True

        def __setattr__(self, name, value):
            # Route attribute writes through YDK's validating setter.
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.FlowRate, ['value', 'units'], name, value)

        @staticmethod
        def _meta_info():
            # Look up the generated meta-info record for FlowRate.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal.FlowRate']['meta_info']

    @staticmethod
    def _meta_info():
        # Look up the generated meta-info record for CacLocal.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.CacLocal']['meta_info']
class FlowParams(_Entity_):
    """
    Policy flow monitoring action.
    .. attribute:: max_flow
    Max simultaneous flows monitored per policy class
    **type**\: int
    **range:** 0..4096
    .. attribute:: interval_duration
    Monitored interval duration
    **type**\: int
    **range:** 0..4294967295
    **units**\: seconds
    .. attribute:: history
    Keep stats/metrics on box for so many intervals
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: timeout
    Declare a flow dead if no packets received in so much time
    **type**\: int
    **range:** 0..4294967295
    **units**\: seconds
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Generated initializer: registers YANG metadata and the four
        # integer leaves of the flow-monitoring parameters.
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 requires the explicit class argument to super().
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.FlowParams, self).__init__()

        self.yang_name = "flow-params"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('max_flow', (YLeaf(YType.uint16, 'max-flow'), ['int'])),
            ('interval_duration', (YLeaf(YType.uint32, 'interval-duration'), ['int'])),
            ('history', (YLeaf(YType.uint32, 'history'), ['int'])),
            ('timeout', (YLeaf(YType.uint32, 'timeout'), ['int'])),
        ])
        self.max_flow = None
        self.interval_duration = None
        self.history = None
        self.timeout = None
        self._segment_path = lambda: "flow-params"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route attribute writes through YDK's validating setter.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.FlowParams, ['max_flow', 'interval_duration', 'history', 'timeout'], name, value)

    @staticmethod
    def _meta_info():
        # Look up the generated meta-info record for FlowParams.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.FlowParams']['meta_info']
class MetricsIpcbr(_Entity_):
"""
Policy IP\-CBR metric action.
.. attribute:: rate
Nominal per\-flow data rate
**type**\: :py:class:`Rate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.Rate>`
.. attribute:: media_packet
Media\-packet structure
**type**\: :py:class:`MediaPacket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.MediaPacket>`
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr, self).__init__()
self.yang_name = "metrics-ipcbr"
self.yang_parent_name = "policy-map-rule"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("rate", ("rate", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.Rate)), ("media-packet", ("media_packet", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.MediaPacket))])
self._leafs = OrderedDict()
self.rate = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.Rate()
self.rate.parent = self
self._children_name_map["rate"] = "rate"
self.media_packet = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.MediaPacket()
self.media_packet.parent = self
self._children_name_map["media_packet"] = "media-packet"
self._segment_path = lambda: "metrics-ipcbr"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr, [], name, value)
class Rate(_Entity_):
"""
Nominal per\-flow data rate.
.. attribute:: layer3
Nominal rate specified at the L3 (IP)
**type**\: int
**range:** 0..4294967295
**units**\: bps
.. attribute:: packet
Nominal IP layer packet rate (in pps)
**type**\: int
**range:** 0..4294967295
**units**\: pps
.. attribute:: media
Nominal data rate of the media flow (ip payload)
**type**\: int
**range:** 1..3000000000
**units**\: bps
"""
_prefix = 'infra-policymgr-cfg'
_revision = '2019-10-02'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.Rate, self).__init__()
self.yang_name = "rate"
self.yang_parent_name = "metrics-ipcbr"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('layer3', (YLeaf(YType.uint32, 'layer3'), ['int'])),
('packet', (YLeaf(YType.uint32, 'packet'), ['int'])),
('media', (YLeaf(YType.uint32, 'media'), ['int'])),
])
self.layer3 = None
self.packet = None
self.media = None
self._segment_path = lambda: "rate"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.Rate, ['layer3', 'packet', 'media'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.Rate']['meta_info']
# Generated YDK binding for the YANG container "media-packet"
# (module Cisco-IOS-XR-infra-policymgr-cfg, revision 2019-10-02).
class MediaPacket(_Entity_):
    """
    Media\-packet structure.

    .. attribute:: size

        Nominal size of the media\-packet
        **type**\: int
        **range:** 0..65535
        **units**\: bytes

    .. attribute:: count_in_layer3

        Nominal number of media packets in an IP payload
        **type**\: int
        **range:** 1..64
        **units**\: packets

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible super() call, as emitted by the ydk-py generator.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.MediaPacket, self).__init__()
        self.yang_name = "media-packet"
        self.yang_parent_name = "metrics-ipcbr"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf registry: python_name -> (YLeaf(type, yang-name), [python types]).
        self._leafs = OrderedDict([
            ('size', (YLeaf(YType.uint16, 'size'), ['int'])),
            ('count_in_layer3', (YLeaf(YType.uint8, 'count-in-layer3'), ['int'])),
        ])
        self.size = None
        self.count_in_layer3 = None
        self._segment_path = lambda: "media-packet"
        # Must stay the last assignment: after this, writes go through the
        # restricted __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict attribute writes to the declared leafs via YDK's setter.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.MediaPacket, ['size', 'count_in_layer3'], name, value)

    @staticmethod
    def _meta_info():
        """Return the generated meta-model entry for MediaPacket."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr.MediaPacket']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-model entry for the MetricsIpcbr container."""
    # Deferred import keeps the large meta tables out of module import time.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.MetricsIpcbr']['meta_info']
# Generated YDK binding for the YANG container "react" under "policy-map-rule"
# (module Cisco-IOS-XR-infra-policymgr-cfg, revision 2019-10-02).
class React(_Entity_):
    """
    Policy action react.

    .. attribute:: descrition

        String describing the react statement
        **type**\: str

    .. attribute:: action

        Action on alert
        **type**\: :py:class:`Action <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Action>`

    .. attribute:: alarm

        Alarm settings
        **type**\: :py:class:`Alarm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm>`

    .. attribute:: threshold

        Alarm threshold settings
        **type**\: :py:class:`Threshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold>`

    .. attribute:: criterion_delay_factor

        React criterion delay factor
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: criterion_media_stop

        React criterion media stop
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: criterion_mrv

        React criterion mrv
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: criterion_flow_count

        React criterion flow count
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: criterion_packet_rate

        React criterion packet rate
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible super() call, as emitted by the ydk-py generator.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React, self).__init__()
        self.yang_name = "react"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers: yang-name -> (python attribute name, class).
        self._child_classes = OrderedDict([("action", ("action", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Action)), ("alarm", ("alarm", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm)), ("threshold", ("threshold", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold))])
        # NOTE: 'descrition' [sic] is the literal YANG leaf name in the model;
        # do not correct the spelling — it must match the device schema.
        self._leafs = OrderedDict([
            ('descrition', (YLeaf(YType.str, 'descrition'), ['str'])),
            ('criterion_delay_factor', (YLeaf(YType.empty, 'criterion-delay-factor'), ['Empty'])),
            ('criterion_media_stop', (YLeaf(YType.empty, 'criterion-media-stop'), ['Empty'])),
            ('criterion_mrv', (YLeaf(YType.empty, 'criterion-mrv'), ['Empty'])),
            ('criterion_flow_count', (YLeaf(YType.empty, 'criterion-flow-count'), ['Empty'])),
            ('criterion_packet_rate', (YLeaf(YType.empty, 'criterion-packet-rate'), ['Empty'])),
        ])
        self.descrition = None
        self.criterion_delay_factor = None
        self.criterion_media_stop = None
        self.criterion_mrv = None
        self.criterion_flow_count = None
        self.criterion_packet_rate = None
        # Instantiate child containers and back-link them to this entity.
        self.action = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Action()
        self.action.parent = self
        self._children_name_map["action"] = "action"
        self.alarm = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm()
        self.alarm.parent = self
        self._children_name_map["alarm"] = "alarm"
        self.threshold = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold()
        self.threshold.parent = self
        self._children_name_map["threshold"] = "threshold"
        self._segment_path = lambda: "react"
        # Must stay the last assignment: subsequent writes go through __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict attribute writes to the declared leafs via YDK's setter.
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React, ['descrition', 'criterion_delay_factor', 'criterion_media_stop', 'criterion_mrv', 'criterion_flow_count', 'criterion_packet_rate'], name, value)


    # YANG container "action": what to do when the react criterion fires.
    class Action(_Entity_):
        """
        Action on alert.

        .. attribute:: syslog

            Syslog
            **type**\: :py:class:`Empty<ydk.types.Empty>`

        .. attribute:: snmp

            SNMP
            **type**\: :py:class:`Empty<ydk.types.Empty>`

        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Action, self).__init__()
            self.yang_name = "action"
            self.yang_parent_name = "react"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('syslog', (YLeaf(YType.empty, 'syslog'), ['Empty'])),
                ('snmp', (YLeaf(YType.empty, 'snmp'), ['Empty'])),
            ])
            self.syslog = None
            self.snmp = None
            self._segment_path = lambda: "action"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Action, ['syslog', 'snmp'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Action']['meta_info']


    # YANG container "alarm": alarm type and severity for the react action.
    class Alarm(_Entity_):
        """
        Alarm settings.

        .. attribute:: type

            Alarm type
            **type**\: :py:class:`Type <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm.Type>`

        .. attribute:: severity

            Severity of the alarm
            **type**\: str
            **pattern:** (informational)\|(notification)\|(warning)\|(error)\|(critical)\|(alert)\|(emergency)

        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm, self).__init__()
            self.yang_name = "alarm"
            self.yang_parent_name = "react"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("type", ("type", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm.Type))])
            self._leafs = OrderedDict([
                ('severity', (YLeaf(YType.str, 'severity'), ['str'])),
            ])
            self.severity = None
            # 'type' mirrors the YANG node name (shadows the builtin only as
            # an instance attribute, which is harmless here).
            self.type = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm.Type()
            self.type.parent = self
            self._children_name_map["type"] = "type"
            self._segment_path = lambda: "alarm"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm, ['severity'], name, value)


        # YANG container "type": discrete vs. grouped alarm triggering.
        class Type(_Entity_):
            """
            Alarm type.

            .. attribute:: discrete

                Discrete alarm type
                **type**\: :py:class:`Empty<ydk.types.Empty>`

            .. attribute:: group_count

                Number of flows to reach before triggering alarm
                **type**\: int
                **range:** 0..65535
                **units**\: number of flows

            .. attribute:: group_percent

                Percent to reach before triggering alarm
                **type**\: int
                **range:** 0..65535
                **units**\: percentage

            """

            _prefix = 'infra-policymgr-cfg'
            _revision = '2019-10-02'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm.Type, self).__init__()
                self.yang_name = "type"
                self.yang_parent_name = "alarm"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('discrete', (YLeaf(YType.empty, 'discrete'), ['Empty'])),
                    ('group_count', (YLeaf(YType.uint16, 'group-count'), ['int'])),
                    ('group_percent', (YLeaf(YType.uint16, 'group-percent'), ['int'])),
                ])
                self.discrete = None
                self.group_count = None
                self.group_percent = None
                self._segment_path = lambda: "type"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm.Type, ['discrete', 'group_count', 'group_percent'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
                return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm.Type']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Alarm']['meta_info']


    # YANG container "threshold": trigger value/type for raising the alarm.
    class Threshold(_Entity_):
        """
        Alarm threshold settings.

        .. attribute:: trigger_value

            Alarm trigger value settings
            **type**\: :py:class:`TriggerValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerValue>`

        .. attribute:: trigger_type

            Alarm trigger type settings
            **type**\: :py:class:`TriggerType <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerType>`

        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold, self).__init__()
            self.yang_name = "threshold"
            self.yang_parent_name = "react"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("trigger-value", ("trigger_value", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerValue)), ("trigger-type", ("trigger_type", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerType))])
            # No leafs of its own — only child containers.
            self._leafs = OrderedDict()
            self.trigger_value = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerValue()
            self.trigger_value.parent = self
            self._children_name_map["trigger_value"] = "trigger-value"
            self.trigger_type = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerType()
            self.trigger_type.parent = self
            self._children_name_map["trigger_type"] = "trigger-type"
            self._segment_path = lambda: "threshold"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold, [], name, value)


        # YANG container "trigger-value": comparison bounds, stored as strings.
        class TriggerValue(_Entity_):
            """
            Alarm trigger value settings.

            .. attribute:: greater_than

                Greater than
                **type**\: str

            .. attribute:: greater_than_equal

                Greater than equal
                **type**\: str

            .. attribute:: less_than

                Less than
                **type**\: str

            .. attribute:: less_than_equal

                Less than equal
                **type**\: str

            .. attribute:: range

                Range
                **type**\: str

            """

            _prefix = 'infra-policymgr-cfg'
            _revision = '2019-10-02'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerValue, self).__init__()
                self.yang_name = "trigger-value"
                self.yang_parent_name = "threshold"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('greater_than', (YLeaf(YType.str, 'greater-than'), ['str'])),
                    ('greater_than_equal', (YLeaf(YType.str, 'greater-than-equal'), ['str'])),
                    ('less_than', (YLeaf(YType.str, 'less-than'), ['str'])),
                    ('less_than_equal', (YLeaf(YType.str, 'less-than-equal'), ['str'])),
                    ('range', (YLeaf(YType.str, 'range'), ['str'])),
                ])
                self.greater_than = None
                self.greater_than_equal = None
                self.less_than = None
                self.less_than_equal = None
                # 'range' mirrors the YANG leaf name (instance attribute only).
                self.range = None
                self._segment_path = lambda: "trigger-value"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerValue, ['greater_than', 'greater_than_equal', 'less_than', 'less_than_equal', 'range'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
                return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerValue']['meta_info']


        # YANG container "trigger-type": immediate vs. averaged triggering.
        class TriggerType(_Entity_):
            """
            Alarm trigger type settings.

            .. attribute:: immediate

                Immediate trigger
                **type**\: :py:class:`Empty<ydk.types.Empty>`

            .. attribute:: average

                Trigger averaged over N intervals
                **type**\: int
                **range:** 0..4294967295

            """

            _prefix = 'infra-policymgr-cfg'
            _revision = '2019-10-02'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerType, self).__init__()
                self.yang_name = "trigger-type"
                self.yang_parent_name = "threshold"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('immediate', (YLeaf(YType.empty, 'immediate'), ['Empty'])),
                    ('average', (YLeaf(YType.uint32, 'average'), ['int'])),
                ])
                self.immediate = None
                self.average = None
                self._segment_path = lambda: "trigger-type"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerType, ['immediate', 'average'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
                return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold.TriggerType']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React.Threshold']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.React']['meta_info']
# Generated YDK binding for the YANG container "pbr-redirect" under
# "policy-map-rule" (module Cisco-IOS-XR-infra-policymgr-cfg, rev 2019-10-02).
class PbrRedirect(_Entity_):
    """
    Policy action redirect

    .. attribute:: ipv4

        Policy action redirect IPv4
        **type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv4>`

    .. attribute:: ipv6

        Policy action redirect IPv6
        **type**\: :py:class:`Ipv6 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv6>`

    .. attribute:: next_hop

        Next hop address
        **type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop>`

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible super() call, as emitted by the ydk-py generator.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect, self).__init__()
        self.yang_name = "pbr-redirect"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers only — this node has no leafs of its own.
        self._child_classes = OrderedDict([("ipv4", ("ipv4", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv4)), ("ipv6", ("ipv6", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv6)), ("next-hop", ("next_hop", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop))])
        self._leafs = OrderedDict()
        self.ipv4 = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv4()
        self.ipv4.parent = self
        self._children_name_map["ipv4"] = "ipv4"
        self.ipv6 = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv6()
        self.ipv6.parent = self
        self._children_name_map["ipv6"] = "ipv6"
        self.next_hop = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop()
        self.next_hop.parent = self
        self._children_name_map["next_hop"] = "next-hop"
        self._segment_path = lambda: "pbr-redirect"
        # Must stay the last assignment: subsequent writes go through __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect, [], name, value)


    # YANG container "ipv4": redirect target expressed as IPv4 next-hop + VRF.
    class Ipv4(_Entity_):
        """
        Policy action redirect IPv4

        .. attribute:: ipv4_next_hop

            IPv4 address
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        .. attribute:: vrf

            IPv4 VRF
            **type**\: str

        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv4, self).__init__()
            self.yang_name = "ipv4"
            self.yang_parent_name = "pbr-redirect"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('ipv4_next_hop', (YLeaf(YType.str, 'ipv4-next-hop'), ['str'])),
                ('vrf', (YLeaf(YType.str, 'vrf'), ['str'])),
            ])
            self.ipv4_next_hop = None
            self.vrf = None
            self._segment_path = lambda: "ipv4"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv4, ['ipv4_next_hop', 'vrf'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv4']['meta_info']


    # YANG container "ipv6": redirect target expressed as IPv6 next-hop + VRF.
    class Ipv6(_Entity_):
        """
        Policy action redirect IPv6

        .. attribute:: ipv6_next_hop

            IPv6 address
            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        .. attribute:: vrf

            IPv6 VRF
            **type**\: str

        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv6, self).__init__()
            self.yang_name = "ipv6"
            self.yang_parent_name = "pbr-redirect"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('ipv6_next_hop', (YLeaf(YType.str, 'ipv6-next-hop'), ['str'])),
                ('vrf', (YLeaf(YType.str, 'vrf'), ['str'])),
            ])
            self.ipv6_next_hop = None
            self.vrf = None
            self._segment_path = lambda: "ipv6"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv6, ['ipv6_next_hop', 'vrf'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.Ipv6']['meta_info']


    # YANG container "next-hop": redirect via a BGP route-target next hop.
    class NextHop(_Entity_):
        """
        Next hop address.

        .. attribute:: route_target

            Route Target
            **type**\: :py:class:`RouteTarget <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget>`

        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop, self).__init__()
            self.yang_name = "next-hop"
            self.yang_parent_name = "pbr-redirect"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("route-target", ("route_target", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget))])
            self._leafs = OrderedDict()
            self.route_target = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget()
            self.route_target.parent = self
            self._children_name_map["route_target"] = "route-target"
            self._segment_path = lambda: "next-hop"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop, [], name, value)


        # YANG container "route-target": AS-number/index or IPv4-address form.
        class RouteTarget(_Entity_):
            """
            Route Target

            .. attribute:: ipv4_address

                IPv4 address
                **type**\: :py:class:`Ipv4Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget.Ipv4Address>`

            .. attribute:: as_number

                2\-byte/4\-byte AS number
                **type**\: int
                **range:** 1..4294967295

            .. attribute:: index

                ASN2\:index 2/4 byte (hex or decimal format)
                **type**\: int
                **range:** 0..4294967295

            """

            _prefix = 'infra-policymgr-cfg'
            _revision = '2019-10-02'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget, self).__init__()
                self.yang_name = "route-target"
                self.yang_parent_name = "next-hop"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([("ipv4-address", ("ipv4_address", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget.Ipv4Address))])
                self._leafs = OrderedDict([
                    ('as_number', (YLeaf(YType.uint32, 'as-number'), ['int'])),
                    ('index', (YLeaf(YType.uint32, 'index'), ['int'])),
                ])
                self.as_number = None
                self.index = None
                self.ipv4_address = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget.Ipv4Address()
                self.ipv4_address.parent = self
                self._children_name_map["ipv4_address"] = "ipv4-address"
                self._segment_path = lambda: "route-target"
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget, ['as_number', 'index'], name, value)


            # YANG container "ipv4-address": address + netmask variant.
            class Ipv4Address(_Entity_):
                """
                IPv4 address.

                .. attribute:: address

                    IPv4 address
                    **type**\: str
                    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

                .. attribute:: netmask

                    IPv4 netmask
                    **type**\: str

                """

                _prefix = 'infra-policymgr-cfg'
                _revision = '2019-10-02'

                def __init__(self):
                    if sys.version_info > (3,):
                        super().__init__()
                    else:
                        super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget.Ipv4Address, self).__init__()
                    self.yang_name = "ipv4-address"
                    self.yang_parent_name = "route-target"
                    self.is_top_level_class = False
                    self.has_list_ancestor = True
                    self.ylist_key_names = []
                    self._child_classes = OrderedDict([])
                    self._leafs = OrderedDict([
                        ('address', (YLeaf(YType.str, 'address'), ['str'])),
                        ('netmask', (YLeaf(YType.str, 'netmask'), ['str'])),
                    ])
                    self.address = None
                    self.netmask = None
                    self._segment_path = lambda: "ipv4-address"
                    self._is_frozen = True

                def __setattr__(self, name, value):
                    self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget.Ipv4Address, ['address', 'netmask'], name, value)

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
                    return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget.Ipv4Address']['meta_info']

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
                return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop.RouteTarget']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect.NextHop']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrRedirect']['meta_info']
# Generated YDK binding for the YANG container "pbr-forward" under
# "policy-map-rule" (module Cisco-IOS-XR-infra-policymgr-cfg, rev 2019-10-02).
class PbrForward(_Entity_):
    """
    Policy action PBR forward.

    .. attribute:: default

        Use system default routing table
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: next_hop

        Use specific next\-hop. Here we present 5 different combination for the pbf next\-hop. 1. vrf with v6 address 2. vrf with v4 address 3. vrf 4. v4 address 5. v6 address
        **type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_policymgr_cfg.PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward.NextHop>`

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible super() call, as emitted by the ydk-py generator.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward, self).__init__()
        self.yang_name = "pbr-forward"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("next-hop", ("next_hop", PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward.NextHop))])
        self._leafs = OrderedDict([
            ('default', (YLeaf(YType.empty, 'default'), ['Empty'])),
        ])
        # 'default' mirrors the YANG leaf name (instance attribute only).
        self.default = None
        self.next_hop = PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward.NextHop()
        self.next_hop.parent = self
        self._children_name_map["next_hop"] = "next-hop"
        self._segment_path = lambda: "pbr-forward"
        # Must stay the last assignment: subsequent writes go through __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward, ['default'], name, value)


    # YANG container "next-hop": vrf and/or v4/v6 address combinations.
    class NextHop(_Entity_):
        """
        Use specific next\-hop.
        Here we present 5 different combination for the pbf next\-hop.

        1. vrf with v6 address
        2. vrf with v4 address
        3. vrf
        4. v4 address
        5. v6 address

        .. attribute:: vrf

            VRF name
            **type**\: str

        .. attribute:: ipv4_address

            IPv4 address
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        .. attribute:: ipv6_address

            IPv6 address
            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        """

        _prefix = 'infra-policymgr-cfg'
        _revision = '2019-10-02'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward.NextHop, self).__init__()
            self.yang_name = "next-hop"
            self.yang_parent_name = "pbr-forward"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('vrf', (YLeaf(YType.str, 'vrf'), ['str'])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.vrf = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "next-hop"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward.NextHop, ['vrf', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
            return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward.NextHop']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.PbrForward']['meta_info']
# Generated YDK binding for the YANG *presence* container
# "service-function-path" (module Cisco-IOS-XR-infra-policymgr-cfg, rev 2019-10-02).
class ServiceFunctionPath(_Entity_):
    """
    Policy action service function path.

    .. attribute:: path_id

        Service function path id
        **type**\: int
        **range:** 1..16777215
        **mandatory**\: True

    .. attribute:: index

        Service function path index
        **type**\: int
        **range:** 1..255
        **mandatory**\: True

    .. attribute:: metadata

        Service function path metadata name
        **type**\: str

    This class is a :ref:`presence class<presence-class>`

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible super() call, as emitted by the ydk-py generator.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServiceFunctionPath, self).__init__()
        self.yang_name = "service-function-path"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Presence container: its mere existence in the config is meaningful.
        self.is_presence_container = True
        self._leafs = OrderedDict([
            ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
            ('index', (YLeaf(YType.uint8, 'index'), ['int'])),
            ('metadata', (YLeaf(YType.str, 'metadata'), ['str'])),
        ])
        self.path_id = None
        self.index = None
        self.metadata = None
        self._segment_path = lambda: "service-function-path"
        # Must stay the last assignment: subsequent writes go through __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServiceFunctionPath, ['path_id', 'index', 'metadata'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.ServiceFunctionPath']['meta_info']
# Generated YDK binding for the YANG container "http-enrichment"
# (module Cisco-IOS-XR-infra-policymgr-cfg, revision 2019-10-02).
class HttpEnrichment(_Entity_):
    """
    HTTP Enrichment action

    .. attribute:: subscribermac

        Subscriber Mac
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: subscriberip

        Subscriber IP
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: hostname

        Hostname
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    .. attribute:: bngidentifierinterface

        Bng Identifier Interface
        **type**\: :py:class:`Empty<ydk.types.Empty>`

    """

    _prefix = 'infra-policymgr-cfg'
    _revision = '2019-10-02'

    def __init__(self):
        # Python 2/3 compatible super() call, as emitted by the ydk-py generator.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.HttpEnrichment, self).__init__()
        self.yang_name = "http-enrichment"
        self.yang_parent_name = "policy-map-rule"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('subscribermac', (YLeaf(YType.empty, 'subscribermac'), ['Empty'])),
            ('subscriberip', (YLeaf(YType.empty, 'subscriberip'), ['Empty'])),
            ('hostname', (YLeaf(YType.empty, 'hostname'), ['Empty'])),
            ('bngidentifierinterface', (YLeaf(YType.empty, 'bngidentifierinterface'), ['Empty'])),
        ])
        self.subscribermac = None
        self.subscriberip = None
        self.hostname = None
        self.bngidentifierinterface = None
        self._segment_path = lambda: "http-enrichment"
        # Must stay the last assignment: subsequent writes go through __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.HttpEnrichment, ['subscribermac', 'subscriberip', 'hostname', 'bngidentifierinterface'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
        return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule.HttpEnrichment']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-model entry for the PolicyMapRule list entry."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap.PolicyMapRule']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-model entry for the PolicyMap list entry."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    return meta._meta_table['PolicyManager.PolicyMaps.PolicyMap']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-model entry for the PolicyMaps container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    return meta._meta_table['PolicyManager.PolicyMaps']['meta_info']
def clone_ptr(self):
    """Create a fresh top-level PolicyManager entity and remember it as _top_entity."""
    # NOTE(review): used by YDK services to obtain an empty copy of the
    # top-level entity for decoding — confirm against the YDK Entity API.
    self._top_entity = PolicyManager()
    return self._top_entity
@staticmethod
def _meta_info():
    """Return the generated meta-model entry for the top-level PolicyManager."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_policymgr_cfg as meta
    return meta._meta_table['PolicyManager']['meta_info']
| 48.289082 | 1,665 | 0.427371 | 28,978 | 360,478 | 5.069984 | 0.022845 | 0.020801 | 0.026001 | 0.089131 | 0.86873 | 0.84262 | 0.815931 | 0.784342 | 0.768919 | 0.748499 | 0 | 0.022757 | 0.460722 | 360,478 | 7,464 | 1,666 | 48.295552 | 0.733001 | 0.23243 | 0 | 0.726014 | 0 | 0.000659 | 0.136445 | 0.040945 | 0.006924 | 0 | 0 | 0 | 0 | 1 | 0.085394 | false | 0.000989 | 0.032311 | 0 | 0.1909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
fc0ae43996b29e9ae4384a0752ab0f0ddaf09220 | 4,607 | py | Python | eleusis-logic/Python/Verifier.py | OJP98/eleusis-angular | e078ee751c19ddc4063e03190f2e4a889329a1fd | [
"MIT"
] | null | null | null | eleusis-logic/Python/Verifier.py | OJP98/eleusis-angular | e078ee751c19ddc4063e03190f2e4a889329a1fd | [
"MIT"
] | 1 | 2022-02-13T19:33:10.000Z | 2022-02-13T19:33:10.000Z | eleusis-logic/Python/Verifier.py | OJP98/eleusis-angular | e078ee751c19ddc4063e03190f2e4a889329a1fd | [
"MIT"
def verifyFirstCard(rule_info, card):
    """Check whether the first card played satisfies the secret rule.

    :param rule_info: 3-element rule encoding:
        rule_info[0] -- rule kind: "0" = color rule, anything else = number rule.
        rule_info[1] -- sub-rule selector (meaning depends on the kind, below).
        rule_info[2] -- rule value: a string of color letters, or a number.
    :param card: card string such as "5R" (rank followed by a color letter);
        a bare rank such as "5" is also accepted for number rules.
    :return: True if the card is allowed by the rule, False otherwise.
    """
    kind = rule_info[0]
    sub_rule = rule_info[1]
    value = rule_info[2]

    if kind == "0":
        # Color rules only look at the last character (the color letter).
        color = card[len(card) - 1]
        if sub_rule == "0":
            # Forbidden colors: the card passes when its color is NOT listed.
            return value.find(color) == -1
        # Allowed colors: the card passes when its color IS listed (for the
        # first card the position in the sequence does not matter yet).
        return value.find(color) != -1

    # Number rules. Bug fix: the original called int() on the full card
    # string and crashed on suited cards like "9H"; strip the trailing color
    # letter the same way verify_Card does, while still accepting a bare
    # numeric rank unchanged.
    rank = card if card.isdigit() else card[: len(card) - 1]
    if sub_rule == "0":
        # Rank must be one of the first 13 multiples of the rule value
        # (same table the original built).
        multiples = [int(value) * (i + 1) for i in range(13)]
        return int(rank) in multiples
    if sub_rule == "1":
        # Rank must be at least the rule value.
        return int(rank) >= int(value)
    if sub_rule == "2":
        # Rank must be at most the rule value.
        return int(rank) <= int(value)
    # Forbidden number: any rank except the rule value passes. Bug fix:
    # compare numerically, matching verify_Card, instead of as raw strings.
    return int(rank) != int(value)
def verify_Card(rule_info, last_card, card):
    """Check whether *card* may be played after *last_card* under the rule.

    :param rule_info: 3-element rule encoding:
        rule_info[0] -- rule kind: "0" = color rule, anything else = number rule.
        rule_info[1] -- sub-rule selector (meaning depends on the kind, below).
        rule_info[2] -- rule value: a string of color letters, or a number.
    :param last_card: previously played card, e.g. "4R" (only its color letter
        is used, and only by the color-sequence sub-rule).
    :param card: candidate card, e.g. "5G" (rank followed by a color letter).
    :return: True if the card is allowed by the rule, False otherwise.
    """
    kind = rule_info[0]
    sub_rule = rule_info[1]
    value = rule_info[2]

    if kind == "0":
        # Color rules only look at the last character (the color letter).
        color = card[len(card) - 1]
        if sub_rule == "0":
            # Forbidden colors: the card passes when its color is NOT listed.
            return value.find(color) == -1
        # Color-sequence rule: the pair (previous color, new color) must be
        # adjacent in the pattern; doubling the pattern makes it cyclic, so
        # the last color of the sequence may wrap to the first.
        previous_color = last_card[len(last_card) - 1]
        pair = previous_color + color
        cycle = value + value
        # Fixed comment bug: the original said the match returned False --
        # a found pair means the order is correct, i.e. True.
        return cycle.find(pair) != -1

    # Number rules work on the rank with the trailing color letter stripped.
    # A bare numeric card is accepted unchanged for consistency with
    # verifyFirstCard (suited cards behave exactly as before).
    rank = card if card.isdigit() else card[: len(card) - 1]
    if sub_rule == "0":
        # Rank must be one of the first 13 multiples of the rule value.
        multiples = [int(value) * (i + 1) for i in range(13)]
        return int(rank) in multiples
    if sub_rule == "1":
        # Rank must be at least the rule value.
        return int(rank) >= int(value)
    if sub_rule == "2":
        # Rank must be at most the rule value.
        return int(rank) <= int(value)
    # Forbidden number: any rank except the rule value passes.
    return int(rank) != int(value)
| 37.153226 | 101 | 0.520946 | 587 | 4,607 | 4.001704 | 0.144804 | 0.058748 | 0.04768 | 0.051086 | 0.882503 | 0.858663 | 0.844615 | 0.844615 | 0.744998 | 0.744998 | 0 | 0.024578 | 0.408292 | 4,607 | 123 | 102 | 37.455285 | 0.837124 | 0.268939 | 0 | 0.9 | 0 | 0 | 0.092703 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.022222 | false | 0 | 0 | 0 | 0.288889 | 0.066667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fc15021c7b8a0ca246156466124ef8e362ed5070 | 9,146 | py | Python | blackbook/migrations/0058_bonus_paycheck_paycheckitem_paycheckitemcategory.py | bsiebens/blackbook | 636d1adc8966db158914abba43e360c6a0d23173 | [
"MIT"
] | 1 | 2021-05-10T19:15:48.000Z | 2021-05-10T19:15:48.000Z | blackbook/migrations/0058_bonus_paycheck_paycheckitem_paycheckitemcategory.py | bsiebens/BlackBook | 636d1adc8966db158914abba43e360c6a0d23173 | [
"MIT"
] | 20 | 2020-12-27T15:56:24.000Z | 2021-09-22T18:25:02.000Z | blackbook/migrations/0058_bonus_paycheck_paycheckitem_paycheckitemcategory.py | bsiebens/BlackBook | 636d1adc8966db158914abba43e360c6a0d23173 | [
"MIT"
] | null | null | null | # Generated by Django 3.2rc1 on 2021-05-07 17:12
from decimal import Decimal
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import djmoney.models.fields
class Migration(migrations.Migration):
    """Create the Bonus, Paycheck, PayCheckItem and PayCheckItemCategory tables."""

    # The generated migration repeated this 25-entry currency list verbatim on
    # every money field; it is factored out once here. Class-body names defined
    # earlier are visible while `operations` below is being evaluated.
    _CURRENCY_CHOICES = [
        ('AMD', 'Armenian Dram'), ('AZN', 'Azerbaijanian Manat'),
        ('BYN', 'Belarussian Ruble'), ('BGN', 'Bulgarian Lev'),
        ('BAM', 'Convertible Marks'), ('HRK', 'Croatian Kuna'),
        ('CZK', 'Czech Koruna'), ('DKK', 'Danish Krone'),
        ('MKD', 'Denar'), ('EUR', 'Euro'),
        ('HUF', 'Forint'), ('UAH', 'Hryvnia'),
        ('ISK', 'Iceland Krona'), ('GEL', 'Lari'),
        ('ALL', 'Lek'), ('MDL', 'Moldovan Leu'),
        ('RON', 'New Leu'), ('NOK', 'Norwegian Krone'),
        ('GBP', 'Pound Sterling'), ('RUB', 'Russian Ruble'),
        ('RSD', 'Serbian Dinar'), ('SEK', 'Swedish Krona'),
        ('CHF', 'Swiss Franc'), ('TRY', 'Turkish Lira'),
        ('PLN', 'Zloty'),
    ]

    dependencies = [
        ('blackbook', '0057_auto_20210507_1911'),
    ]

    operations = [
        migrations.CreateModel(
            name='Bonus',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField(default=django.utils.timezone.localdate)),
                ('gross_amount_currency', djmoney.models.fields.CurrencyField(choices=_CURRENCY_CHOICES, default='EUR', editable=False, max_length=3)),
                ('gross_amount', djmoney.models.fields.MoneyField(decimal_places=2, default=Decimal('0'), max_digits=10)),
                ('net_amount_currency', djmoney.models.fields.CurrencyField(choices=_CURRENCY_CHOICES, default='EUR', editable=False, max_length=3)),
                ('net_amount', djmoney.models.fields.MoneyField(decimal_places=2, default=Decimal('0'), max_digits=10)),
                ('taxes_currency', djmoney.models.fields.CurrencyField(choices=_CURRENCY_CHOICES, default='EUR', editable=False, max_length=3)),
                ('taxes', djmoney.models.fields.MoneyField(decimal_places=2, default=Decimal('0'), max_digits=10)),
                ('tax_percentage', models.DecimalField(decimal_places=2, default=0, max_digits=5)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
            ],
            options={
                'verbose_name_plural': 'bonuses',
            },
        ),
        migrations.CreateModel(
            name='Paycheck',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField(default=django.utils.timezone.localdate, verbose_name='date')),
                ('amount_currency', djmoney.models.fields.CurrencyField(choices=_CURRENCY_CHOICES, default='EUR', editable=False, max_length=3)),
                ('amount', djmoney.models.fields.MoneyField(decimal_places=2, default=Decimal('0'), max_digits=15, verbose_name='amount')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='PayCheckItemCategory',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(db_index=True, max_length=250, unique=True, verbose_name='name')),
                ('type', models.CharField(choices=[('Gross amount', 'Gross'), ('Taxable amount', 'Taxable'), ('Net amount', 'Net')], default='Gross amount', max_length=50, verbose_name='type')),
                ('counterbalance', models.BooleanField(default=False)),
                ('counterbalance_percentage', models.DecimalField(decimal_places=2, default=0, help_text='100% means the entire amount will be counterbalanced.', max_digits=5)),
                ('default_amount_currency', djmoney.models.fields.CurrencyField(choices=_CURRENCY_CHOICES, default='EUR', editable=False, max_length=3)),
                ('default_amount', djmoney.models.fields.MoneyField(decimal_places=2, default=Decimal('0'), max_digits=10)),
                ('default', models.BooleanField(default=False, help_text='Show this category as a default field on a new paycheck?')),
            ],
            options={
                'verbose_name_plural': 'paycheck item categories',
            },
        ),
        migrations.CreateModel(
            name='PayCheckItem',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('amount_currency', djmoney.models.fields.CurrencyField(choices=_CURRENCY_CHOICES, default='EUR', editable=False, max_length=3)),
                ('amount', djmoney.models.fields.MoneyField(decimal_places=2, default=Decimal('0'), max_digits=10, verbose_name='amount')),
                ('real_amount_currency', djmoney.models.fields.CurrencyField(choices=_CURRENCY_CHOICES, default='EUR', editable=False, max_length=3)),
                ('real_amount', djmoney.models.fields.MoneyField(decimal_places=2, default=Decimal('0'), max_digits=10)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='blackbook.paycheckitemcategory')),
                ('paycheck', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='blackbook.paycheck')),
            ],
        ),
    ]
| 117.25641 | 734 | 0.597638 | 1,002 | 9,146 | 5.371257 | 0.174651 | 0.036232 | 0.052954 | 0.035117 | 0.808807 | 0.808807 | 0.808807 | 0.808807 | 0.790227 | 0.790227 | 0 | 0.010539 | 0.17002 | 9,146 | 77 | 735 | 118.779221 | 0.698459 | 0.00503 | 0 | 0.422535 | 1 | 0 | 0.352605 | 0.01341 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.070423 | 0 | 0.112676 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
5dc221986c216c9644d97a13857922f2d1d5dcdf | 149 | py | Python | point_gcn/runner/__init__.py | gyshgx868/pc-classification | 1667f08785e89bbe475fe7b4dbf8141a29d8c371 | [
"MIT"
] | 7 | 2020-10-23T10:23:14.000Z | 2021-10-06T02:04:02.000Z | point_gcn/runner/__init__.py | gyshgx868/pc-classification | 1667f08785e89bbe475fe7b4dbf8141a29d8c371 | [
"MIT"
] | null | null | null | point_gcn/runner/__init__.py | gyshgx868/pc-classification | 1667f08785e89bbe475fe7b4dbf8141a29d8c371 | [
"MIT"
] | null | null | null | from point_gcn.runner.runner import Runner
from point_gcn.runner.test_runner import TestRunner
from point_gcn.runner.train_runner import TrainRunner
| 37.25 | 53 | 0.879195 | 23 | 149 | 5.478261 | 0.391304 | 0.214286 | 0.285714 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.080537 | 149 | 3 | 54 | 49.666667 | 0.919708 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
5df0c91755fce506c6d7a896788a2c4d25a0d48e | 23,222 | py | Python | h2o-py/h2o/model/binomial.py | PawarPawan/h2o-v3 | cf569a538c9e2ec16ba9fc1a75d14beda8f40c18 | [
"Apache-2.0"
] | null | null | null | h2o-py/h2o/model/binomial.py | PawarPawan/h2o-v3 | cf569a538c9e2ec16ba9fc1a75d14beda8f40c18 | [
"Apache-2.0"
] | null | null | null | h2o-py/h2o/model/binomial.py | PawarPawan/h2o-v3 | cf569a538c9e2ec16ba9fc1a75d14beda8f40c18 | [
"Apache-2.0"
] | 1 | 2020-12-18T19:20:02.000Z | 2020-12-18T19:20:02.000Z | """
Binomial Models
"""
from metrics_base import *
class H2OBinomialModel(ModelBase):
"""
Class for Binomial models.
"""
def __init__(self, dest_key, model_json):
"""
Create a new binomial model.

:param dest_key: destination key identifying the model in the H2O cluster.
:param model_json: JSON model description returned by the H2O backend.
"""
super(H2OBinomialModel, self).__init__(dest_key, model_json,H2OBinomialModelMetrics)
def F1(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the F1 for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the F1 value for the training data.
    :param valid: If valid is True, then return the F1 value for the validation data.
    :param xval: If xval is True, then return the F1 value for the cross validation data.
    :return: The F1 for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    # None entries are kept so callers can see which splits were unavailable.
    m = {k: None if v is None else v.metric("f1", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) keeps single-split unwrapping working on Python 3, where
    # dict.values() is a non-indexable view (the original m.values()[0] breaks).
    return list(m.values())[0] if len(m) == 1 else m
def F2(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the F2 for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the F2 value for the training data.
    :param valid: If valid is True, then return the F2 value for the validation data.
    :param xval: If xval is True, then return the F2 value for the cross validation data.
    :return: The F2 for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    m = {k: None if v is None else v.metric("f2", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def F0point5(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the F0.5 for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the F0point5 value for the training data.
    :param valid: If valid is True, then return the F0point5 value for the validation data.
    :param xval: If xval is True, then return the F0point5 value for the cross validation data.
    :return: The F0point5 for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    m = {k: None if v is None else v.metric("f0point5", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def accuracy(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the accuracy for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the accuracy value for the training data.
    :param valid: If valid is True, then return the accuracy value for the validation data.
    :param xval: If xval is True, then return the accuracy value for the cross validation data.
    :return: The accuracy for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    m = {k: None if v is None else v.metric("accuracy", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def error(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the error for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the error value for the training data.
    :param valid: If valid is True, then return the error value for the validation data.
    :param xval: If xval is True, then return the error value for the cross validation data.
    :return: The error for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    # Error is derived per threshold as 1 - accuracy.
    m = {k: None if v is None
         else [[acc[0], 1 - acc[1]] for acc in v.metric("accuracy", thresholds=thresholds)]
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def precision(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the precision for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the precision value for the training data.
    :param valid: If valid is True, then return the precision value for the validation data.
    :param xval: If xval is True, then return the precision value for the cross validation data.
    :return: The precision for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    m = {k: None if v is None else v.metric("precision", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def tpr(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the True Positive Rate for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the tpr value for the training data.
    :param valid: If valid is True, then return the tpr value for the validation data.
    :param xval: If xval is True, then return the tpr value for the cross validation data.
    :return: The tpr for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    m = {k: None if v is None else v.metric("tpr", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def tnr(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the True Negative Rate for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the tnr value for the training data.
    :param valid: If valid is True, then return the tnr value for the validation data.
    :param xval: If xval is True, then return the tnr value for the cross validation data.
    :return: The tnr for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    m = {k: None if v is None else v.metric("tnr", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def fnr(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the False Negative Rates for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the fnr value for the training data.
    :param valid: If valid is True, then return the fnr value for the validation data.
    :param xval: If xval is True, then return the fnr value for the cross validation data.
    :return: The fnr for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    m = {k: None if v is None else v.metric("fnr", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def fpr(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the False Positive Rates for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the fpr value for the training data.
    :param valid: If valid is True, then return the fpr value for the validation data.
    :param xval: If xval is True, then return the fpr value for the cross validation data.
    :return: The fpr for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    m = {k: None if v is None else v.metric("fpr", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def recall(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the Recall (AKA True Positive Rate) for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the recall value for the training data.
    :param valid: If valid is True, then return the recall value for the validation data.
    :param xval: If xval is True, then return the recall value for the cross validation data.
    :return: The recall for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    # Recall is the tpr metric under another name.
    m = {k: None if v is None else v.metric("tpr", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def sensitivity(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the sensitivity (AKA True Positive Rate or Recall) for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the sensitivity value for the training data.
    :param valid: If valid is True, then return the sensitivity value for the validation data.
    :param xval: If xval is True, then return the sensitivity value for the cross validation data.
    :return: The sensitivity for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    # Sensitivity is the tpr metric under another name.
    m = {k: None if v is None else v.metric("tpr", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def fallout(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the Fallout (AKA False Positive Rate) for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the fallout value for the training data.
    :param valid: If valid is True, then return the fallout value for the validation data.
    :param xval: If xval is True, then return the fallout value for the cross validation data.
    :return: The fallout for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    # Fallout is the fpr metric under another name.
    m = {k: None if v is None else v.metric("fpr", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def missrate(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the miss rate (AKA False Negative Rate) for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the missrate value for the training data.
    :param valid: If valid is True, then return the missrate value for the validation data.
    :param xval: If xval is True, then return the missrate value for the cross validation data.
    :return: The missrate for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    # Miss rate is the fnr metric under another name.
    m = {k: None if v is None else v.metric("fnr", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def specificity(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the specificity (AKA True Negative Rate) for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the specificity value for the training data.
    :param valid: If valid is True, then return the specificity value for the validation data.
    :param xval: If xval is True, then return the specificity value for the cross validation data.
    :return: The specificity for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    # Specificity is the tnr metric under another name.
    m = {k: None if v is None else v.metric("tnr", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def mcc(self, thresholds=None, train=False, valid=False, xval=False):
    """
    Get the mcc (Matthews Correlation Coefficient) for a set of thresholds.

    If all are False (default), then return the training metric value. If more
    than one option is set to True, then return a dictionary of metrics where
    the keys are "train", "valid", and "xval".

    :param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
    :param train: If train is True, then return the mcc value for the training data.
    :param valid: If valid is True, then return the mcc value for the validation data.
    :param xval: If xval is True, then return the mcc value for the cross validation data.
    :return: The mcc for this binomial model.
    """
    tm = ModelBase._get_metrics(self, train, valid, xval)
    # The backend exposes MCC under the "absolute_MCC" metric name.
    m = {k: None if v is None else v.metric("absolute_MCC", thresholds=thresholds)
         for k, v in tm.items()}
    # list(...) is required on Python 3, where dict.values() is not indexable.
    return list(m.values())[0] if len(m) == 1 else m
def max_per_class_error(self, thresholds=None, train=False, valid=False, xval=False):
"""
Get the max per class error for a set of thresholds.
If all are False (default), then return the training metric value.
If more than one options is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
:param train: If train is True, then return the max_per_class_error value for the training data.
:param valid: If valid is True, then return the max_per_class_error value for the validation data.
:param xval: If xval is True, then return the max_per_class_error value for the cross validation data.
:return: The max_per_class_error for this binomial model.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(tm.keys(),tm.values()): m[k] = None if v is None else [[mpca[0],1-mpca[1]] for mpca in v.metric("min_per_class_accuracy", thresholds=thresholds)]
return m.values()[0] if len(m) == 1 else m
def metric(self, metric, thresholds=None, train=False, valid=False, xval=False):
"""
Get the metric value for a set of thresholds.
If all are False (default), then return the training metric value.
If more than one options is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param train: If train is True, then return the metrics for the training data.
:param valid: If valid is True, then return the metrics for the validation data.
:param xval: If xval is True, then return the metrics for the cross validation data.
:return: The metrics for this binomial model.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(tm.keys(),tm.values()): m[k] = None if v is None else v.metric(metric,thresholds)
return m.values()[0] if len(m) == 1 else m
def plot(self, type="roc", train=False, valid=False, xval=False, **kwargs):
"""
Produce the desired metric plot
If all are False (default), then return the training metric value.
:param type: the type of metric plot (currently, only ROC supported)
:param train: If train is True, then plot for training data.
:param valid: If valid is True, then plot for validation data.
:param xval: If xval is True, then plot for cross validation data.
:param show: if False, the plot is not shown. matplotlib show method is blocking.
:return: None
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
for k,v in zip(tm.keys(),tm.values()):
if v is not None: v.plot(type=type, **kwargs)
def confusion_matrix(self, metrics=None, thresholds=None, train=False, valid=False, xval=False):
"""
Get the confusion matrix for the specified metrics/thresholds
If all are False (default), then return the training metric value.
If more than one options is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param metrics: A string (or list of strings) in {"min_per_class_accuracy", "absolute_MCC", "tnr", "fnr", "fpr", "tpr", "precision", "accuracy", "f0point5", "f2", "f1"}
:param thresholds: thresholds parameter must be a list (i.e. [0.01, 0.5, 0.99]). If None, then the thresholds in this set of metrics will be used.
:param train: If train is True, then return the confusion matrix value for the training data.
:param valid: If valid is True, then return the confusion matrix value for the validation data.
:param xval: If xval is True, then return the confusion matrix value for the cross validation data.
:return: The confusion matrix for this binomial model.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(tm.keys(),tm.values()): m[k] = None if v is None else v.confusion_matrix(metrics=metrics, thresholds=thresholds)
return m.values()[0] if len(m) == 1 else m
def find_threshold_by_max_metric(self,metric,train=False, valid=False, xval=False):
"""
If all are False (default), then return the training metric value.
If more than one options is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param train: If train is True, then return the threshold_by_max_metric value for the training data.
:param valid: If valid is True, then return the threshold_by_max_metric value for the validation data.
:param xval: If xval is True, then return the threshold_by_max_metric value for the cross validation data.
:return: The threshold_by_max_metric for this binomial model.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(tm.keys(),tm.values()): m[k] = None if v is None else v.find_threshold_by_max_metric(metric)
return m.values()[0] if len(m) == 1 else m
def find_idx_by_threshold(self,threshold,train=False, valid=False, xval=False):
"""
Retrieve the index in this metric's threshold list at which the given threshold is located.
If all are False (default), then return the training metric value.
If more than one options is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param train: If train is True, then return the idx_by_threshold for the training data.
:param valid: If valid is True, then return the idx_by_threshold for the validation data.
:param xval: If xval is True, then return the idx_by_threshold for the cross validation data.
:return: The idx_by_threshold for this binomial model.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(tm.keys(),tm.values()): m[k] = None if v is None else v.find_idx_by_threshold(threshold)
return m.values()[0] if len(m) == 1 else m
| 56.777506 | 172 | 0.691801 | 3,886 | 23,222 | 4.10422 | 0.036027 | 0.066462 | 0.069283 | 0.063201 | 0.922816 | 0.918992 | 0.910026 | 0.904822 | 0.874349 | 0.838109 | 0 | 0.012439 | 0.210662 | 23,222 | 408 | 173 | 56.916667 | 0.857665 | 0.648566 | 0 | 0.646018 | 0 | 0 | 0.014207 | 0.003095 | 0 | 0 | 0 | 0 | 0 | 1 | 0.20354 | false | 0 | 0.00885 | 0 | 0.40708 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5dfab27a1e35aeb2d91dad48fae4dc3cedb78ceb | 6,426 | py | Python | tools/apigee-analytics-load-generator-demo/v1/load-generator/backend/services/checkout/checkout.py | igalonso/devrel | aecd1d281bc21aedb9d88611a2a61ed46a30f3ed | [
"Apache-2.0"
] | null | null | null | tools/apigee-analytics-load-generator-demo/v1/load-generator/backend/services/checkout/checkout.py | igalonso/devrel | aecd1d281bc21aedb9d88611a2a61ed46a30f3ed | [
"Apache-2.0"
] | null | null | null | tools/apigee-analytics-load-generator-demo/v1/load-generator/backend/services/checkout/checkout.py | igalonso/devrel | aecd1d281bc21aedb9d88611a2a61ed46a30f3ed | [
"Apache-2.0"
] | null | null | null | import time
import random
def getCheckout(id):
sleepTime = random.randint(1,20)
print("random sleep: ",sleepTime)
if id == 1:
checkout = {
"id":21,
"items":[
],
"itemsTotal":0,
"adjustments":[
],
"adjustmentsTotal":0,
"total":0,
"customer":{
"id":1,
"email":"shop@example.com",
"firstName":"John",
"lastName":"Doe",
"user":{
"id":1,
"username":"shop@example.com",
"enabled":"true"
},
"_links":{
"self":{
"href":"\/api\/v1\/customers\/1"
}
}
},
"channel":{
"id":1,
"code":"US_WEB",
"_links":{
"self":{
"href":"\/api\/v1\/channels\/US_WEB"
}
}
},
"currencyCode":"USD",
"localeCode":"en_US",
"checkoutState":"cart"
}
elif id == 2:
checkout = {
"id":21,
"items":[
],
"itemsTotal":0,
"adjustments":[
],
"adjustmentsTotal":0,
"total":0,
"customer":{
"id":1,
"email":"shop@example.com",
"firstName":"John",
"lastName":"Doe",
"user":{
"id":1,
"username":"shop@example.com",
"enabled":"true"
},
"_links":{
"self":{
"href":"\/api\/v1\/customers\/1"
}
}
},
"channel":{
"id":1,
"code":"US_WEB",
"_links":{
"self":{
"href":"\/api\/v1\/channels\/US_WEB"
}
}
},
"currencyCode":"USD",
"localeCode":"en_US",
"checkoutState":"cart"
}
elif id == 3:
checkout = {
"id":21,
"items":[
],
"itemsTotal":0,
"adjustments":[
],
"adjustmentsTotal":0,
"total":0,
"customer":{
"id":1,
"email":"shop@example.com",
"firstName":"John",
"lastName":"Doe",
"user":{
"id":1,
"username":"shop@example.com",
"enabled":"true"
},
"_links":{
"self":{
"href":"\/api\/v1\/customers\/1"
}
}
},
"channel":{
"id":1,
"code":"US_WEB",
"_links":{
"self":{
"href":"\/api\/v1\/channels\/US_WEB"
}
}
},
"currencyCode":"USD",
"localeCode":"en_US",
"checkoutState":"cart"
}
elif id == 4:
checkout = {
"id":21,
"items":[
],
"itemsTotal":0,
"adjustments":[
],
"adjustmentsTotal":0,
"total":0,
"customer":{
"id":1,
"email":"shop@example.com",
"firstName":"John",
"lastName":"Doe",
"user":{
"id":1,
"username":"shop@example.com",
"enabled":"true"
},
"_links":{
"self":{
"href":"\/api\/v1\/customers\/1"
}
}
},
"channel":{
"id":1,
"code":"US_WEB",
"_links":{
"self":{
"href":"\/api\/v1\/channels\/US_WEB"
}
}
},
"currencyCode":"USD",
"localeCode":"en_US",
"checkoutState":"cart"
}
elif id == 5:
checkout = {
"id":21,
"items":[
],
"itemsTotal":0,
"adjustments":[
],
"adjustmentsTotal":0,
"total":0,
"customer":{
"id":1,
"email":"shop@example.com",
"firstName":"John",
"lastName":"Doe",
"user":{
"id":1,
"username":"shop@example.com",
"enabled":"true"
},
"_links":{
"self":{
"href":"\/api\/v1\/customers\/1"
}
}
},
"channel":{
"id":1,
"code":"US_WEB",
"_links":{
"self":{
"href":"\/api\/v1\/channels\/US_WEB"
}
}
},
"currencyCode":"USD",
"localeCode":"en_US",
"checkoutState":"cart"
}
else:
return Exception
if sleepTime == id:
print("sleeping")
time.sleep (random.randint(0,7))
return checkout | 29.612903 | 64 | 0.253501 | 342 | 6,426 | 4.690058 | 0.178363 | 0.029925 | 0.087282 | 0.099751 | 0.887781 | 0.887781 | 0.887781 | 0.887781 | 0.887781 | 0.887781 | 0 | 0.026125 | 0.612823 | 6,426 | 217 | 65 | 29.612903 | 0.618569 | 0 | 0 | 0.728155 | 0 | 0 | 0.228256 | 0.038898 | 0 | 0 | 0 | 0 | 0 | 1 | 0.004854 | false | 0 | 0.009709 | 0 | 0.024272 | 0.009709 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
5375726fa68b8bd88e77dbe75bb87916416eaf0d | 8,560 | py | Python | tests/pytests/functional/modules/state/requisites/test_onchanges.py | tomdoherty/salt | f87d5d7abbf9777773c4d91fdafecb8b1a728e76 | [
"Apache-2.0"
] | 9,425 | 2015-01-01T05:59:24.000Z | 2022-03-31T20:44:05.000Z | tests/pytests/functional/modules/state/requisites/test_onchanges.py | tomdoherty/salt | f87d5d7abbf9777773c4d91fdafecb8b1a728e76 | [
"Apache-2.0"
] | 33,507 | 2015-01-01T00:19:56.000Z | 2022-03-31T23:48:20.000Z | tests/pytests/functional/modules/state/requisites/test_onchanges.py | tomdoherty/salt | f87d5d7abbf9777773c4d91fdafecb8b1a728e76 | [
"Apache-2.0"
] | 5,810 | 2015-01-01T19:11:45.000Z | 2022-03-31T02:37:20.000Z | import pytest
from . import normalize_ret
pytestmark = [
pytest.mark.windows_whitelisted,
]
def test_requisites_onchanges_any(state, state_tree):
"""
Call sls file containing several require_in and require.
Ensure that some of them are failing and that the order is right.
"""
sls_contents = """
changing_state:
cmd.run:
- name: echo "Changed!"
another_changing_state:
cmd.run:
- name: echo "Changed!"
non_changing_state:
test.succeed_without_changes:
- comment: non_changing_state not changed
another_non_changing_state:
test.succeed_without_changes:
- comment: another_non_changing_state not changed
# Should succeed since at least one will have changes
test_one_changing_states:
cmd.run:
- name: echo "Success!"
- onchanges_any:
- cmd: changing_state
- cmd: another_changing_state
- test: non_changing_state
- test: another_non_changing_state
test_two_non_changing_states:
cmd.run:
- name: echo "Should not run"
- onchanges_any:
- test: non_changing_state
- test: another_non_changing_state
"""
expected_result = {
'cmd_|-another_changing_state_|-echo "Changed!"_|-run': {
"__run_num__": 1,
"changes": True,
"comment": 'Command "echo "Changed!"" run',
"result": True,
},
'cmd_|-changing_state_|-echo "Changed!"_|-run': {
"__run_num__": 0,
"changes": True,
"comment": 'Command "echo "Changed!"" run',
"result": True,
},
'cmd_|-test_one_changing_states_|-echo "Success!"_|-run': {
"__run_num__": 4,
"changes": True,
"comment": 'Command "echo "Success!"" run',
"result": True,
},
'cmd_|-test_two_non_changing_states_|-echo "Should not run"_|-run': {
"__run_num__": 5,
"changes": False,
"comment": "State was not run because none of the onchanges reqs changed",
"result": True,
},
"test_|-another_non_changing_state_|-another_non_changing_state_|-succeed_without_changes": {
"__run_num__": 3,
"changes": False,
"comment": "another_non_changing_state not changed",
"result": True,
},
"test_|-non_changing_state_|-non_changing_state_|-succeed_without_changes": {
"__run_num__": 2,
"changes": False,
"comment": "non_changing_state not changed",
"result": True,
},
}
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
ret = state.sls("requisite")
result = normalize_ret(ret.raw)
assert result == expected_result
def test_onchanges_requisite(state, state_tree):
"""
Tests a simple state using the onchanges requisite
"""
sls_contents = """
changing_state:
cmd.run:
- name: echo "Changed!"
non_changing_state:
test.succeed_without_changes:
- comment: non_changing_state not changed
test_changing_state:
cmd.run:
- name: echo "Success!"
- onchanges:
- cmd: changing_state
test_non_changing_state:
cmd.run:
- name: echo "Should not run"
- onchanges:
- test: non_changing_state
"""
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
ret = state.sls("requisite")
assert (
ret['cmd_|-test_changing_state_|-echo "Success!"_|-run'].comment
== 'Command "echo "Success!"" run'
)
assert (
ret['cmd_|-test_non_changing_state_|-echo "Should not run"_|-run'].comment
== "State was not run because none of the onchanges reqs changed"
)
def test_onchanges_requisite_multiple(state, state_tree):
"""
Tests a simple state using the onchanges requisite
"""
sls_contents = """
changing_state:
cmd.run:
- name: echo "Changed!"
another_changing_state:
cmd.run:
- name: echo "Changed!"
non_changing_state:
test.succeed_without_changes:
- comment: non_changing_state not changed
another_non_changing_state:
test.succeed_without_changes:
- comment: another_non_changing_state not changed
test_two_changing_states:
cmd.run:
- name: echo "Success!"
- onchanges:
- cmd: changing_state
- cmd: another_changing_state
test_two_non_changing_states:
cmd.run:
- name: echo "Should not run"
- onchanges:
- test: non_changing_state
- test: another_non_changing_state
test_one_changing_state:
cmd.run:
- name: echo "Success!"
- onchanges:
- cmd: changing_state
- test: non_changing_state
"""
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
ret = state.sls("requisite")
assert (
ret['cmd_|-test_two_changing_states_|-echo "Success!"_|-run'].comment
== 'Command "echo "Success!"" run'
)
assert (
ret[
'cmd_|-test_two_non_changing_states_|-echo "Should not run"_|-run'
].comment
== "State was not run because none of the onchanges reqs changed"
)
assert (
ret['cmd_|-test_one_changing_state_|-echo "Success!"_|-run'].comment
== 'Command "echo "Success!"" run'
)
def test_onchanges_in_requisite(state, state_tree):
"""
Tests a simple state using the onchanges_in requisite
"""
sls_contents = """
changing_state:
cmd.run:
- name: echo "Changed!"
- onchanges_in:
- cmd: test_changes_expected
non_changing_state:
test.succeed_without_changes:
- comment: non_changing_state not changed
- onchanges_in:
- cmd: test_changes_not_expected
test_changes_expected:
cmd.run:
- name: echo "Success!"
test_changes_not_expected:
cmd.run:
- name: echo "Should not run"
"""
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
ret = state.sls("requisite")
assert (
ret['cmd_|-test_changes_expected_|-echo "Success!"_|-run'].comment
== 'Command "echo "Success!"" run'
)
assert (
ret['cmd_|-test_changes_not_expected_|-echo "Should not run"_|-run'].comment
== "State was not run because none of the onchanges reqs changed"
)
def test_onchanges_requisite_no_state_module(state, state_tree):
"""
Tests a simple state using the onchanges requisite without state modules
"""
sls_contents = """
changing_state:
cmd.run:
- name: echo "Changed!"
non_changing_state:
test.succeed_without_changes:
- comment: non_changing_state not changed
test_changing_state:
cmd.run:
- name: echo "Success!"
- onchanges:
- changing_state
test_non_changing_state:
cmd.run:
- name: echo "Should not run"
- onchanges:
- non_changing_state
"""
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
ret = state.sls("requisite")
assert (
ret['cmd_|-test_changing_state_|-echo "Success!"_|-run'].comment
== 'Command "echo "Success!"" run'
)
def test_onchanges_requisite_with_duration(state, state_tree):
"""
Tests a simple state using the onchanges requisite
the state will not run but results will include duration
"""
sls_contents = """
changing_state:
cmd.run:
- name: echo "Changed!"
non_changing_state:
test.succeed_without_changes:
- comment: non_changing_state not changed
test_changing_state:
cmd.run:
- name: echo "Success!"
- onchanges:
- cmd: changing_state
test_non_changing_state:
cmd.run:
- name: echo "Should not run"
- onchanges:
- test: non_changing_state
"""
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
ret = state.sls("requisite")
assert (
"duration"
in ret['cmd_|-test_non_changing_state_|-echo "Should not run"_|-run']
)
| 28.918919 | 101 | 0.596846 | 940 | 8,560 | 5.087234 | 0.098936 | 0.168549 | 0.123798 | 0.061481 | 0.865747 | 0.824341 | 0.811585 | 0.774362 | 0.734003 | 0.714555 | 0 | 0.001001 | 0.299533 | 8,560 | 295 | 102 | 29.016949 | 0.796531 | 0.053738 | 0 | 0.717949 | 0 | 0 | 0.670166 | 0.181477 | 0 | 0 | 0 | 0 | 0.042735 | 1 | 0.025641 | false | 0 | 0.008547 | 0 | 0.034188 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
53a3e4a41df4cca5dd263b6a739e7edda6fad33e | 100,851 | py | Python | metalsmith/test/test_provisioner.py | openstack/metalsmith | 880d9e47d3fe3f8d6cb83311b0fde3173f92beb4 | [
"Apache-2.0"
] | 8 | 2018-06-27T11:19:31.000Z | 2020-06-17T08:05:11.000Z | metalsmith/test/test_provisioner.py | openstack/metalsmith | 880d9e47d3fe3f8d6cb83311b0fde3173f92beb4 | [
"Apache-2.0"
] | null | null | null | metalsmith/test/test_provisioner.py | openstack/metalsmith | 880d9e47d3fe3f8d6cb83311b0fde3173f92beb4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from unittest import mock
from openstack import exceptions as os_exc
import requests
from metalsmith import _instance
from metalsmith import _network_metadata
from metalsmith import _provisioner
from metalsmith import _utils
from metalsmith import exceptions
from metalsmith import instance_config
from metalsmith import sources
# Attribute names given to mock.Mock(spec=...) for fake bare metal nodes, so
# that tests fail loudly when code accesses an attribute a real ironic Node
# object would not have.
NODE_FIELDS = ['name', 'id', 'instance_info', 'instance_id', 'is_maintenance',
               'maintenance_reason', 'properties', 'provision_state', 'extra',
               'last_error', 'traits', 'resource_class', 'conductor_group',
               'allocation_id']
class TestInit(unittest.TestCase):
    """Tests for argument validation in the Provisioner constructor."""

    def test_missing_auth(self):
        # Neither a session nor a cloud region: the constructor must refuse.
        with self.assertRaisesRegex(TypeError, 'must be provided'):
            _provisioner.Provisioner()

    def test_both_provided(self):
        # Supplying both authentication sources is ambiguous and rejected.
        with self.assertRaisesRegex(TypeError, 'not both'):
            _provisioner.Provisioner(session=mock.Mock(),
                                     cloud_region=mock.Mock())

    @mock.patch.object(_provisioner.connection, 'Connection', autospec=True)
    def test_session_only(self, mock_conn):
        fake_session = mock.Mock()
        _provisioner.Provisioner(session=fake_session)
        mock_conn.assert_called_once_with(session=fake_session)

    @mock.patch.object(_provisioner.connection, 'Connection', autospec=True)
    def test_cloud_region_only(self, mock_conn):
        fake_region = mock.Mock()
        fake_baremetal = mock.Mock(spec=['get_endpoint'])
        fake_baremetal.get_endpoint.return_value = 'http://'
        mock_conn.return_value.baremetal = fake_baremetal
        _provisioner.Provisioner(cloud_region=fake_region)
        mock_conn.assert_called_once_with(config=fake_region)
class Base(unittest.TestCase):
    """Shared fixture: a Provisioner wired to a fully mocked SDK connection."""

    def setUp(self):
        super(Base, self).setUp()
        self.pr = _provisioner.Provisioner(mock.Mock())
        self._reset_api_mock()
        # A typical available node; local_gb=100 gives it enough disk for
        # the images used in these tests.
        self.node = mock.Mock(spec=NODE_FIELDS + ['to_dict'],
                              id='000', instance_id=None,
                              properties={'local_gb': 100},
                              instance_info={},
                              is_maintenance=False, extra={},
                              allocation_id=None)
        # mock.Mock(name=...) names the mock itself, so set .name afterwards.
        self.node.name = 'control-0'

    def _reset_api_mock(self):
        # Patch Provisioner._get_node into a pass-through so tests control
        # exactly which node objects the provisioner operates on.
        get_node_patcher = mock.patch.object(
            _provisioner.Provisioner, '_get_node', autospec=True)
        self.mock_get_node = get_node_patcher.start()
        self.addCleanup(get_node_patcher.stop)
        self.mock_get_node.side_effect = (
            lambda self, n, refresh=False: n
        )
        self.api = mock.Mock(spec=['image', 'network', 'baremetal'])
        # Update/patch/provision calls return the node(s) they were given,
        # mirroring the real SDK closely enough for these tests.
        self.api.baremetal.update_node.side_effect = lambda n, **kw: n
        self.api.baremetal.patch_node.side_effect = lambda n, _p: n
        self.api.network.ports.return_value = [
            mock.Mock(spec=['id'], id=i) for i in ('000', '111')
        ]
        self.api.baremetal.set_node_provision_state.side_effect = (
            lambda node, *args, **kwargs: node)
        self.api.baremetal.wait_for_nodes_provision_state.side_effect = (
            lambda nodes, *args, **kwargs: nodes)
        self.pr.connection = self.api
class TestGetFindNode(unittest.TestCase):
    """Tests for Provisioner._get_node and _find_node_and_allocation."""

    def setUp(self):
        super(TestGetFindNode, self).setUp()
        self.pr = _provisioner.Provisioner(mock.Mock())
        self.api = mock.Mock(spec=['baremetal'])
        self.pr.connection = self.api

    def test__get_node_with_node(self):
        # A Node object is returned as-is without touching the API.
        fake = mock.Mock(spec=['id', 'name'])
        self.assertIs(self.pr._get_node(fake), fake)
        self.assertFalse(self.api.baremetal.get_node.called)

    def test__get_node_with_node_refresh(self):
        # With refresh=True the node is re-fetched by its ID.
        fake = mock.Mock(spec=['id', 'name'])
        fetched = self.pr._get_node(fake, refresh=True)
        self.assertIs(fetched, self.api.baremetal.get_node.return_value)
        self.api.baremetal.get_node.assert_called_once_with(fake.id)

    def test__get_node_with_instance(self):
        # An Instance wrapper yields its embedded node without an API call.
        fake = mock.Mock(spec=['uuid', 'node'])
        self.assertIs(self.pr._get_node(fake), fake.node)
        self.assertFalse(self.api.baremetal.get_node.called)

    def test__get_node_with_instance_refresh(self):
        # Refreshing an Instance re-fetches the underlying node by its ID.
        fake = mock.Mock(spec=['uuid', 'node'])
        fetched = self.pr._get_node(fake, refresh=True)
        self.assertIs(fetched, self.api.baremetal.get_node.return_value)
        self.api.baremetal.get_node.assert_called_once_with(fake.node.id)

    def test__get_node_with_string(self):
        # A string is treated as a node identifier and looked up.
        fetched = self.pr._get_node('node')
        self.assertIs(fetched, self.api.baremetal.get_node.return_value)
        self.api.baremetal.get_node.assert_called_once_with('node')

    def test__find_node_and_allocation_by_node(self):
        # A Node object is returned directly, with no allocation.
        fake = mock.Mock(spec=['id', 'name'])
        found, allocation = self.pr._find_node_and_allocation(fake)
        self.assertIs(found, fake)
        self.assertIsNone(allocation)

    def test__find_node_and_allocation_by_node_not_found(self):
        # A vanished node translates into InstanceNotFound.
        fake = mock.Mock(spec=['id', 'name'])
        self.api.baremetal.get_node.side_effect = os_exc.ResourceNotFound
        self.assertRaises(exceptions.InstanceNotFound,
                          self.pr._find_node_and_allocation, fake,
                          refresh=True)

    def test__find_node_and_allocation_by_hostname(self):
        # A hostname resolves through its allocation to the backing node.
        found, allocation = self.pr._find_node_and_allocation('node')
        self.assertIs(found, self.api.baremetal.get_node.return_value)
        self.assertIs(allocation,
                      self.api.baremetal.get_allocation.return_value)
        self.api.baremetal.get_node.assert_called_once_with(
            self.api.baremetal.get_allocation.return_value.node_id)

    def test__find_node_and_allocation_by_node_id(self):
        # Without a matching allocation, the value is used as a node ID.
        self.api.baremetal.get_allocation.side_effect = (
            os_exc.ResourceNotFound())
        found, allocation = self.pr._find_node_and_allocation('node')
        self.assertIs(found, self.api.baremetal.get_node.return_value)
        self.assertIsNone(allocation)
        self.api.baremetal.get_node.assert_called_once_with('node')

    def test__find_node_and_allocation_by_hostname_node_in_allocation(self):
        # An allocation pointing at a missing node raises InstanceNotFound.
        self.api.baremetal.get_node.side_effect = os_exc.ResourceNotFound
        self.assertRaises(exceptions.InstanceNotFound,
                          self.pr._find_node_and_allocation, 'node')
        self.api.baremetal.get_node.assert_called_once_with(
            self.api.baremetal.get_allocation.return_value.node_id)

    def test__find_node_and_allocation_by_hostname_bad_allocation(self):
        # An allocation without a node_id cannot be resolved at all.
        self.api.baremetal.get_allocation.return_value.node_id = None
        self.assertRaises(exceptions.InstanceNotFound,
                          self.pr._find_node_and_allocation, 'node')
        self.assertFalse(self.api.baremetal.get_node.called)
class TestReserveNode(Base):
RSC = 'baremetal'
def _node(self, **kwargs):
kwargs.setdefault('id', '000')
kwargs.setdefault('properties', {'local_gb': 100})
kwargs.setdefault('instance_info', {})
kwargs.setdefault('instance_id', None)
kwargs.setdefault('is_maintenance', False)
kwargs.setdefault('resource_class', self.RSC)
result = mock.Mock(spec=NODE_FIELDS, **kwargs)
result.name = kwargs.get('name')
return result
def test_no_nodes(self):
self.api.baremetal.nodes.return_value = []
self.assertRaises(exceptions.NodesNotFound,
self.pr.reserve_node, self.RSC,
conductor_group='foo')
self.assertFalse(self.api.baremetal.update_node.called)
    def test_simple_ok(self):
        """Happy path: an allocation is created and its node is returned."""
        expected = self._node()
        self.api.baremetal.get_node.return_value = expected
        node = self.pr.reserve_node(self.RSC)
        self.assertIs(expected, node)
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=None,
            resource_class=self.RSC, traits=None)
        self.api.baremetal.wait_for_allocation.assert_called_once_with(
            self.api.baremetal.create_allocation.return_value)
        self.api.baremetal.get_node.assert_called_once_with(
            self.api.baremetal.wait_for_allocation.return_value.node_id)
        # No capabilities were requested, so the node is not patched, and the
        # success path must not roll back the allocation.
        self.assertFalse(self.api.baremetal.patch_node.called)
        self.assertFalse(self.api.baremetal.delete_allocation.called)
    def test_create_allocation_failed(self):
        """A failure creating the allocation surfaces as ReservationFailed."""
        self.api.baremetal.create_allocation.side_effect = (
            os_exc.SDKException('boom'))
        self.assertRaisesRegex(exceptions.ReservationFailed, 'boom',
                               self.pr.reserve_node, self.RSC)
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=None,
            resource_class=self.RSC, traits=None)
        # Nothing to clean up: the allocation was never created.
        self.assertFalse(self.api.baremetal.delete_allocation.called)
        self.assertFalse(self.api.baremetal.patch_node.called)
    def test_allocation_failed(self):
        """An allocation failing while waiting is deleted again."""
        self.api.baremetal.wait_for_allocation.side_effect = (
            os_exc.SDKException('boom'))
        self.assertRaisesRegex(exceptions.ReservationFailed, 'boom',
                               self.pr.reserve_node, self.RSC)
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=None,
            resource_class=self.RSC, traits=None)
        # The half-made allocation must be rolled back.
        self.api.baremetal.delete_allocation.assert_called_once_with(
            self.api.baremetal.create_allocation.return_value)
        self.assertFalse(self.api.baremetal.patch_node.called)
    @mock.patch.object(_utils.LOG, 'exception', autospec=True)
    def test_allocation_failed_clean_up_failed(self, mock_log):
        """A failing clean-up is logged; the original error is still raised."""
        self.api.baremetal.delete_allocation.side_effect = RuntimeError()
        self.api.baremetal.wait_for_allocation.side_effect = (
            os_exc.SDKException('boom'))
        # The ReservationFailed from the wait, not the RuntimeError from the
        # clean-up, must propagate.
        self.assertRaisesRegex(exceptions.ReservationFailed, 'boom',
                               self.pr.reserve_node, self.RSC)
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=None,
            resource_class=self.RSC, traits=None)
        self.api.baremetal.delete_allocation.assert_called_once_with(
            self.api.baremetal.create_allocation.return_value)
        self.assertFalse(self.api.baremetal.patch_node.called)
        mock_log.assert_called_once_with('Failed to delete failed allocation')
    def test_with_hostname(self):
        """The requested hostname is passed on as the allocation name."""
        expected = self._node()
        self.api.baremetal.get_node.return_value = expected
        self.api.baremetal.nodes.return_value = [expected, self._node()]
        node = self.pr.reserve_node(self.RSC, hostname='example.com')
        self.assertIs(expected, node)
        self.api.baremetal.create_allocation.assert_called_once_with(
            name='example.com', candidate_nodes=None,
            resource_class=self.RSC, traits=None)
        self.api.baremetal.get_node.assert_called_once_with(
            self.api.baremetal.wait_for_allocation.return_value.node_id)
        self.assertFalse(self.api.baremetal.patch_node.called)
    def test_with_capabilities(self):
        """Only nodes matching the capabilities become allocation candidates.

        The matched capabilities must also be recorded in instance_info.
        """
        nodes = [
            self._node(properties={'local_gb': 100, 'capabilities': caps})
            for caps in ['answer:1', 'answer:42', None]
        ]
        expected = nodes[1]
        self.api.baremetal.nodes.return_value = nodes
        self.api.baremetal.get_node.return_value = expected
        node = self.pr.reserve_node(self.RSC, capabilities={'answer': '42'})
        self.assertIs(node, expected)
        # Only the node with capability answer:42 is offered as a candidate.
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=[expected.id],
            resource_class=self.RSC, traits=None)
        self.api.baremetal.get_node.assert_called_once_with(
            self.api.baremetal.wait_for_allocation.return_value.node_id)
        self.api.baremetal.patch_node.assert_called_once_with(
            node, [{'path': '/instance_info/capabilities',
                    'op': 'add', 'value': {'answer': '42'}}])
    def test_node_update_failed(self):
        """An SDK error while patching the node rolls back the allocation."""
        expected = self._node(properties={'local_gb': 100,
                                          'capabilities': {'answer': '42'}})
        self.api.baremetal.get_node.return_value = expected
        self.api.baremetal.nodes.return_value = [expected]
        self.api.baremetal.patch_node.side_effect = os_exc.SDKException('boom')
        self.assertRaisesRegex(exceptions.ReservationFailed, 'boom',
                               self.pr.reserve_node, self.RSC,
                               capabilities={'answer': '42'})
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=[expected.id],
            resource_class=self.RSC, traits=None)
        # The allocation must be deleted after the patch failure.
        self.api.baremetal.delete_allocation.assert_called_once_with(
            self.api.baremetal.wait_for_allocation.return_value)
        self.api.baremetal.patch_node.assert_called_once_with(
            expected, [{'path': '/instance_info/capabilities',
                        'op': 'add', 'value': {'answer': '42'}}])
    def test_node_update_unexpected_exception(self):
        """A non-SDK error propagates unchanged but still rolls back."""
        expected = self._node(properties={'local_gb': 100,
                                          'capabilities': {'answer': '42'}})
        self.api.baremetal.get_node.return_value = expected
        self.api.baremetal.nodes.return_value = [expected]
        self.api.baremetal.patch_node.side_effect = RuntimeError('boom')
        # Unlike SDKException, RuntimeError is not wrapped in
        # ReservationFailed.
        self.assertRaisesRegex(RuntimeError, 'boom',
                               self.pr.reserve_node, self.RSC,
                               capabilities={'answer': '42'})
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=[expected.id],
            resource_class=self.RSC, traits=None)
        self.api.baremetal.delete_allocation.assert_called_once_with(
            self.api.baremetal.wait_for_allocation.return_value)
        self.api.baremetal.patch_node.assert_called_once_with(
            expected, [{'path': '/instance_info/capabilities',
                        'op': 'add', 'value': {'answer': '42'}}])
def test_with_traits(self):
expected = self._node(properties={'local_gb': 100},
traits=['foo', 'answer:42'])
self.api.baremetal.get_node.return_value = expected
node = self.pr.reserve_node(self.RSC, traits=['foo', 'answer:42'])
self.assertIs(node, expected)
self.assertFalse(self.api.baremetal.patch_node.called)
    def test_custom_predicate(self):
        """The user-supplied predicate narrows down the candidate nodes."""
        nodes = [self._node(properties={'local_gb': i})
                 for i in (100, 150, 200)]
        self.api.baremetal.nodes.return_value = nodes[:]
        self.api.baremetal.get_node.return_value = nodes[1]
        # Only the 150 GB node satisfies 100 < local_gb < 200.
        node = self.pr.reserve_node(
            self.RSC,
            predicate=lambda node: 100 < node.properties['local_gb'] < 200)
        self.assertEqual(node, nodes[1])
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=[nodes[1].id],
            resource_class=self.RSC, traits=None)
        self.api.baremetal.get_node.assert_called_once_with(
            self.api.baremetal.wait_for_allocation.return_value.node_id)
        self.assertFalse(self.api.baremetal.patch_node.called)
def test_custom_predicate_false(self):
nodes = [self._node() for _ in range(3)]
self.api.baremetal.nodes.return_value = nodes[:]
self.assertRaisesRegex(exceptions.CustomPredicateFailed,
'custom predicate',
self.pr.reserve_node,
self.RSC,
predicate=lambda node: False)
self.assertFalse(self.api.baremetal.update_node.called)
    def test_provided_node(self):
        """Reserving with explicit candidates skips listing all nodes."""
        nodes = [self._node()]
        self.api.baremetal.get_node.return_value = nodes[0]
        node = self.pr.reserve_node(self.RSC, candidates=nodes)
        self.assertEqual(node, nodes[0])
        # The node enumeration API must not be hit when candidates are given.
        self.assertFalse(self.api.baremetal.nodes.called)
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=[nodes[0].id],
            resource_class=self.RSC, traits=None)
        self.api.baremetal.get_node.assert_called_once_with(
            self.api.baremetal.wait_for_allocation.return_value.node_id)
        self.assertFalse(self.api.baremetal.patch_node.called)
    def test_provided_nodes(self):
        """All provided candidates are passed to the allocation by ID."""
        nodes = [self._node(id=1), self._node(id=2)]
        self.api.baremetal.get_node.return_value = nodes[0]
        node = self.pr.reserve_node(self.RSC, candidates=nodes)
        self.assertEqual(node, nodes[0])
        self.assertFalse(self.api.baremetal.nodes.called)
        # Both candidate IDs go into the allocation request.
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=[1, 2],
            resource_class=self.RSC, traits=None)
        self.api.baremetal.get_node.assert_called_once_with(
            self.api.baremetal.wait_for_allocation.return_value.node_id)
        self.assertFalse(self.api.baremetal.patch_node.called)
def test_provided_node_not_found(self):
self.mock_get_node.side_effect = os_exc.ResourceNotFound
self.assertRaises(exceptions.InvalidNode, self.pr.reserve_node,
self.RSC, candidates=['node1'])
self.assertFalse(self.api.baremetal.nodes.called)
self.assertFalse(self.api.baremetal.create_allocation.called)
self.assertFalse(self.api.baremetal.patch_node.called)
def test_nodes_filtered(self):
nodes = [self._node(resource_class='banana'),
self._node(resource_class='compute'),
self._node(properties={'local_gb': 100,
'capabilities': 'cat:meow'},
resource_class='compute')]
self.api.baremetal.get_node.return_value = nodes[2]
node = self.pr.reserve_node('compute', candidates=nodes,
capabilities={'cat': 'meow'})
self.assertEqual(node, nodes[2])
self.assertFalse(self.api.baremetal.nodes.called)
self.api.baremetal.create_allocation.assert_called_once_with(
name=None, candidate_nodes=[nodes[0].id],
resource_class='compute', traits=None)
self.api.baremetal.get_node.assert_called_once_with(
self.api.baremetal.wait_for_allocation.return_value.node_id)
self.api.baremetal.patch_node.assert_called_once_with(
node, [{'path': '/instance_info/capabilities',
'op': 'add', 'value': {'cat': 'meow'}}])
    def test_nodes_filtered_by_conductor_group(self):
        """Candidates are filtered by conductor group and capabilities.

        Only nodes[2] is in conductor group 'loc1' AND has the
        'cat:meow' capability, so it is the sole allocation candidate.
        """
        nodes = [self._node(conductor_group='loc1'),
                 self._node(properties={'local_gb': 100,
                                        'capabilities': 'cat:meow'},
                            conductor_group=''),
                 self._node(properties={'local_gb': 100,
                                        'capabilities': 'cat:meow'},
                            conductor_group='loc1')]
        self.api.baremetal.get_node.return_value = nodes[2]
        node = self.pr.reserve_node(self.RSC,
                                    conductor_group='loc1',
                                    candidates=nodes,
                                    capabilities={'cat': 'meow'})
        self.assertEqual(node, nodes[2])
        self.assertFalse(self.api.baremetal.nodes.called)
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=None, candidate_nodes=[nodes[2].id],
            resource_class=self.RSC, traits=None)
        self.api.baremetal.get_node.assert_called_once_with(
            self.api.baremetal.wait_for_allocation.return_value.node_id)
        self.api.baremetal.patch_node.assert_called_once_with(
            node, [{'path': '/instance_info/capabilities',
                    'op': 'add', 'value': {'cat': 'meow'}}])
    def test_provided_nodes_no_match(self):
        """NodesNotFound when no candidate passes all the filters.

        Each candidate fails for a different reason: wrong resource
        class, wrong conductor group, in maintenance, or already has an
        instance.
        """
        nodes = [
            self._node(resource_class='compute', conductor_group='loc1'),
            self._node(resource_class='control', conductor_group='loc2'),
            self._node(resource_class='control', conductor_group='loc1',
                       is_maintenance=True),
            self._node(resource_class='control', conductor_group='loc1',
                       instance_id='abcd')
        ]
        self.assertRaises(exceptions.NodesNotFound,
                          self.pr.reserve_node, candidates=nodes,
                          resource_class='control', conductor_group='loc1')
        self.assertFalse(self.api.baremetal.nodes.called)
        self.assertFalse(self.api.baremetal.update_node.called)
class TestProvisionNode(Base):
    def setUp(self):
        """Prepare a reserved node with an allocation and stub helpers."""
        super(TestProvisionNode, self).setUp()
        self.image = self.api.image.find_image.return_value
        self.node.instance_id = '123456'
        self.node.allocation_id = '123456'
        self.allocation = mock.Mock(spec=['id', 'node_id', 'name'],
                                    id='123456',
                                    node_id=self.node.id)
        # 'name' clashes with the Mock constructor argument, so it has to
        # be assigned after construction.
        self.allocation.name = 'example.com'
        # instance_info expected to be written to the node on provisioning.
        self.instance_info = {
            'ramdisk': self.image.ramdisk_id,
            'kernel': self.image.kernel_id,
            'image_source': self.image.id,
            'root_gb': 99,  # 100 - 1
            'capabilities': {'boot_option': 'local'},
        }
        # extra expected to record ports created and attached by the
        # provisioner.
        self.extra = {
            'metalsmith_created_ports': [
                self.api.network.create_port.return_value.id
            ],
            'metalsmith_attached_ports': [
                self.api.network.create_port.return_value.id
            ],
        }
        # Stub config drive generation and network metadata creation.
        configdrive_patcher = mock.patch.object(
            instance_config.GenericConfig, 'generate', autospec=True)
        self.configdrive_mock = configdrive_patcher.start()
        self.addCleanup(configdrive_patcher.stop)
        create_network_metadata_patches = mock.patch.object(
            _network_metadata, 'create_network_metadata', autospec=True
        )
        self.network_metadata_mock = create_network_metadata_patches.start()
        self.addCleanup(create_network_metadata_patches.stop)
        # Any lookup resolves to the fixture node/allocation.
        self.api.baremetal.get_node.side_effect = lambda _n: self.node
        self.api.baremetal.get_allocation.side_effect = (
            lambda _a: self.allocation)
    def test_ok(self):
        """Happy path: port created and attached, node updated, deploy started."""
        inst = self.pr.provision_node(self.node, 'image',
                                      [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        # Port is named after the allocation (hostname) and the network.
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, instance_info=self.instance_info, extra=self.extra)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.configdrive_mock.assert_called_once_with(mock.ANY, self.node,
                                                      self.allocation.name,
                                                      mock.ANY)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(self.api.network.delete_port.called)
    def test_old_style_reservation(self):
        """A node reserved without an allocation still provisions.

        With no allocation, there is no hostname, so the created port
        gets no explicit name and the node's own name is used for the
        config drive.
        """
        self.node.allocation_id = None
        self.node.instance_id = self.node.id
        inst = self.pr.provision_node(self.node, 'image',
                                      [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, instance_info=self.instance_info, extra=self.extra)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.configdrive_mock.assert_called_once_with(mock.ANY, self.node,
                                                      self.node.name, mock.ANY)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(self.api.network.delete_port.called)
    def test_ok_without_nics(self):
        """Provisioning with no NICs creates and attaches no ports."""
        self.extra['metalsmith_created_ports'] = []
        self.extra['metalsmith_attached_ports'] = []
        inst = self.pr.provision_node(self.node, 'image')
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.assertFalse(self.api.network.create_port.called)
        self.assertFalse(self.api.network.find_port.called)
        self.assertFalse(self.api.baremetal.attach_vif_to_node.called)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, instance_info=self.instance_info, extra=self.extra)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_ok_with_source(self):
        """An explicit GlanceImage source behaves like a plain image name."""
        inst = self.pr.provision_node(self.node, sources.GlanceImage('image'),
                                      [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, instance_info=self.instance_info, extra=self.extra)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_with_config(self):
        """A user-supplied config object is used to generate the config drive."""
        config = mock.Mock(spec=instance_config.GenericConfig)
        inst = self.pr.provision_node(self.node, 'image',
                                      [{'network': 'network'}],
                                      config=config)
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        # The provided config, not the default one, generates the drive.
        config.generate.assert_called_once_with(self.node,
                                                self.allocation.name, mock.ANY)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, instance_info=self.instance_info, extra=self.extra)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_with_hostname_override(self):
        """An explicit hostname renames the allocation and the port."""
        self.allocation.name = None
        # First lookup (by hostname) misses, second (by the node's
        # allocation ID) returns the fixture allocation.
        self.api.baremetal.get_allocation.side_effect = [
            os_exc.ResourceNotFound(),
            self.allocation
        ]

        def _update(allocation, name):
            # Mimic the API: renaming returns the renamed allocation.
            allocation.name = name
            return allocation

        self.api.baremetal.update_allocation.side_effect = _update
        hostname = 'control-0.example.com'
        self.instance_info['display_name'] = hostname
        inst = self.pr.provision_node(self.node, 'image',
                                      [{'network': 'network'}],
                                      hostname=hostname)
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.assertIs(inst.allocation, self.allocation)
        self.api.baremetal.update_allocation.assert_called_once_with(
            self.allocation, name=hostname)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='control-0.example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, instance_info=self.instance_info, extra=self.extra)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.configdrive_mock.assert_called_once_with(mock.ANY, self.node,
                                                      hostname, mock.ANY)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_existing_hostname(self):
        """The allocation's existing name is used when no hostname is given."""
        hostname = 'control-0.example.com'
        self.allocation.name = hostname
        inst = self.pr.provision_node(self.node, 'image',
                                      [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.assertIs(inst.allocation, self.allocation)
        # No rename needed: the allocation already carries the hostname.
        self.assertFalse(self.api.baremetal.update_allocation.called)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='control-0.example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, instance_info=self.instance_info, extra=self.extra)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.configdrive_mock.assert_called_once_with(mock.ANY, self.node,
                                                      hostname, mock.ANY)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_existing_hostname_match(self):
        """A requested hostname matching the allocation name is a no-op rename."""
        hostname = 'control-0.example.com'
        self.instance_info['display_name'] = hostname
        self.allocation.name = hostname
        inst = self.pr.provision_node(self.node, 'image',
                                      [{'network': 'network'}],
                                      hostname=hostname)
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.assertIs(inst.allocation, self.allocation)
        # Names already agree, so the allocation is left untouched.
        self.assertFalse(self.api.baremetal.update_allocation.called)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='control-0.example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, instance_info=self.instance_info, extra=self.extra)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.configdrive_mock.assert_called_once_with(mock.ANY, self.node,
                                                      hostname, mock.ANY)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_existing_hostname_mismatch(self):
        """A hostname conflicting with the allocation's name is rejected."""
        self.api.baremetal.get_allocation.side_effect = [
            # No allocation with requested hostname
            os_exc.ResourceNotFound(),
            # Allocation associated with the node
            self.allocation
        ]
        self.allocation.name = 'control-0.example.com'
        self.assertRaisesRegex(exceptions.InvalidNode,
                               'does not match the expected hostname',
                               self.pr.provision_node,
                               self.node, 'image', [{'network': 'network'}],
                               hostname='control-1.example.com')
        # Both lookups happened: by hostname, then by allocation ID.
        self.api.baremetal.get_allocation.assert_has_calls([
            mock.call('control-1.example.com'),
            mock.call(self.node.allocation_id),
        ])
        # Nothing was provisioned or cleaned up after the early failure.
        self.assertFalse(self.api.baremetal.create_allocation.called)
        self.assertFalse(self.api.baremetal.update_node.called)
        self.assertFalse(self.api.baremetal.set_node_provision_state.called)
        self.assertFalse(self.api.baremetal.delete_allocation.called)
    def test_node_name_as_hostname(self):
        """An unnamed allocation is renamed to the node's own name."""
        self.allocation.name = None

        def _update(allocation, name):
            # Mimic the API: renaming returns the renamed allocation.
            allocation.name = name
            return allocation

        self.api.baremetal.update_allocation.side_effect = _update
        inst = self.pr.provision_node(self.node, 'image',
                                      [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.assertIs(inst.allocation, self.allocation)
        self.api.baremetal.update_allocation.assert_called_once_with(
            self.allocation, name=self.node.name)
        # Port name derives from the node name set up in the base fixture;
        # presumably self.node.name == 'control-0' — see Base.setUp.
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='control-0-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra=self.extra, instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.configdrive_mock.assert_called_once_with(mock.ANY, self.node,
                                                      self.node.name, mock.ANY)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_name_not_valid_hostname(self):
        """A node name that is not a valid hostname falls back to the node ID.

        'node_1' contains an underscore, so the node ID (presumably
        '000...' from the base fixture) is used for the port name and
        the config drive instead.
        """
        self.node.name = 'node_1'
        self.allocation.name = None

        def _update(allocation, name):
            # Mimic the API: renaming returns the renamed allocation.
            allocation.name = name
            return allocation

        self.api.baremetal.update_allocation.side_effect = _update
        inst = self.pr.provision_node(self.node, 'image',
                                      [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.assertIs(inst.allocation, self.allocation)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='000-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra=self.extra, instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.configdrive_mock.assert_called_once_with(mock.ANY, self.node,
                                                      self.node.id, mock.ANY)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_unreserved(self):
        """Provisioning an unreserved node creates an allocation first."""
        self.node.instance_id = None
        self.node.allocation_id = None
        self.api.baremetal.get_node.return_value = self.node
        self.pr.provision_node(self.node, 'image', [{'network': 'network'}])
        # The implicit allocation is named after the node itself.
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=self.node.name, candidate_nodes=[self.node.id],
            resource_class=self.node.resource_class, traits=None)
        self.api.baremetal.get_node.assert_has_calls([
            # After allocation
            mock.call(
                self.api.baremetal.wait_for_allocation.return_value.node_id),
            # After deployment
            mock.call(self.node.id)
        ])
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='%s-%s' % (
                self.api.baremetal.wait_for_allocation.return_value.name,
                self.api.network.find_network.return_value.name
            ))
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, instance_info=self.instance_info, extra=self.extra)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_unreserved_with_hostname(self):
        """An unreserved node with a hostname names the allocation after it."""
        self.node.instance_id = None
        self.node.allocation_id = None
        self.api.baremetal.get_node.return_value = self.node
        hostname = 'control-2.example.com'
        self.instance_info['display_name'] = hostname
        self.pr.provision_node(self.node, 'image', [{'network': 'network'}],
                               hostname=hostname)
        self.api.baremetal.create_allocation.assert_called_once_with(
            name=hostname, candidate_nodes=[self.node.id],
            resource_class=self.node.resource_class, traits=None)
        self.api.baremetal.get_node.assert_has_calls([
            # After allocation
            mock.call(
                self.api.baremetal.wait_for_allocation.return_value.node_id),
            # After deployment
            mock.call(self.node.id)
        ])
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='%s-%s' % (
                self.api.baremetal.wait_for_allocation.return_value.name,
                self.api.network.find_network.return_value.name
            ))
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, instance_info=self.instance_info, extra=self.extra)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
def test_unreserved_without_resource_class(self):
self.node.instance_id = None
self.node.allocation_id = None
self.node.resource_class = None
self.api.baremetal.get_node.return_value = self.node
self.assertRaisesRegex(exceptions.InvalidNode,
'does not have a resource class',
self.pr.provision_node,
self.node, 'image', [{'network': 'network'}])
self.assertFalse(self.api.baremetal.create_allocation.called)
self.assertFalse(self.api.baremetal.update_node.called)
self.assertFalse(self.api.baremetal.set_node_provision_state.called)
self.assertFalse(self.api.baremetal.delete_allocation.called)
    def test_with_ports(self):
        """Pre-existing ports are looked up, bound and attached, not created."""
        port_ids = [self.api.network.find_port.return_value.id] * 2
        self.pr.provision_node(self.node, 'image',
                               [{'port': 'port1'}, {'port': 'port2'}])
        self.assertFalse(self.api.network.create_port.called)
        # Each found port is bound to the node.
        self.api.network.update_port.assert_has_calls([
            mock.call(self.api.network.find_port.return_value,
                      binding_host_id=self.node.id),
            mock.call(self.api.network.find_port.return_value,
                      binding_host_id=self.node.id)])
        self.api.baremetal.attach_vif_to_node.assert_called_with(
            self.node, self.api.network.find_port.return_value.id)
        self.assertEqual(2, self.api.baremetal.attach_vif_to_node.call_count)
        self.assertEqual([mock.call('port1', ignore_missing=False),
                          mock.call('port2', ignore_missing=False)],
                         self.api.network.find_port.call_args_list)
        # Ports were attached, not created, so created_ports stays empty.
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra={'metalsmith_created_ports': [],
                              'metalsmith_attached_ports': port_ids},
            instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_with_ip(self):
        """A fixed IP request is forwarded to port creation as fixed_ips."""
        inst = self.pr.provision_node(self.node, 'image',
                                      [{'network': 'network',
                                        'fixed_ip': '10.0.0.2'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='example.com-%s' %
            self.api.network.find_network.return_value.name,
            fixed_ips=[{'ip_address': '10.0.0.2'}])
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra=self.extra, instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_with_subnet(self):
        """A subnet NIC resolves its network and requests an IP on the subnet."""
        inst = self.pr.provision_node(self.node, 'image',
                                      [{'subnet': 'subnet'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        # The network comes from get_network (via the subnet's network_id),
        # not from find_network as in the plain-network case.
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.get_network.return_value.id,
            name='example.com-%s' %
            self.api.network.get_network.return_value.name,
            fixed_ips=[{'subnet_id':
                        self.api.network.find_subnet.return_value.id}])
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra=self.extra, instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_whole_disk(self):
        """A whole-disk image drops kernel/ramdisk from instance_info."""
        self.image.kernel_id = None
        self.image.ramdisk_id = None
        del self.instance_info['kernel']
        del self.instance_info['ramdisk']
        # Ensure stale values clean up
        self.node.instance_info['kernel'] = 'bad value'
        self.node.instance_info['ramdisk'] = 'bad value'
        self.pr.provision_node(self.node, 'image', [{'network': 'network'}])
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        # instance_info no longer contains kernel/ramdisk entries.
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra=self.extra, instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_with_http_and_checksum_whole_disk(self):
        """An HTTP whole-disk source bypasses Glance and sets a checksum."""
        self.instance_info['image_source'] = 'https://host/image'
        self.instance_info['image_checksum'] = 'abcd'
        del self.instance_info['kernel']
        del self.instance_info['ramdisk']
        inst = self.pr.provision_node(
            self.node,
            sources.HttpWholeDiskImage('https://host/image', checksum='abcd'),
            [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        # No image service lookup for a direct HTTP source.
        self.assertFalse(self.api.image.find_image.called)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra=self.extra, instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    @mock.patch.object(requests, 'get', autospec=True)
    def test_with_http_and_checksum_url(self, mock_get):
        """The checksum is fetched from a checksum URL and matched by filename.

        The fetched checksum file lists two entries; the one whose file
        name ('image') matches the image URL provides the checksum.
        """
        self.instance_info['image_source'] = 'https://host/image'
        self.instance_info['image_checksum'] = 'abcd'
        del self.instance_info['kernel']
        del self.instance_info['ramdisk']
        mock_get.return_value.text = """
defg *something else
abcd image
"""
        inst = self.pr.provision_node(
            self.node,
            sources.HttpWholeDiskImage('https://host/image',
                                       checksum_url='https://host/checksums'),
            [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.assertFalse(self.api.image.find_image.called)
        mock_get.assert_called_once_with('https://host/checksums')
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra=self.extra, instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_with_http_and_checksum_partition(self):
        """An HTTP partition source carries kernel and ramdisk URLs."""
        self.instance_info['image_source'] = 'https://host/image'
        self.instance_info['image_checksum'] = 'abcd'
        self.instance_info['kernel'] = 'https://host/kernel'
        self.instance_info['ramdisk'] = 'https://host/ramdisk'
        inst = self.pr.provision_node(
            self.node,
            sources.HttpPartitionImage('https://host/image',
                                       checksum='abcd',
                                       kernel_url='https://host/kernel',
                                       ramdisk_url='https://host/ramdisk'),
            [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.assertFalse(self.api.image.find_image.called)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra=self.extra, instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_with_file_whole_disk(self):
        """A file:// whole-disk source is used as-is, without Glance."""
        self.instance_info['image_source'] = 'file:///foo/img'
        del self.instance_info['kernel']
        del self.instance_info['ramdisk']
        inst = self.pr.provision_node(
            self.node,
            sources.FileWholeDiskImage('file:///foo/img'),
            [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.assertFalse(self.api.image.find_image.called)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra=self.extra, instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
    def test_with_file_partition(self):
        """A file partition source gets file:// URLs for image/kernel/ramdisk.

        Plain paths passed to FilePartitionImage end up as file:// URLs
        in instance_info.
        """
        self.instance_info['image_source'] = 'file:///foo/img'
        self.instance_info['kernel'] = 'file:///foo/vmlinuz'
        self.instance_info['ramdisk'] = 'file:///foo/initrd'
        inst = self.pr.provision_node(
            self.node,
            sources.FilePartitionImage('/foo/img',
                                       '/foo/vmlinuz',
                                       '/foo/initrd'),
            [{'network': 'network'}])
        self.assertEqual(inst.uuid, self.node.id)
        self.assertEqual(inst.node, self.node)
        self.assertFalse(self.api.image.find_image.called)
        self.api.network.create_port.assert_called_once_with(
            binding_host_id=self.node.id,
            network_id=self.api.network.find_network.return_value.id,
            name='example.com-%s' %
            self.api.network.find_network.return_value.name)
        self.api.baremetal.attach_vif_to_node.assert_called_once_with(
            self.node, self.api.network.create_port.return_value.id)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra=self.extra, instance_info=self.instance_info)
        self.api.baremetal.validate_node.assert_called_once_with(self.node)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'active', config_drive=mock.ANY)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
def test_with_root_size(self):
    """root_size_gb is propagated into instance_info['root_gb']."""
    self.instance_info['root_gb'] = 50
    self.pr.provision_node(self.node, 'image', [{'network': 'network'}],
                           root_size_gb=50)
    self.api.network.create_port.assert_called_once_with(
        binding_host_id=self.node.id,
        network_id=self.api.network.find_network.return_value.id,
        name='example.com-%s' %
        self.api.network.find_network.return_value.name)
    self.api.baremetal.attach_vif_to_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra=self.extra, instance_info=self.instance_info)
    self.api.baremetal.validate_node.assert_called_once_with(self.node)
    self.api.baremetal.set_node_provision_state.assert_called_once_with(
        self.node, 'active', config_drive=mock.ANY)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
def test_with_swap_size(self):
    """swap_size_mb is propagated into instance_info['swap_mb']."""
    self.instance_info['swap_mb'] = 4096
    self.pr.provision_node(self.node, 'image', [{'network': 'network'}],
                           swap_size_mb=4096)
    self.api.network.create_port.assert_called_once_with(
        binding_host_id=self.node.id,
        network_id=self.api.network.find_network.return_value.id,
        name='example.com-%s' %
        self.api.network.find_network.return_value.name)
    self.api.baremetal.attach_vif_to_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra=self.extra, instance_info=self.instance_info)
    self.api.baremetal.validate_node.assert_called_once_with(self.node)
    self.api.baremetal.set_node_provision_state.assert_called_once_with(
        self.node, 'active', config_drive=mock.ANY)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
def test_with_capabilities(self):
    """Explicit capabilities are merged with boot_option=local."""
    inst = self.pr.provision_node(self.node, 'image',
                                  [{'network': 'network'}],
                                  capabilities={'answer': '42'})
    # Expected instance_info: supplied capabilities plus the default
    # boot_option added by the provisioner.
    self.instance_info['capabilities'] = {'boot_option': 'local',
                                          'answer': '42'}
    self.assertEqual(inst.uuid, self.node.id)
    self.assertEqual(inst.node, self.node)
    self.api.network.create_port.assert_called_once_with(
        binding_host_id=self.node.id,
        network_id=self.api.network.find_network.return_value.id,
        name='example.com-%s' %
        self.api.network.find_network.return_value.name)
    self.api.baremetal.attach_vif_to_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra=self.extra, instance_info=self.instance_info)
    self.api.baremetal.validate_node.assert_called_once_with(self.node)
    self.api.baremetal.set_node_provision_state.assert_called_once_with(
        self.node, 'active', config_drive=mock.ANY)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
def test_with_existing_capabilities(self):
    """Capabilities already on the node are kept when none are passed."""
    self.node.instance_info['capabilities'] = {'answer': '42'}
    inst = self.pr.provision_node(self.node, 'image',
                                  [{'network': 'network'}])
    self.instance_info['capabilities'] = {'boot_option': 'local',
                                          'answer': '42'}
    self.assertEqual(inst.uuid, self.node.id)
    self.assertEqual(inst.node, self.node)
    self.api.network.create_port.assert_called_once_with(
        binding_host_id=self.node.id,
        network_id=self.api.network.find_network.return_value.id,
        name='example.com-%s' %
        self.api.network.find_network.return_value.name)
    self.api.baremetal.attach_vif_to_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra=self.extra, instance_info=self.instance_info)
    self.api.baremetal.validate_node.assert_called_once_with(self.node)
    self.api.baremetal.set_node_provision_state.assert_called_once_with(
        self.node, 'active', config_drive=mock.ANY)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
def test_override_existing_capabilities(self):
    """Explicitly passed capabilities replace pre-existing ones entirely."""
    self.node.instance_info['capabilities'] = {'answer': '1',
                                               'cat': 'meow'}
    inst = self.pr.provision_node(self.node, 'image',
                                  [{'network': 'network'}],
                                  capabilities={'answer': '42'})
    # Old values ('answer': '1', 'cat') are gone, not merged.
    self.instance_info['capabilities'] = {'boot_option': 'local',
                                          'answer': '42'}
    self.assertEqual(inst.uuid, self.node.id)
    self.assertEqual(inst.node, self.node)
    self.api.network.create_port.assert_called_once_with(
        binding_host_id=self.node.id,
        network_id=self.api.network.find_network.return_value.id,
        name='example.com-%s' %
        self.api.network.find_network.return_value.name)
    self.api.baremetal.attach_vif_to_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra=self.extra, instance_info=self.instance_info)
    self.api.baremetal.validate_node.assert_called_once_with(self.node)
    self.api.baremetal.set_node_provision_state.assert_called_once_with(
        self.node, 'active', config_drive=mock.ANY)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
def test_with_traits(self):
    """Requested traits end up in instance_info['traits']."""
    inst = self.pr.provision_node(self.node, 'image',
                                  [{'network': 'network'}],
                                  traits=['1', '2'])
    self.instance_info['traits'] = ['1', '2']
    self.assertEqual(inst.uuid, self.node.id)
    self.assertEqual(inst.node, self.node)
    self.api.network.create_port.assert_called_once_with(
        binding_host_id=self.node.id,
        network_id=self.api.network.find_network.return_value.id,
        name='example.com-%s' %
        self.api.network.find_network.return_value.name)
    self.api.baremetal.attach_vif_to_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra=self.extra, instance_info=self.instance_info)
    self.api.baremetal.validate_node.assert_called_once_with(self.node)
    self.api.baremetal.set_node_provision_state.assert_called_once_with(
        self.node, 'active', config_drive=mock.ANY)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
def test_override_existing_traits(self):
    """Traits passed to provision_node replace the node's own traits."""
    self.node.traits = ['42']
    inst = self.pr.provision_node(self.node, 'image',
                                  [{'network': 'network'}],
                                  traits=['1', '2'])
    self.instance_info['traits'] = ['1', '2']
    self.assertEqual(inst.uuid, self.node.id)
    self.assertEqual(inst.node, self.node)
    self.api.network.create_port.assert_called_once_with(
        binding_host_id=self.node.id,
        network_id=self.api.network.find_network.return_value.id,
        name='example.com-%s' %
        self.api.network.find_network.return_value.name)
    self.api.baremetal.attach_vif_to_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra=self.extra, instance_info=self.instance_info)
    self.api.baremetal.validate_node.assert_called_once_with(self.node)
    self.api.baremetal.set_node_provision_state.assert_called_once_with(
        self.node, 'active', config_drive=mock.ANY)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
def test_with_wait(self):
    """wait=<seconds> triggers wait_for_nodes_provision_state."""
    # Port with one real fixed IP and one empty record, exercising the
    # IP extraction done while waiting.
    self.api.network.find_port.return_value = mock.Mock(
        spec=['fixed_ips'],
        fixed_ips=[{'ip_address': '192.168.1.5'}, {}]
    )
    self.pr.provision_node(self.node, 'image', [{'network': 'network'}],
                           wait=3600)
    self.api.network.create_port.assert_called_once_with(
        binding_host_id=self.node.id,
        network_id=self.api.network.find_network.return_value.id,
        name='example.com-%s' %
        self.api.network.find_network.return_value.name)
    self.api.baremetal.attach_vif_to_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra=self.extra, instance_info=self.instance_info)
    self.api.baremetal.validate_node.assert_called_once_with(self.node)
    self.api.baremetal.set_node_provision_state.assert_called_once_with(
        self.node, 'active', config_drive=mock.ANY)
    # The wait helper is invoked with the requested timeout.
    wait_mock = self.api.baremetal.wait_for_nodes_provision_state
    wait_mock.assert_called_once_with([self.node], 'active',
                                      timeout=3600)
    self.assertFalse(self.api.network.delete_port.called)
def test_dry_run(self):
    """In dry-run mode no API call with side effects is made."""
    self.pr._dry_run = True
    self.pr.provision_node(self.node, 'image', [{'network': 'network'}])
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.attach_vif_to_node.called)
    self.assertFalse(self.api.baremetal.update_node.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
def test_unreserve_dry_run(self):
    """Dry run on an unreserved node does not even create an allocation."""
    self.pr._dry_run = True
    self.node.allocation_id = None
    self.node.instance_id = None
    self.pr.provision_node(self.node, 'image', [{'network': 'network'}])
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.create_allocation.called)
    self.assertFalse(self.api.baremetal.attach_vif_to_node.called)
    self.assertFalse(self.api.baremetal.update_node.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
def test_deploy_failure(self):
    """A failed deploy rolls back ports, VIFs and the allocation."""
    self.api.baremetal.set_node_provision_state.side_effect = (
        RuntimeError('boom'))
    self.assertRaisesRegex(RuntimeError, 'boom',
                           self.pr.provision_node, self.node,
                           'image', [{'network': 'n1'}, {'port': 'p1'}],
                           wait=3600)
    # Clean-up resets extra and instance_info on the node.
    self.api.baremetal.update_node.assert_any_call(
        self.node, extra={}, instance_info={})
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    # Only the port created by us is deleted; the pre-existing one is
    # merely detached.
    self.api.network.delete_port.assert_called_once_with(
        self.api.network.create_port.return_value.id,
        ignore_missing=False)
    calls = [
        mock.call(self.node,
                  self.api.network.create_port.return_value.id),
        mock.call(self.node, self.api.network.find_port.return_value.id)
    ]
    self.api.baremetal.detach_vif_from_node.assert_has_calls(
        calls, any_order=True)
    self.api.baremetal.delete_allocation.assert_called_once_with(
        self.allocation.id)
def test_deploy_failure_without_allocation(self):
    """Roll-back on a node without an allocation clears instance_id only."""
    self.node.instance_id = None
    self.node.allocation_id = None
    self.api.baremetal.set_node_provision_state.side_effect = (
        RuntimeError('boom'))
    self.assertRaisesRegex(RuntimeError, 'boom',
                           self.pr.provision_node, self.node,
                           'image', [{'network': 'n1'}, {'port': 'p1'}],
                           wait=3600)
    # instance_id=None is passed explicitly since there is no allocation
    # to carry the reservation.
    self.api.baremetal.update_node.assert_any_call(
        self.node, extra={}, instance_info={}, instance_id=None)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.api.network.delete_port.assert_called_once_with(
        self.api.network.create_port.return_value.id,
        ignore_missing=False)
    calls = [
        mock.call(self.node,
                  self.api.network.create_port.return_value.id),
        mock.call(self.node, self.api.network.find_port.return_value.id)
    ]
    self.api.baremetal.detach_vif_from_node.assert_has_calls(
        calls, any_order=True)
    self.assertFalse(self.api.baremetal.delete_allocation.called)
def test_deploy_failure_no_cleanup(self):
    """clean_up_on_failure=False leaves ports, VIFs and allocation alone."""
    self.node.allocation_id = 'id2'
    self.api.baremetal.set_node_provision_state.side_effect = (
        RuntimeError('boom'))
    self.assertRaisesRegex(RuntimeError, 'boom',
                           self.pr.provision_node, self.node,
                           'image', [{'network': 'n1'}, {'port': 'p1'}],
                           wait=3600, clean_up_on_failure=False)
    # Only the initial update happened; no roll-back update.
    self.assertEqual(1, self.api.baremetal.update_node.call_count)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
    self.assertFalse(self.api.baremetal.detach_vif_from_node.called)
    self.assertFalse(self.api.baremetal.delete_allocation.called)
def test_port_creation_failure(self):
    """Failure to create a port rolls back node fields and allocation."""
    self.api.network.create_port.side_effect = RuntimeError('boom')
    self.assertRaisesRegex(RuntimeError, 'boom',
                           self.pr.provision_node, self.node,
                           'image', [{'network': 'network'}], wait=3600)
    self.api.baremetal.delete_allocation.assert_called_once_with(
        self.allocation.id)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra={}, instance_info={})
    # Nothing to delete/detach: the port was never created.
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
    self.assertFalse(self.api.network.delete_port.called)
    self.assertFalse(self.api.baremetal.detach_vif_from_node.called)
def test_port_attach_failure(self):
    """VIF attach failure deletes the created port and detaches it."""
    self.api.baremetal.attach_vif_to_node.side_effect = (
        RuntimeError('boom'))
    self.assertRaisesRegex(RuntimeError, 'boom',
                           self.pr.provision_node, self.node,
                           'image', [{'network': 'network'}], wait=3600)
    self.api.baremetal.delete_allocation.assert_called_once_with(
        self.allocation.id)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra={}, instance_info={})
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
    self.api.network.delete_port.assert_called_once_with(
        self.api.network.create_port.return_value.id,
        ignore_missing=False)
    self.api.baremetal.detach_vif_from_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
def test_failure_during_port_deletion(self):
    """A secondary failure during port clean-up does not mask the
    original deploy error ('boom' is still raised)."""
    self.api.network.delete_port.side_effect = AssertionError()
    self.api.baremetal.set_node_provision_state.side_effect = (
        RuntimeError('boom'))
    self.assertRaisesRegex(RuntimeError, 'boom',
                           self.pr.provision_node, self.node,
                           'image', [{'network': 'network'}],
                           wait=3600)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.api.network.delete_port.assert_called_once_with(
        self.api.network.create_port.return_value.id,
        ignore_missing=False)
    self.api.baremetal.detach_vif_from_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
def _test_failure_during_deploy_failure(self):
    """Shared helper: a clean-up step fails (side effect installed by
    the caller) after a deploy failure; the original 'boom' error must
    still surface and clean-up calls must still be attempted."""
    self.api.baremetal.set_node_provision_state.side_effect = (
        RuntimeError('boom'))
    self.assertRaisesRegex(RuntimeError, 'boom',
                           self.pr.provision_node, self.node,
                           'image', [{'network': 'network'}],
                           wait=3600)
    self.assertFalse(
        self.api.baremetal.wait_for_nodes_provision_state.called)
    self.api.network.delete_port.assert_called_once_with(
        self.api.network.create_port.return_value.id,
        ignore_missing=False)
    self.api.baremetal.detach_vif_from_node.assert_called_once_with(
        self.node, self.api.network.create_port.return_value.id)
def test_detach_failed_after_deploy_failure(self):
    """VIF detach failure during clean-up does not mask the deploy error."""
    self.api.baremetal.detach_vif_from_node.side_effect = AssertionError()
    self._test_failure_during_deploy_failure()
def test_update_failed_after_deploy_failure(self):
    """Node update failure during clean-up does not mask the deploy error.

    The first update (before deploy) succeeds; the roll-back update raises.
    """
    self.api.baremetal.update_node.side_effect = [self.node,
                                                  AssertionError()]
    self._test_failure_during_deploy_failure()
def test_deallocation_failed_after_deploy_failure(self):
    """Allocation deletion failure during clean-up does not mask the error."""
    self.api.baremetal.delete_allocation.side_effect = AssertionError()
    self._test_failure_during_deploy_failure()
def test_wait_failure(self):
    """A failure while waiting does NOT trigger port clean-up: the deploy
    itself was started successfully."""
    self.api.baremetal.wait_for_nodes_provision_state.side_effect = (
        RuntimeError('boom'))
    self.assertRaisesRegex(RuntimeError, 'boom',
                           self.pr.provision_node, self.node,
                           'image', [{'network': 'network'}], wait=3600)
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra=self.extra, instance_info=self.instance_info)
    self.api.baremetal.set_node_provision_state.assert_called_once_with(
        self.node, 'active', config_drive=mock.ANY)
    self.assertFalse(self.api.network.delete_port.called)
    self.assertFalse(self.api.baremetal.detach_vif_from_node.called)
def test_missing_image(self):
    """A missing Glance image is reported as InvalidImage and the node
    fields are rolled back before deploy is attempted."""
    self.api.image.find_image.side_effect = os_exc.ResourceNotFound(
        'Not found')
    self.assertRaisesRegex(exceptions.InvalidImage, 'Not found',
                           self.pr.provision_node,
                           self.node, 'image', [{'network': 'network'}])
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra={}, instance_info={})
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
@mock.patch.object(requests, 'get', autospec=True)
def test_no_checksum_with_http_image(self, mock_get):
    """A checksum file that has no entry for the image URL fails with
    InvalidImage ('no image checksum')."""
    self.instance_info['image_source'] = 'https://host/image'
    self.instance_info['image_checksum'] = 'abcd'
    del self.instance_info['kernel']
    del self.instance_info['ramdisk']
    # Valid checksum-file format, but neither line refers to our image.
    mock_get.return_value.text = """
    defg *something else
    abcd and-not-image-again
    """
    self.assertRaisesRegex(exceptions.InvalidImage,
                           'no image checksum',
                           self.pr.provision_node,
                           self.node,
                           sources.HttpWholeDiskImage(
                               'https://host/image',
                               checksum_url='https://host/checksums'),
                           [{'network': 'network'}])
    self.assertFalse(self.api.image.find_image.called)
    mock_get.assert_called_once_with('https://host/checksums')
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra={}, instance_info={})
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
@mock.patch.object(requests, 'get', autospec=True)
def test_malformed_checksum_with_http_image(self, mock_get):
    """A checksum URL returning non-checksum content (e.g. an HTML error
    page) fails with InvalidImage ('Invalid checksum file')."""
    self.instance_info['image_source'] = 'https://host/image'
    self.instance_info['image_checksum'] = 'abcd'
    del self.instance_info['kernel']
    del self.instance_info['ramdisk']
    mock_get.return_value.text = """
    <html>
    <p>I am not a checksum file!</p>
    </html>"""
    self.assertRaisesRegex(exceptions.InvalidImage,
                           'Invalid checksum file',
                           self.pr.provision_node,
                           self.node,
                           sources.HttpWholeDiskImage(
                               'https://host/image',
                               checksum_url='https://host/checksums'),
                           [{'network': 'network'}])
    self.assertFalse(self.api.image.find_image.called)
    mock_get.assert_called_once_with('https://host/checksums')
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra={}, instance_info={})
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
@mock.patch.object(requests, 'get', autospec=True)
def test_cannot_download_checksum_with_http_image(self, mock_get):
    """An HTTP error fetching the checksum file fails with InvalidImage
    ('Cannot download checksum file')."""
    self.instance_info['image_source'] = 'https://host/image'
    self.instance_info['image_checksum'] = 'abcd'
    del self.instance_info['kernel']
    del self.instance_info['ramdisk']
    # raise_for_status raising simulates a non-2xx response.
    mock_get.return_value.raise_for_status.side_effect = (
        requests.RequestException("boom"))
    self.assertRaisesRegex(exceptions.InvalidImage,
                           'Cannot download checksum file',
                           self.pr.provision_node,
                           self.node,
                           sources.HttpWholeDiskImage(
                               'https://host/image',
                               checksum_url='https://host/checksums'),
                           [{'network': 'network'}])
    self.assertFalse(self.api.image.find_image.called)
    mock_get.assert_called_once_with('https://host/checksums')
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra={}, instance_info={})
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_network(self):
    """An unresolvable network becomes InvalidNIC; node is rolled back."""
    self.api.network.find_network.side_effect = os_exc.SDKException(
        'Not found')
    self.assertRaisesRegex(exceptions.InvalidNIC, 'Not found',
                           self.pr.provision_node,
                           self.node, 'image', [{'network': 'network'}])
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra={}, instance_info={})
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_port(self):
    """An unresolvable port becomes InvalidNIC; node is rolled back."""
    self.api.network.find_port.side_effect = os_exc.SDKException(
        'Not found')
    self.assertRaisesRegex(exceptions.InvalidNIC, 'Not found',
                           self.pr.provision_node,
                           self.node, 'image', [{'port': 'port1'}])
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra={}, instance_info={})
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_subnet(self):
    """An unresolvable subnet becomes InvalidNIC; node is rolled back."""
    self.api.network.find_subnet.side_effect = os_exc.SDKException(
        'Not found')
    self.assertRaisesRegex(exceptions.InvalidNIC, 'Not found',
                           self.pr.provision_node,
                           self.node, 'image', [{'subnet': 'subnet'}])
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra={}, instance_info={})
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_network_of_subnet(self):
    """Failure to fetch the subnet's network also maps to InvalidNIC."""
    # NOTE(dtantsur): I doubt this can happen, maybe some race?
    self.api.network.get_network.side_effect = os_exc.SDKException(
        'Not found')
    self.assertRaisesRegex(exceptions.InvalidNIC, 'Not found',
                           self.pr.provision_node,
                           self.node, 'image', [{'subnet': 'subnet'}])
    self.api.baremetal.update_node.assert_called_once_with(
        self.node, extra={}, instance_info={})
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_no_local_gb(self):
    """Missing local_gb property means the root disk size is unknown."""
    self.node.properties = {}
    self.assertRaises(exceptions.UnknownRootDiskSize,
                      self.pr.provision_node,
                      self.node, 'image', [{'network': 'network'}])
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_local_gb(self):
    """Non-numeric, negative or wrongly-typed local_gb values all raise
    UnknownRootDiskSize."""
    for value in (None, 'meow', -42, []):
        self.node.properties = {'local_gb': value}
        self.assertRaises(exceptions.UnknownRootDiskSize,
                          self.pr.provision_node,
                          self.node, 'image', [{'network': 'network'}])
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_root_size_gb(self):
    """root_size_gb must be an int (TypeError) and positive (ValueError)."""
    self.assertRaises(TypeError,
                      self.pr.provision_node,
                      self.node, 'image', [{'network': 'network'}],
                      root_size_gb={})
    self.assertRaises(ValueError,
                      self.pr.provision_node,
                      self.node, 'image', [{'network': 'network'}],
                      root_size_gb=0)
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_nics(self):
    """The nics argument must be a list."""
    self.assertRaisesRegex(TypeError, 'must be a list',
                           self.pr.provision_node,
                           self.node, 'image', 42)
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.attach_vif_to_node.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_nic(self):
    """Each NIC record must be a dict (also checked inside a list)."""
    for item in ('string', ['string']):
        self.assertRaisesRegex(TypeError, 'must be a dict',
                               self.pr.provision_node,
                               self.node, 'image', item)
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.attach_vif_to_node.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_nic_type(self):
    """A NIC record key other than network/port/subnet is rejected."""
    self.assertRaisesRegex(exceptions.InvalidNIC,
                           'Unknown NIC record type',
                           self.pr.provision_node,
                           self.node, 'image', [{'foo': 'bar'}])
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.attach_vif_to_node.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_nic_type_fields(self):
    """Extra or conflicting keys in a NIC record are rejected."""
    for item in ({'port': '1234', 'foo': 'bar'},
                 {'port': '1234', 'network': '4321'},
                 {'network': '4321', 'foo': 'bar'},
                 {'subnet': '4321', 'foo': 'bar'}):
        self.assertRaisesRegex(exceptions.InvalidNIC,
                               'Unexpected fields',
                               self.pr.provision_node,
                               self.node, 'image', [item])
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.attach_vif_to_node.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_hostname(self):
    """A hostname with invalid characters (underscore) is rejected."""
    self.assertRaisesRegex(ValueError, 'n_1 cannot be used as a hostname',
                           self.pr.provision_node,
                           self.node, 'image', [{'port': 'port1'}],
                           hostname='n_1')
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_duplicate_hostname(self):
    """A hostname already used by another node's allocation is rejected."""
    allocation = mock.Mock(spec=['id', 'name', 'node_id'],
                           node_id='another node')
    self.api.baremetal.get_allocation.side_effect = [allocation]
    self.assertRaisesRegex(ValueError, 'already uses hostname host',
                           self.pr.provision_node,
                           self.node, 'image', [{'port': 'port1'}],
                           hostname='host')
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_old_style_reservation_with_override(self):
    """A hostname cannot be set on a node reserved via instance_id only
    (old-style reservation without an allocation)."""
    self.node.allocation_id = None
    self.node.instance_id = self.node.id
    self.assertRaisesRegex(exceptions.InvalidNode,
                           'does not use allocations',
                           self.pr.provision_node,
                           self.node, 'image', [{'port': 'port1'}],
                           hostname='host')
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_node_not_found(self):
    """Node lookup failure is surfaced as InvalidNode before any change."""
    self.mock_get_node.side_effect = RuntimeError('not found')
    self.assertRaisesRegex(exceptions.InvalidNode, 'not found',
                           self.pr.provision_node,
                           self.node, 'image', [{'network': 'network'}])
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.update_node.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_node_with_external_instance_id(self):
    """A node reserved by an external service (e.g. nova) is refused."""
    self.node.instance_id = 'nova'
    self.node.allocation_id = None
    self.assertRaisesRegex(exceptions.InvalidNode,
                           'reserved by instance nova',
                           self.pr.provision_node,
                           self.node, 'image', [{'network': 'network'}])
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.update_node.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_node_in_maintenance(self):
    """A node in maintenance mode is refused; the error message carries
    the maintenance reason."""
    self.node.is_maintenance = True
    self.node.maintenance_reason = 'power failure'
    self.assertRaisesRegex(exceptions.InvalidNode,
                           'in maintenance mode .* power failure',
                           self.pr.provision_node,
                           self.node, 'image', [{'network': 'network'}])
    self.assertFalse(self.api.network.create_port.called)
    self.assertFalse(self.api.baremetal.update_node.called)
    self.assertFalse(self.api.baremetal.set_node_provision_state.called)
def test_invalid_http_source(self):
    """HTTP sources require exactly one of checksum/checksum_url:
    neither, or both at once, raises TypeError at construction."""
    self.assertRaises(TypeError, sources.HttpWholeDiskImage,
                      'http://host/image')
    self.assertRaises(TypeError, sources.HttpWholeDiskImage,
                      'http://host/image', checksum='abcd',
                      checksum_url='http://host/checksum')
    self.assertRaises(TypeError, sources.HttpPartitionImage,
                      'http://host/image', 'http://host/kernel',
                      'http://host/ramdisk')
    self.assertRaises(TypeError, sources.HttpPartitionImage,
                      'http://host/image', 'http://host/kernel',
                      'http://host/ramdisk', checksum='abcd',
                      checksum_url='http://host/checksum')
class TestUnprovisionNode(Base):
    """Tests for Provisioner.unprovision_node clean-up behaviour."""

    def setUp(self):
        super(TestUnprovisionNode, self).setUp()
        # Simulate a node deployed by metalsmith: one port created by us,
        # an allocation, and an active provision state.
        self.node.extra['metalsmith_created_ports'] = ['port1']
        self.node.allocation_id = '123'
        self.node.provision_state = 'active'

    def test_ok(self):
        """Ports are deleted, VIFs detached and the node undeployed."""
        # Check that unrelated extra fields are not touched.
        self.node.extra['foo'] = 'bar'
        result = self.pr.unprovision_node(self.node)
        self.assertIs(result, self.node)
        self.api.network.delete_port.assert_called_once_with(
            'port1', ignore_missing=False)
        self.api.baremetal.detach_vif_from_node.assert_called_once_with(
            self.node, 'port1')
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'deleted', wait=False)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra={'foo': 'bar'})
        # We cannot delete an allocation for an active node, it will be deleted
        # automatically.
        self.assertFalse(self.api.baremetal.delete_allocation.called)

    def test_delete_allocation(self):
        """For a non-active node the allocation is deleted explicitly."""
        self.node.provision_state = 'deploy failed'
        # Check that unrelated extra fields are not touched.
        self.node.extra['foo'] = 'bar'
        result = self.pr.unprovision_node(self.node)
        self.assertIs(result, self.node)
        self.api.network.delete_port.assert_called_once_with(
            'port1', ignore_missing=False)
        self.api.baremetal.detach_vif_from_node.assert_called_once_with(
            self.node, 'port1')
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'deleted', wait=False)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra={'foo': 'bar'})
        self.api.baremetal.delete_allocation.assert_called_once_with('123')

    def test_with_attached(self):
        """Attached (pre-existing) ports are detached but not deleted."""
        self.node.extra['metalsmith_attached_ports'] = ['port1', 'port2']
        self.pr.unprovision_node(self.node)
        # Only the port we created is deleted.
        self.api.network.delete_port.assert_called_once_with(
            'port1', ignore_missing=False)
        calls = [mock.call(self.node, 'port1'), mock.call(self.node, 'port2')]
        self.api.baremetal.detach_vif_from_node.assert_has_calls(
            calls, any_order=True)
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'deleted', wait=False)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra={})

    def test_with_wait(self):
        """wait=<seconds> waits for the node to become available."""
        result = self.pr.unprovision_node(self.node, wait=3600)
        self.assertIs(result, self.node)
        self.api.network.delete_port.assert_called_once_with(
            'port1', ignore_missing=False)
        self.api.baremetal.detach_vif_from_node.assert_called_once_with(
            self.node, 'port1')
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'deleted', wait=False)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra={})
        wait_mock = self.api.baremetal.wait_for_nodes_provision_state
        wait_mock.assert_called_once_with([self.node], 'available',
                                          timeout=3600)

    def test_with_wait_failed(self):
        """SDK wait errors are translated to metalsmith exceptions."""
        for caught, expected in [(os_exc.ResourceTimeout,
                                  exceptions.DeploymentTimeout),
                                 (os_exc.SDKException,
                                  exceptions.DeploymentFailed)]:
            self.api.baremetal.wait_for_nodes_provision_state.side_effect = (
                caught)
            self.assertRaises(expected, self.pr.unprovision_node,
                              self.node, wait=3600)

    def test_without_allocation(self):
        """Without an allocation, instance_id is cleared explicitly."""
        self.node.allocation_id = None
        # Check that unrelated extra fields are not touched.
        self.node.extra['foo'] = 'bar'
        result = self.pr.unprovision_node(self.node)
        self.assertIs(result, self.node)
        self.api.network.delete_port.assert_called_once_with(
            'port1', ignore_missing=False)
        self.api.baremetal.detach_vif_from_node.assert_called_once_with(
            self.node, 'port1')
        self.api.baremetal.set_node_provision_state.assert_called_once_with(
            self.node, 'deleted', wait=False)
        self.assertFalse(
            self.api.baremetal.wait_for_nodes_provision_state.called)
        self.api.baremetal.update_node.assert_called_once_with(
            self.node, extra={'foo': 'bar'},
            instance_id=None)
        self.assertFalse(self.api.baremetal.delete_allocation.called)

    def test_dry_run(self):
        """Dry-run unprovisioning makes no API call with side effects."""
        self.pr._dry_run = True
        self.pr.unprovision_node(self.node)
        self.assertFalse(self.api.baremetal.set_node_provision_state.called)
        self.assertFalse(self.api.network.delete_port.called)
        self.assertFalse(self.api.baremetal.detach_vif_from_node.called)
        self.assertFalse(self.api.baremetal.update_node.called)
class TestShowInstance(unittest.TestCase):
    """Tests for Provisioner.show_instance(s) lookup paths."""

    def setUp(self):
        super(TestShowInstance, self).setUp()
        self.pr = _provisioner.Provisioner(mock.Mock())
        self.api = mock.Mock(spec=['baremetal'])
        self.pr.connection = self.api
        # A minimal active node without an allocation.
        self.node = mock.Mock(spec=NODE_FIELDS + ['to_dict'],
                              id='000', instance_id=None,
                              properties={'local_gb': 100},
                              instance_info={},
                              is_maintenance=False, extra={},
                              provision_state='active',
                              allocation_id=None)
        # 'name' must be set after creation: it is a Mock constructor arg.
        self.node.name = 'control-0'
        self.api.baremetal.get_node.return_value = self.node

    def test_show_instance(self):
        """When no allocation matches, the argument is used as a node id."""
        self.api.baremetal.get_allocation.side_effect = (
            os_exc.ResourceNotFound())
        inst = self.pr.show_instance('id1')
        self.assertIsInstance(inst, _instance.Instance)
        self.assertIs(inst.node, self.node)
        self.assertIs(inst.uuid, self.node.id)
        self.api.baremetal.get_node.assert_called_once_with('id1')

    def test_show_instance_with_allocation(self):
        """A matching allocation redirects the node lookup to its node_id."""
        self.api.baremetal.get_allocation.return_value.node_id = '1234'
        inst = self.pr.show_instance('id1')
        self.api.baremetal.get_allocation.assert_called_once_with('id1')
        self.assertIsInstance(inst, _instance.Instance)
        self.assertIs(inst.allocation,
                      self.api.baremetal.get_allocation.return_value)
        self.assertIs(inst.node, self.node)
        self.assertIs(inst.uuid, self.node.id)
        self.api.baremetal.get_node.assert_called_once_with('1234')

    def test_show_instances(self):
        """show_instances mixes direct node ids and allocation matches."""
        self.api.baremetal.get_allocation.side_effect = [
            os_exc.ResourceNotFound(),
            mock.Mock(node_id='4321'),
        ]
        result = self.pr.show_instances(['inst-1', 'inst-2'])
        self.api.baremetal.get_node.assert_has_calls([
            mock.call('inst-1'),
            mock.call('4321'),
        ])
        self.api.baremetal.get_allocation.assert_has_calls([
            mock.call('inst-1'),
            mock.call('inst-2'),
        ])
        self.assertIsInstance(result, list)
        for inst in result:
            self.assertIsInstance(inst, _instance.Instance)
        self.assertIs(result[0].node, self.node)
        self.assertIs(result[0].uuid, self.node.id)

    def test_show_instance_invalid_state(self):
        """Nodes in non-instance states raise InstanceNotFound."""
        self.node.provision_state = 'manageable'
        self.api.baremetal.get_allocation.side_effect = (
            os_exc.ResourceNotFound())
        self.assertRaises(exceptions.InstanceNotFound,
                          self.pr.show_instance, 'id1')
        self.api.baremetal.get_node.assert_called_once_with('id1')
class TestWaitForProvisioning(Base):
    """Tests for Provisioner.wait_for_provisioning."""

    def test_success(self):
        """A successful wait wraps each node in an Instance."""
        node = mock.Mock(spec=NODE_FIELDS)
        result = self.pr.wait_for_provisioning([node])
        self.assertIsInstance(result[0], _instance.Instance)
        self.assertEqual([node], [inst.node for inst in result])

    def test_exceptions(self):
        """SDK errors are translated into metalsmith deployment errors."""
        node = mock.Mock(spec=NODE_FIELDS)
        cases = [
            (os_exc.ResourceTimeout, exceptions.DeploymentTimeout),
            (os_exc.SDKException, exceptions.DeploymentFailed),
        ]
        for raised, translated in cases:
            self.api.baremetal.wait_for_nodes_provision_state.side_effect = (
                raised)
            self.assertRaises(translated,
                              self.pr.wait_for_provisioning, [node])
class TestListInstances(Base):
    """Tests for Provisioner.list_instances."""

    def setUp(self):
        super(TestListInstances, self).setUp()
        # One node per provision state; only the first six states count as
        # instances ('available' without instance_id and 'enroll' do not).
        states = ('active', 'active', 'deploying', 'wait call-back',
                  'deploy failed', 'available', 'available', 'enroll')
        self.nodes = [
            mock.Mock(spec=NODE_FIELDS, provision_state=state,
                      instance_id='1234', allocation_id=None)
            for state in states
        ]
        # First node carries an allocation; seventh has no instance ID.
        self.nodes[0].allocation_id = 'id2'
        self.nodes[6].instance_id = None
        self.api.baremetal.nodes.return_value = self.nodes

    def test_list(self):
        """Only deployed/deploying nodes become instances; the allocation
        is resolved solely for the node that has one."""
        instances = self.pr.list_instances()
        self.api.baremetal.nodes.assert_called_once_with(associated=True,
                                                         details=True)
        for inst in instances:
            self.assertIsInstance(inst, _instance.Instance)
        self.assertEqual(self.nodes[:6], [inst.node for inst in instances])
        allocation = self.api.baremetal.get_allocation.return_value
        expected_allocations = [allocation] + [None] * 5
        self.assertEqual(expected_allocations,
                         [inst.allocation for inst in instances])
| 48.323431 | 79 | 0.641134 | 11,908 | 100,851 | 5.165687 | 0.035018 | 0.072375 | 0.108465 | 0.080308 | 0.882529 | 0.861737 | 0.841594 | 0.822412 | 0.800676 | 0.778811 | 0 | 0.004898 | 0.250954 | 100,851 | 2,086 | 80 | 48.346596 | 0.809391 | 0.010154 | 0 | 0.733296 | 0 | 0 | 0.05943 | 0.005742 | 0 | 0 | 0 | 0 | 0.33352 | 1 | 0.069624 | false | 0 | 0.006176 | 0 | 0.083661 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.