hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
599f43b4a7d40bc85fbc6e0f57955e8d70d1d9b6
| 204
|
py
|
Python
|
BatchProgramParameters.py
|
jakub-tomczak/ror-clr
|
02f88b4083d33acdb50c9d060eca1b6cfb8476fe
|
[
"MIT"
] | null | null | null |
BatchProgramParameters.py
|
jakub-tomczak/ror-clr
|
02f88b4083d33acdb50c9d060eca1b6cfb8476fe
|
[
"MIT"
] | null | null | null |
BatchProgramParameters.py
|
jakub-tomczak/ror-clr
|
02f88b4083d33acdb50c9d060eca1b6cfb8476fe
|
[
"MIT"
] | null | null | null |
class BatchProgramParameters:
def __init__(self, config_file: str):
self.__config_file: str = config_file
@property
def config_file(self) -> str:
return self.__config_file
| 29.142857
| 45
| 0.681373
| 24
| 204
| 5.25
| 0.416667
| 0.396825
| 0.333333
| 0.269841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.240196
| 204
| 7
| 46
| 29.142857
| 0.812903
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
ab68a1db317ce5e55542b88a1dd94fe007a27e7b
| 288
|
py
|
Python
|
tests/pyconverter-test/cases/array_operators.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 977
|
2019-05-06T23:12:55.000Z
|
2022-03-29T19:11:44.000Z
|
tests/pyconverter-test/cases/array_operators.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 3,980
|
2019-05-09T20:48:14.000Z
|
2022-03-28T20:33:07.000Z
|
tests/pyconverter-test/cases/array_operators.py
|
jaydeetay/pxt
|
aad1beaf15edc46e1327806367298cbc942dcbc1
|
[
"MIT"
] | 306
|
2016-04-09T05:28:07.000Z
|
2019-05-02T14:23:29.000Z
|
x = (1, 2) + (3, 4)
y = (1, 2) < (3, 4)
y = (1, 2) <= (3, 4)
y = (1, 2) > (3, 4)
y = (1, 2) >= (3, 4)
y = (1, 2) == (3, 4)
y = (1, 2) != (3, 4)
a = [1, 2] + [3, 4]
b = [1, 2] < [3, 4]
b = [1, 2] <= [3, 4]
b = [1, 2] > [3, 4]
b = [1, 2] >= [3, 4]
b = [1, 2] == [3, 4]
b = [1, 2] != [3, 4]
| 19.2
| 20
| 0.243056
| 70
| 288
| 1
| 0.114286
| 0.4
| 0.6
| 0.8
| 0.971429
| 0.971429
| 0.971429
| 0.971429
| 0.971429
| 0.971429
| 0
| 0.294737
| 0.340278
| 288
| 15
| 21
| 19.2
| 0.073684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
ab7a03862b54737f933ba88fa95be95d2c1a480a
| 2,388
|
py
|
Python
|
tests/test_telluric_context.py
|
lilsnore/telluric
|
e05b020eba36d8be4a670ccd9871ad2e6beffe8d
|
[
"MIT"
] | 81
|
2018-04-12T12:29:06.000Z
|
2022-03-17T09:41:55.000Z
|
tests/test_telluric_context.py
|
lilsnore/telluric
|
e05b020eba36d8be4a670ccd9871ad2e6beffe8d
|
[
"MIT"
] | 283
|
2018-04-09T11:32:25.000Z
|
2022-03-25T22:16:38.000Z
|
tests/test_telluric_context.py
|
lilsnore/telluric
|
e05b020eba36d8be4a670ccd9871ad2e6beffe8d
|
[
"MIT"
] | 22
|
2018-04-09T10:53:52.000Z
|
2022-02-09T10:38:33.000Z
|
from telluric.context import local_context, TelluricContext
def test_context_in_one_level():
with TelluricContext(a=1, b=2, c='stam', d={'a': 'a', 'b': 'b'}):
assert local_context.get('a') == 1
assert local_context.get('b') == 2
assert local_context.get('c') == 'stam'
assert local_context.get('d') == {'a': 'a', 'b': 'b'}
assert local_context._options == {}
def test_context_in_two_level():
with TelluricContext(a=1, b=2, c='stam', d={'a': 'a', 'b': 'b'}):
with TelluricContext(a=4, b=5, x='something', y=13):
assert local_context.get('a') == 4
assert local_context.get('b') == 5
assert local_context.get('c') == 'stam'
assert local_context.get('d') == {'a': 'a', 'b': 'b'}
assert local_context.get('x') == 'something'
assert local_context.get('y') == 13
assert local_context.get('a') == 1
assert local_context.get('b') == 2
assert local_context.get('c') == 'stam'
assert local_context.get('d') == {'a': 'a', 'b': 'b'}
assert local_context._options == {}
def test_different_context_on_different_threads():
import threading
from time import sleep
def thread_test_1():
with TelluricContext(a=1, b=2, c='stam', d={'a': 'a', 'b': 'b'}):
sleep(0.1)
assert local_context.get('a') == 1
assert local_context.get('b') == 2
assert local_context.get('c') == 'stam'
assert local_context.get('d') == {'a': 'a', 'b': 'b'}
assert local_context.get('x') is None
assert local_context.get('y') is None
assert local_context._options == {}
def thread_test_2():
with TelluricContext(a=4, b=5, x='something', y=13):
assert local_context.get('a') == 4
assert local_context.get('b') == 5
assert local_context.get('x') == 'something'
assert local_context.get('y') == 13
assert local_context.get('c') is None
assert local_context.get('d') is None
sleep(0.1)
assert local_context._options == {}
t1 = threading.Thread(target=thread_test_1)
t2 = threading.Thread(target=thread_test_2)
t1.start()
t2.start()
assert local_context._options == {}
t1.join()
t2.join()
assert local_context._options == {}
| 37.904762
| 73
| 0.566164
| 323
| 2,388
| 4
| 0.136223
| 0.306502
| 0.44582
| 0.422601
| 0.818111
| 0.713622
| 0.647059
| 0.647059
| 0.647059
| 0.647059
| 0
| 0.02381
| 0.261307
| 2,388
| 62
| 74
| 38.516129
| 0.708617
| 0
| 0
| 0.660377
| 0
| 0
| 0.049414
| 0
| 0
| 0
| 0
| 0
| 0.603774
| 1
| 0.09434
| false
| 0
| 0.056604
| 0
| 0.150943
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
abae12b3d66a4fe2bc6b2b9564262c3ab155fcdd
| 219
|
py
|
Python
|
nwb_conversion_tools/interfaces/imaging/__init__.py
|
sneakers-the-rat/nwb-conversion-tools
|
46a242f01ba80e489a1d4e89c8612036c7f04f56
|
[
"BSD-3-Clause"
] | null | null | null |
nwb_conversion_tools/interfaces/imaging/__init__.py
|
sneakers-the-rat/nwb-conversion-tools
|
46a242f01ba80e489a1d4e89c8612036c7f04f56
|
[
"BSD-3-Clause"
] | null | null | null |
nwb_conversion_tools/interfaces/imaging/__init__.py
|
sneakers-the-rat/nwb-conversion-tools
|
46a242f01ba80e489a1d4e89c8612036c7f04f56
|
[
"BSD-3-Clause"
] | 1
|
2021-06-28T20:38:31.000Z
|
2021-06-28T20:38:31.000Z
|
from nwb_conversion_tools.interfaces.imaging.base_imaging import BaseImagingExtractorInterface
from nwb_conversion_tools.interfaces.imaging.imaging import TiffImagingInterface, SbxImagingInterface, Hdf5ImagingInterface
| 73
| 123
| 0.917808
| 21
| 219
| 9.333333
| 0.571429
| 0.071429
| 0.173469
| 0.22449
| 0.397959
| 0.397959
| 0
| 0
| 0
| 0
| 0
| 0.004785
| 0.045662
| 219
| 2
| 124
| 109.5
| 0.933014
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
abd3ab12a711b6a6c4537c8c5ffde7775324dfcd
| 40,058
|
py
|
Python
|
sdk/python/pulumi_oci/managementagent/management_agent.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/managementagent/management_agent.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/managementagent/management_agent.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ManagementAgentArgs', 'ManagementAgent']
@pulumi.input_type
class ManagementAgentArgs:
def __init__(__self__, *,
managed_agent_id: pulumi.Input[str],
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
deploy_plugins_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
is_agent_auto_upgradable: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a ManagementAgent resource.
:param pulumi.Input[str] managed_agent_id: Unique Management Agent identifier
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param pulumi.Input[Sequence[pulumi.Input[str]]] deploy_plugins_ids: (Updatable) Plugin Id list
:param pulumi.Input[str] display_name: (Updatable) New displayName of Agent.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[bool] is_agent_auto_upgradable: (Updatable) if set to true then, agent can be upgraded automatically else needs to be upgraded manually.
"""
pulumi.set(__self__, "managed_agent_id", managed_agent_id)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if deploy_plugins_ids is not None:
pulumi.set(__self__, "deploy_plugins_ids", deploy_plugins_ids)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if is_agent_auto_upgradable is not None:
pulumi.set(__self__, "is_agent_auto_upgradable", is_agent_auto_upgradable)
@property
@pulumi.getter(name="managedAgentId")
def managed_agent_id(self) -> pulumi.Input[str]:
"""
Unique Management Agent identifier
"""
return pulumi.get(self, "managed_agent_id")
@managed_agent_id.setter
def managed_agent_id(self, value: pulumi.Input[str]):
pulumi.set(self, "managed_agent_id", value)
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@defined_tags.setter
def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "defined_tags", value)
@property
@pulumi.getter(name="deployPluginsIds")
def deploy_plugins_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
(Updatable) Plugin Id list
"""
return pulumi.get(self, "deploy_plugins_ids")
@deploy_plugins_ids.setter
def deploy_plugins_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "deploy_plugins_ids", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) New displayName of Agent.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@freeform_tags.setter
def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "freeform_tags", value)
@property
@pulumi.getter(name="isAgentAutoUpgradable")
def is_agent_auto_upgradable(self) -> Optional[pulumi.Input[bool]]:
"""
(Updatable) if set to true then, agent can be upgraded automatically else needs to be upgraded manually.
"""
return pulumi.get(self, "is_agent_auto_upgradable")
@is_agent_auto_upgradable.setter
def is_agent_auto_upgradable(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_agent_auto_upgradable", value)
@pulumi.input_type
class _ManagementAgentState:
def __init__(__self__, *,
availability_status: Optional[pulumi.Input[str]] = None,
compartment_id: Optional[pulumi.Input[str]] = None,
defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
deploy_plugins_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
display_name: Optional[pulumi.Input[str]] = None,
freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
host: Optional[pulumi.Input[str]] = None,
install_key_id: Optional[pulumi.Input[str]] = None,
install_path: Optional[pulumi.Input[str]] = None,
is_agent_auto_upgradable: Optional[pulumi.Input[bool]] = None,
lifecycle_details: Optional[pulumi.Input[str]] = None,
managed_agent_id: Optional[pulumi.Input[str]] = None,
platform_name: Optional[pulumi.Input[str]] = None,
platform_type: Optional[pulumi.Input[str]] = None,
platform_version: Optional[pulumi.Input[str]] = None,
plugin_lists: Optional[pulumi.Input[Sequence[pulumi.Input['ManagementAgentPluginListArgs']]]] = None,
state: Optional[pulumi.Input[str]] = None,
time_created: Optional[pulumi.Input[str]] = None,
time_last_heartbeat: Optional[pulumi.Input[str]] = None,
time_updated: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering ManagementAgent resources.
:param pulumi.Input[str] availability_status: The current availability status of managementAgent
:param pulumi.Input[str] compartment_id: Compartment Identifier
:param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
:param pulumi.Input[Sequence[pulumi.Input[str]]] deploy_plugins_ids: (Updatable) Plugin Id list
:param pulumi.Input[str] display_name: (Updatable) New displayName of Agent.
:param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
:param pulumi.Input[str] host: Management Agent host machine name
:param pulumi.Input[str] install_key_id: agent install key identifier
:param pulumi.Input[str] install_path: Path where Management Agent is installed
:param pulumi.Input[bool] is_agent_auto_upgradable: (Updatable) if set to true then, agent can be upgraded automatically else needs to be upgraded manually.
:param pulumi.Input[str] lifecycle_details: A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
:param pulumi.Input[str] managed_agent_id: Unique Management Agent identifier
:param pulumi.Input[str] platform_name: Platform Name
:param pulumi.Input[str] platform_type: Platform Type
:param pulumi.Input[str] platform_version: Platform Version
:param pulumi.Input[Sequence[pulumi.Input['ManagementAgentPluginListArgs']]] plugin_lists: list of managementAgentPlugins associated with the agent
:param pulumi.Input[str] state: The current state of managementAgent
:param pulumi.Input[str] time_created: The time the Management Agent was created. An RFC3339 formatted datetime string
:param pulumi.Input[str] time_last_heartbeat: The time the Management Agent has last recorded its health status in telemetry. This value will be null if the agent has not recorded its health status in last 7 days. An RFC3339 formatted datetime string
:param pulumi.Input[str] time_updated: The time the Management Agent was updated. An RFC3339 formatted datetime string
:param pulumi.Input[str] version: Management Agent Version
"""
if availability_status is not None:
pulumi.set(__self__, "availability_status", availability_status)
if compartment_id is not None:
pulumi.set(__self__, "compartment_id", compartment_id)
if defined_tags is not None:
pulumi.set(__self__, "defined_tags", defined_tags)
if deploy_plugins_ids is not None:
pulumi.set(__self__, "deploy_plugins_ids", deploy_plugins_ids)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if freeform_tags is not None:
pulumi.set(__self__, "freeform_tags", freeform_tags)
if host is not None:
pulumi.set(__self__, "host", host)
if install_key_id is not None:
pulumi.set(__self__, "install_key_id", install_key_id)
if install_path is not None:
pulumi.set(__self__, "install_path", install_path)
if is_agent_auto_upgradable is not None:
pulumi.set(__self__, "is_agent_auto_upgradable", is_agent_auto_upgradable)
if lifecycle_details is not None:
pulumi.set(__self__, "lifecycle_details", lifecycle_details)
if managed_agent_id is not None:
pulumi.set(__self__, "managed_agent_id", managed_agent_id)
if platform_name is not None:
pulumi.set(__self__, "platform_name", platform_name)
if platform_type is not None:
pulumi.set(__self__, "platform_type", platform_type)
if platform_version is not None:
pulumi.set(__self__, "platform_version", platform_version)
if plugin_lists is not None:
pulumi.set(__self__, "plugin_lists", plugin_lists)
if state is not None:
pulumi.set(__self__, "state", state)
if time_created is not None:
pulumi.set(__self__, "time_created", time_created)
if time_last_heartbeat is not None:
pulumi.set(__self__, "time_last_heartbeat", time_last_heartbeat)
if time_updated is not None:
pulumi.set(__self__, "time_updated", time_updated)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter(name="availabilityStatus")
def availability_status(self) -> Optional[pulumi.Input[str]]:
"""
The current availability status of managementAgent
"""
return pulumi.get(self, "availability_status")
@availability_status.setter
def availability_status(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "availability_status", value)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> Optional[pulumi.Input[str]]:
"""
Compartment Identifier
"""
return pulumi.get(self, "compartment_id")
@compartment_id.setter
def compartment_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "compartment_id", value)
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
"""
return pulumi.get(self, "defined_tags")
@defined_tags.setter
def defined_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "defined_tags", value)
@property
@pulumi.getter(name="deployPluginsIds")
def deploy_plugins_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
(Updatable) Plugin Id list
"""
return pulumi.get(self, "deploy_plugins_ids")
@deploy_plugins_ids.setter
def deploy_plugins_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "deploy_plugins_ids", value)
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[pulumi.Input[str]]:
"""
(Updatable) New displayName of Agent.
"""
return pulumi.get(self, "display_name")
@display_name.setter
def display_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "display_name", value)
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
(Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
"""
return pulumi.get(self, "freeform_tags")
@freeform_tags.setter
def freeform_tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "freeform_tags", value)
@property
@pulumi.getter
def host(self) -> Optional[pulumi.Input[str]]:
"""
Management Agent host machine name
"""
return pulumi.get(self, "host")
@host.setter
def host(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "host", value)
@property
@pulumi.getter(name="installKeyId")
def install_key_id(self) -> Optional[pulumi.Input[str]]:
"""
agent install key identifier
"""
return pulumi.get(self, "install_key_id")
@install_key_id.setter
def install_key_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "install_key_id", value)
@property
@pulumi.getter(name="installPath")
def install_path(self) -> Optional[pulumi.Input[str]]:
"""
Path where Management Agent is installed
"""
return pulumi.get(self, "install_path")
@install_path.setter
def install_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "install_path", value)
@property
@pulumi.getter(name="isAgentAutoUpgradable")
def is_agent_auto_upgradable(self) -> Optional[pulumi.Input[bool]]:
"""
(Updatable) if set to true then, agent can be upgraded automatically else needs to be upgraded manually.
"""
return pulumi.get(self, "is_agent_auto_upgradable")
@is_agent_auto_upgradable.setter
def is_agent_auto_upgradable(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_agent_auto_upgradable", value)
@property
@pulumi.getter(name="lifecycleDetails")
def lifecycle_details(self) -> Optional[pulumi.Input[str]]:
"""
A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
"""
return pulumi.get(self, "lifecycle_details")
@lifecycle_details.setter
def lifecycle_details(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lifecycle_details", value)
@property
@pulumi.getter(name="managedAgentId")
def managed_agent_id(self) -> Optional[pulumi.Input[str]]:
"""
Unique Management Agent identifier
"""
return pulumi.get(self, "managed_agent_id")
@managed_agent_id.setter
def managed_agent_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "managed_agent_id", value)
@property
@pulumi.getter(name="platformName")
def platform_name(self) -> Optional[pulumi.Input[str]]:
"""
Platform Name
"""
return pulumi.get(self, "platform_name")
@platform_name.setter
def platform_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "platform_name", value)
@property
@pulumi.getter(name="platformType")
def platform_type(self) -> Optional[pulumi.Input[str]]:
"""
Platform Type
"""
return pulumi.get(self, "platform_type")
@platform_type.setter
def platform_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "platform_type", value)
@property
@pulumi.getter(name="platformVersion")
def platform_version(self) -> Optional[pulumi.Input[str]]:
"""
Platform Version
"""
return pulumi.get(self, "platform_version")
@platform_version.setter
def platform_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "platform_version", value)
@property
@pulumi.getter(name="pluginLists")
def plugin_lists(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ManagementAgentPluginListArgs']]]]:
"""
list of managementAgentPlugins associated with the agent
"""
return pulumi.get(self, "plugin_lists")
@plugin_lists.setter
def plugin_lists(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ManagementAgentPluginListArgs']]]]):
pulumi.set(self, "plugin_lists", value)
@property
@pulumi.getter
def state(self) -> Optional[pulumi.Input[str]]:
"""
The current state of managementAgent
"""
return pulumi.get(self, "state")
@state.setter
def state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "state", value)
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> Optional[pulumi.Input[str]]:
"""
The time the Management Agent was created. An RFC3339 formatted datetime string
"""
return pulumi.get(self, "time_created")
@time_created.setter
def time_created(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "time_created", value)
@property
@pulumi.getter(name="timeLastHeartbeat")
def time_last_heartbeat(self) -> Optional[pulumi.Input[str]]:
"""
The time the Management Agent has last recorded its health status in telemetry. This value will be null if the agent has not recorded its health status in last 7 days. An RFC3339 formatted datetime string
"""
return pulumi.get(self, "time_last_heartbeat")
@time_last_heartbeat.setter
def time_last_heartbeat(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "time_last_heartbeat", value)
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> Optional[pulumi.Input[str]]:
"""
The time the Management Agent was updated. An RFC3339 formatted datetime string
"""
return pulumi.get(self, "time_updated")
@time_updated.setter
def time_updated(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "time_updated", value)
@property
@pulumi.getter
def version(self) -> Optional[pulumi.Input[str]]:
"""
Management Agent Version
"""
return pulumi.get(self, "version")
@version.setter
def version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "version", value)
class ManagementAgent(pulumi.CustomResource):
    """A Management Agent resource in the Oracle Cloud Infrastructure (OCI)
    Management Agent service.

    NOTE(review): this class follows the Pulumi provider-codegen layout
    (overloaded ``__init__`` dispatching to ``_internal_init``, plus a
    ``get`` classmethod-style lookup) — presumably auto-generated; verify
    before hand-editing.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 deploy_plugins_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 is_agent_auto_upgradable: Optional[pulumi.Input[bool]] = None,
                 managed_agent_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        This resource provides the Management Agent resource in Oracle Cloud Infrastructure Management Agent service.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_oci as oci

        test_management_agent = oci.managementagent.ManagementAgent("testManagementAgent", managed_agent_id=oci_management_agent_managed_agent["test_managed_agent"]["id"])
        ```

        ## Import

        ManagementAgents can be imported using the `id`, e.g.

        ```sh
        $ pulumi import oci:managementagent/managementAgent:ManagementAgent test_management_agent "id"
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        :param pulumi.Input[Sequence[pulumi.Input[str]]] deploy_plugins_ids: (Updatable) Plugin Id list
        :param pulumi.Input[str] display_name: (Updatable) New displayName of Agent.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        :param pulumi.Input[bool] is_agent_auto_upgradable: (Updatable) if set to true then, agent can be upgraded automatically else needs to be upgraded manually.
        :param pulumi.Input[str] managed_agent_id: Unique Management Agent identifier
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ManagementAgentArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        This resource provides the Management Agent resource in Oracle Cloud Infrastructure Management Agent service.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_oci as oci

        test_management_agent = oci.managementagent.ManagementAgent("testManagementAgent", managed_agent_id=oci_management_agent_managed_agent["test_managed_agent"]["id"])
        ```

        ## Import

        ManagementAgents can be imported using the `id`, e.g.

        ```sh
        $ pulumi import oci:managementagent/managementAgent:ManagementAgent test_management_agent "id"
        ```

        :param str resource_name: The name of the resource.
        :param ManagementAgentArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        """Dispatch between the two overloads above.

        ``get_resource_args_opts`` decides whether the caller passed a
        ``ManagementAgentArgs`` object or loose keyword arguments, and both
        paths funnel into ``_internal_init``.
        """
        resource_args, opts = _utilities.get_resource_args_opts(ManagementAgentArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # args-object form: expand its fields into keyword arguments.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            # keyword form: forward everything untouched.
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       deploy_plugins_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       display_name: Optional[pulumi.Input[str]] = None,
                       freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       is_agent_auto_upgradable: Optional[pulumi.Input[bool]] = None,
                       managed_agent_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads: validates
        # options, builds the property bag, and registers the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (no existing provider id supplied).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ManagementAgentArgs.__new__(ManagementAgentArgs)

            # Caller-supplied inputs.
            __props__.__dict__["defined_tags"] = defined_tags
            __props__.__dict__["deploy_plugins_ids"] = deploy_plugins_ids
            __props__.__dict__["display_name"] = display_name
            __props__.__dict__["freeform_tags"] = freeform_tags
            __props__.__dict__["is_agent_auto_upgradable"] = is_agent_auto_upgradable
            # managed_agent_id is required unless the resource is being
            # looked up by URN.
            if managed_agent_id is None and not opts.urn:
                raise TypeError("Missing required property 'managed_agent_id'")
            __props__.__dict__["managed_agent_id"] = managed_agent_id

            # Provider-computed outputs: seeded to None, filled in by the
            # engine after the resource is created.
            __props__.__dict__["availability_status"] = None
            __props__.__dict__["compartment_id"] = None
            __props__.__dict__["host"] = None
            __props__.__dict__["install_key_id"] = None
            __props__.__dict__["install_path"] = None
            __props__.__dict__["lifecycle_details"] = None
            __props__.__dict__["platform_name"] = None
            __props__.__dict__["platform_type"] = None
            __props__.__dict__["platform_version"] = None
            __props__.__dict__["plugin_lists"] = None
            __props__.__dict__["state"] = None
            __props__.__dict__["time_created"] = None
            __props__.__dict__["time_last_heartbeat"] = None
            __props__.__dict__["time_updated"] = None
            __props__.__dict__["version"] = None
        super(ManagementAgent, __self__).__init__(
            'oci:managementagent/managementAgent:ManagementAgent',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            availability_status: Optional[pulumi.Input[str]] = None,
            compartment_id: Optional[pulumi.Input[str]] = None,
            defined_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            deploy_plugins_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            freeform_tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            host: Optional[pulumi.Input[str]] = None,
            install_key_id: Optional[pulumi.Input[str]] = None,
            install_path: Optional[pulumi.Input[str]] = None,
            is_agent_auto_upgradable: Optional[pulumi.Input[bool]] = None,
            lifecycle_details: Optional[pulumi.Input[str]] = None,
            managed_agent_id: Optional[pulumi.Input[str]] = None,
            platform_name: Optional[pulumi.Input[str]] = None,
            platform_type: Optional[pulumi.Input[str]] = None,
            platform_version: Optional[pulumi.Input[str]] = None,
            plugin_lists: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ManagementAgentPluginListArgs']]]]] = None,
            state: Optional[pulumi.Input[str]] = None,
            time_created: Optional[pulumi.Input[str]] = None,
            time_last_heartbeat: Optional[pulumi.Input[str]] = None,
            time_updated: Optional[pulumi.Input[str]] = None,
            version: Optional[pulumi.Input[str]] = None) -> 'ManagementAgent':
        """
        Get an existing ManagementAgent resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] availability_status: The current availability status of managementAgent
        :param pulumi.Input[str] compartment_id: Compartment Identifier
        :param pulumi.Input[Mapping[str, Any]] defined_tags: (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        :param pulumi.Input[Sequence[pulumi.Input[str]]] deploy_plugins_ids: (Updatable) Plugin Id list
        :param pulumi.Input[str] display_name: (Updatable) New displayName of Agent.
        :param pulumi.Input[Mapping[str, Any]] freeform_tags: (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        :param pulumi.Input[str] host: Management Agent host machine name
        :param pulumi.Input[str] install_key_id: agent install key identifier
        :param pulumi.Input[str] install_path: Path where Management Agent is installed
        :param pulumi.Input[bool] is_agent_auto_upgradable: (Updatable) if set to true then, agent can be upgraded automatically else needs to be upgraded manually.
        :param pulumi.Input[str] lifecycle_details: A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
        :param pulumi.Input[str] managed_agent_id: Unique Management Agent identifier
        :param pulumi.Input[str] platform_name: Platform Name
        :param pulumi.Input[str] platform_type: Platform Type
        :param pulumi.Input[str] platform_version: Platform Version
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ManagementAgentPluginListArgs']]]] plugin_lists: list of managementAgentPlugins associated with the agent
        :param pulumi.Input[str] state: The current state of managementAgent
        :param pulumi.Input[str] time_created: The time the Management Agent was created. An RFC3339 formatted datetime string
        :param pulumi.Input[str] time_last_heartbeat: The time the Management Agent has last recorded its health status in telemetry. This value will be null if the agent has not recorded its health status in last 7 days. An RFC3339 formatted datetime string
        :param pulumi.Input[str] time_updated: The time the Management Agent was updated. An RFC3339 formatted datetime string
        :param pulumi.Input[str] version: Management Agent Version
        """
        # Attach the provider id so the engine performs a lookup rather
        # than a create.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Build the full state bag from whatever the caller supplied.
        __props__ = _ManagementAgentState.__new__(_ManagementAgentState)

        __props__.__dict__["availability_status"] = availability_status
        __props__.__dict__["compartment_id"] = compartment_id
        __props__.__dict__["defined_tags"] = defined_tags
        __props__.__dict__["deploy_plugins_ids"] = deploy_plugins_ids
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["freeform_tags"] = freeform_tags
        __props__.__dict__["host"] = host
        __props__.__dict__["install_key_id"] = install_key_id
        __props__.__dict__["install_path"] = install_path
        __props__.__dict__["is_agent_auto_upgradable"] = is_agent_auto_upgradable
        __props__.__dict__["lifecycle_details"] = lifecycle_details
        __props__.__dict__["managed_agent_id"] = managed_agent_id
        __props__.__dict__["platform_name"] = platform_name
        __props__.__dict__["platform_type"] = platform_type
        __props__.__dict__["platform_version"] = platform_version
        __props__.__dict__["plugin_lists"] = plugin_lists
        __props__.__dict__["state"] = state
        __props__.__dict__["time_created"] = time_created
        __props__.__dict__["time_last_heartbeat"] = time_last_heartbeat
        __props__.__dict__["time_updated"] = time_updated
        __props__.__dict__["version"] = version
        return ManagementAgent(resource_name, opts=opts, __props__=__props__)

    # ------------------------------------------------------------------
    # Output properties (read-only views over the resource's live state).
    # ------------------------------------------------------------------

    @property
    @pulumi.getter(name="availabilityStatus")
    def availability_status(self) -> pulumi.Output[str]:
        """
        The current availability status of managementAgent
        """
        return pulumi.get(self, "availability_status")

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> pulumi.Output[str]:
        """
        Compartment Identifier
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> pulumi.Output[Mapping[str, Any]]:
        """
        (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Operations.CostCenter": "42"}`
        """
        return pulumi.get(self, "defined_tags")

    @property
    @pulumi.getter(name="deployPluginsIds")
    def deploy_plugins_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        (Updatable) Plugin Id list
        """
        return pulumi.get(self, "deploy_plugins_ids")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """
        (Updatable) New displayName of Agent.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> pulumi.Output[Mapping[str, Any]]:
        """
        (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm). Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")

    @property
    @pulumi.getter
    def host(self) -> pulumi.Output[str]:
        """
        Management Agent host machine name
        """
        return pulumi.get(self, "host")

    @property
    @pulumi.getter(name="installKeyId")
    def install_key_id(self) -> pulumi.Output[str]:
        """
        agent install key identifier
        """
        return pulumi.get(self, "install_key_id")

    @property
    @pulumi.getter(name="installPath")
    def install_path(self) -> pulumi.Output[str]:
        """
        Path where Management Agent is installed
        """
        return pulumi.get(self, "install_path")

    @property
    @pulumi.getter(name="isAgentAutoUpgradable")
    def is_agent_auto_upgradable(self) -> pulumi.Output[bool]:
        """
        (Updatable) if set to true then, agent can be upgraded automatically else needs to be upgraded manually.
        """
        return pulumi.get(self, "is_agent_auto_upgradable")

    @property
    @pulumi.getter(name="lifecycleDetails")
    def lifecycle_details(self) -> pulumi.Output[str]:
        """
        A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
        """
        return pulumi.get(self, "lifecycle_details")

    @property
    @pulumi.getter(name="managedAgentId")
    def managed_agent_id(self) -> pulumi.Output[str]:
        """
        Unique Management Agent identifier
        """
        return pulumi.get(self, "managed_agent_id")

    @property
    @pulumi.getter(name="platformName")
    def platform_name(self) -> pulumi.Output[str]:
        """
        Platform Name
        """
        return pulumi.get(self, "platform_name")

    @property
    @pulumi.getter(name="platformType")
    def platform_type(self) -> pulumi.Output[str]:
        """
        Platform Type
        """
        return pulumi.get(self, "platform_type")

    @property
    @pulumi.getter(name="platformVersion")
    def platform_version(self) -> pulumi.Output[str]:
        """
        Platform Version
        """
        return pulumi.get(self, "platform_version")

    @property
    @pulumi.getter(name="pluginLists")
    def plugin_lists(self) -> pulumi.Output[Sequence['outputs.ManagementAgentPluginList']]:
        """
        list of managementAgentPlugins associated with the agent
        """
        return pulumi.get(self, "plugin_lists")

    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """
        The current state of managementAgent
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> pulumi.Output[str]:
        """
        The time the Management Agent was created. An RFC3339 formatted datetime string
        """
        return pulumi.get(self, "time_created")

    @property
    @pulumi.getter(name="timeLastHeartbeat")
    def time_last_heartbeat(self) -> pulumi.Output[str]:
        """
        The time the Management Agent has last recorded its health status in telemetry. This value will be null if the agent has not recorded its health status in last 7 days. An RFC3339 formatted datetime string
        """
        return pulumi.get(self, "time_last_heartbeat")

    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> pulumi.Output[str]:
        """
        The time the Management Agent was updated. An RFC3339 formatted datetime string
        """
        return pulumi.get(self, "time_updated")

    @property
    @pulumi.getter
    def version(self) -> pulumi.Output[str]:
        """
        Management Agent Version
        """
        return pulumi.get(self, "version")
| 47.688095
| 346
| 0.6722
| 4,728
| 40,058
| 5.448604
| 0.054146
| 0.081557
| 0.067932
| 0.060634
| 0.880517
| 0.845231
| 0.823765
| 0.798843
| 0.792943
| 0.717053
| 0
| 0.002148
| 0.221154
| 40,058
| 839
| 347
| 47.744934
| 0.823552
| 0.330296
| 0
| 0.601227
| 1
| 0
| 0.120847
| 0.019218
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167689
| false
| 0.002045
| 0.014315
| 0
| 0.288344
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
abd6921cb5a8c468d5f2ea8cedc52077e6912b44
| 3,414
|
py
|
Python
|
tests/test_configsort.py
|
chepazzo/ansible-filters-extras
|
2373d66089cb6497a29577c9de2a705ec9a67ea2
|
[
"BSD-2-Clause"
] | 1
|
2016-06-22T06:23:58.000Z
|
2016-06-22T06:23:58.000Z
|
tests/test_configsort.py
|
chepazzo/ansible-filters-extras
|
2373d66089cb6497a29577c9de2a705ec9a67ea2
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_configsort.py
|
chepazzo/ansible-filters-extras
|
2373d66089cb6497a29577c9de2a705ec9a67ea2
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function, absolute_import
import unittest
from library.configsort import sort_config
class SortTestCase(unittest.TestCase):
    """Unit tests for ``library.configsort.sort_config``.

    Each test feeds an unsorted network-device configuration (as a list of
    lines) to ``sort_config`` and compares the joined result against an
    expected, sorted configuration.

    NOTE(review): the triple-quoted config literals below appear to have
    lost their original leading indentation during extraction —
    sort_config presumably distinguishes parent/child lines by indent
    depth (the expected outputs show "ip address" etc. nested under
    "interface") — TODO confirm against the original file before relying
    on these literals.
    """

    def test_basic_sort(self):
        # Two interface stanzas supplied out of order; expect stanzas and
        # their sub-lines sorted lexically.
        orig_config = """
interface eth 3
ip address 3.3.3.3/32
load interval 5
interface eth 1
ip address 1.1.1.1/32
description "eth1 rules"
load interval 5
"""
        exp_config = """
interface eth 1
description "eth1 rules"
ip address 1.1.1.1/32
load interval 5
interface eth 3
ip address 3.3.3.3/32
load interval 5
""".strip()
        sorted_config = sort_config(orig_config.split('\n'))
        self.assertEqual('\n'.join(sorted_config), exp_config, '')

    def test_duplicate_top_level(self):
        # "interface eth 3" appears twice; expect its sub-lines merged
        # into a single stanza.
        orig_config = """
interface eth 3
load interval 5
interface eth 1
ip address 1.1.1.1/32
description "eth1 rules"
load interval 5
interface eth 3
ip address 3.3.3.3/32
"""
        exp_config = """
interface eth 1
description "eth1 rules"
ip address 1.1.1.1/32
load interval 5
interface eth 3
ip address 3.3.3.3/32
load interval 5
""".strip()
        sorted_config = sort_config(orig_config.split('\n'))
        self.assertEqual('\n'.join(sorted_config), exp_config, '')

    def test_duplicate_sub_level(self):
        # A sub-line ('description "eth1 rules"') is repeated; expect it
        # deduplicated in the output.
        orig_config = """
interface eth 3
load interval 5
interface eth 1
ip address 1.1.1.1/32
description "eth1 rules"
load interval 5
description "eth1 rules"
interface eth 3
load interval 5
ip address 3.3.3.3/32
"""
        exp_config = """
interface eth 1
description "eth1 rules"
ip address 1.1.1.1/32
load interval 5
interface eth 3
ip address 3.3.3.3/32
load interval 5
""".strip()
        sorted_config = sort_config(orig_config.split('\n'))
        self.assertEqual('\n'.join(sorted_config), exp_config, '')

    def test_multi_level(self):
        # Three levels of nesting (interface -> ip ospf -> area/passive);
        # expect sorting applied recursively at every level.
        orig_config = """
interface eth 1
ip address 1.1.1.1/32
description "eth1 rules"
load interval 5
ip ospf
passive
area 0
interface eth 3
load interval 5
ip address 3.3.3.3/32
ip ospf
passive
nssa
area 100
"""
        exp_config = """
interface eth 1
description "eth1 rules"
ip address 1.1.1.1/32
ip ospf
area 0
passive
load interval 5
interface eth 3
ip address 3.3.3.3/32
ip ospf
area 100
nssa
passive
load interval 5
""".strip()
        sorted_config = sort_config(orig_config.split('\n'))
        self.assertEqual('\n'.join(sorted_config), exp_config, '')

    def test_multi_level_dup(self):
        # Combination of the above: duplicated "interface eth 3" stanza
        # whose nested "ip ospf" sub-stanzas must be merged and sorted.
        orig_config = """
interface eth 3
description "eth3 forever"
ip ospf
passive
auth-key none
interface eth 1
ip address 1.1.1.1/32
description "eth1 rules"
load interval 5
ip ospf
passive
area 0
interface eth 3
load interval 5
ip address 3.3.3.3/32
ip ospf
passive
nssa
area 100
"""
        exp_config = """
interface eth 1
description "eth1 rules"
ip address 1.1.1.1/32
ip ospf
area 0
passive
load interval 5
interface eth 3
description "eth3 forever"
ip address 3.3.3.3/32
ip ospf
area 100
auth-key none
nssa
passive
load interval 5
""".strip()
        sorted_config = sort_config(orig_config.split('\n'))
        self.assertEqual('\n'.join(sorted_config), exp_config, '')
if __name__ == '__main__':
    # Allow running this test module directly: python test_configsort.py
    unittest.main()
| 21.204969
| 66
| 0.643527
| 524
| 3,414
| 4.072519
| 0.118321
| 0.028116
| 0.127929
| 0.051546
| 0.888941
| 0.888941
| 0.86926
| 0.841612
| 0.841612
| 0.841612
| 0
| 0.076438
| 0.256591
| 3,414
| 160
| 67
| 21.3375
| 0.764381
| 0.006151
| 0
| 0.926667
| 0
| 0
| 0.609145
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 1
| 0.033333
| false
| 0.06
| 0.02
| 0
| 0.06
| 0.006667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
abf490cf5022c6c5f46b5f3b069bc520d3ec4fbf
| 42,195
|
py
|
Python
|
tests/test_supertree.py
|
jdvelasq/hardDecisions
|
54f886e82784c4061200d843841ef600b0ac366b
|
[
"MIT"
] | null | null | null |
tests/test_supertree.py
|
jdvelasq/hardDecisions
|
54f886e82784c4061200d843841ef600b0ac366b
|
[
"MIT"
] | null | null | null |
tests/test_supertree.py
|
jdvelasq/hardDecisions
|
54f886e82784c4061200d843841ef600b0ac366b
|
[
"MIT"
] | 1
|
2021-07-17T19:27:54.000Z
|
2021-07-17T19:27:54.000Z
|
"""
>>> from hardDecisions.decisiontree import *
>>> tree = DecisionTree()
>>> tree.decision_node(name='BID',
... branches=[(500, 1),
... (700, 1)],
... max=True)
>>> tree.chance_node(name='COMPBID',
... branches=[(35.0, 400, 2),
... (50.0, 600, 2),
... (15.0, 800, 2)])
>>> tree.chance_node(name='COST',
... branches=[(25.0, 200, 3),
... (50.0, 400, 3),
... (25.0, 600, 3)])
>>> tree.terminal_node(expr='(BID-COST) * (1 if BID < COMPBID else 0)')
>>> tree.display_nodes() # doctest: +NORMALIZE_WHITESPACE
Node 0
Type: DECISION - Maximum Payoff
Name: BID
Branches:
Value Next Node
500.000 1
700.000 1
<BLANKLINE>
Node 1
Type: CHANCE
Name: COMPBID
Branches:
Chance Value Next Node
35.00 400.000 2
50.00 600.000 2
15.00 800.000 2
<BLANKLINE>
Node 2
Type: CHANCE
Name: COST
Branches:
Chance Value Next Node
25.00 200.000 3
50.00 400.000 3
25.00 600.000 3
<BLANKLINE>
Node 3
Type: TERMINAL
Expr: (BID-COST) * (1 if BID < COMPBID else 0)
<BLANKLINE>
>>> tree.build_tree()
>>> tree.display_tree() # doctest: +NORMALIZE_WHITESPACE
|
| #0
\-------[D]
|
| #1
| BID=500
+-------[C]
| |
| | #2
| | COMPBID=400
| | Prob=35.00
| +-------[C]
| | |
| | | #3
| | | COST=200
| | | Prob=25.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #4
| | | COST=400
| | | Prob=50.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #5
| | | COST=600
| | | Prob=25.00
| | \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #6
| | COMPBID=600
| | Prob=50.00
| +-------[C]
| | |
| | | #7
| | | COST=200
| | | Prob=25.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #8
| | | COST=400
| | | Prob=50.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #9
| | | COST=600
| | | Prob=25.00
| | \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #10
| | COMPBID=800
| | Prob=15.00
| \-------[C]
| |
| | #11
| | COST=200
| | Prob=25.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #12
| | COST=400
| | Prob=50.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #13
| | COST=600
| | Prob=25.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #14
| BID=700
\-------[C]
|
| #15
| COMPBID=400
| Prob=35.00
+-------[C]
| |
| | #16
| | COST=200
| | Prob=25.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #17
| | COST=400
| | Prob=50.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #18
| | COST=600
| | Prob=25.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #19
| COMPBID=600
| Prob=50.00
+-------[C]
| |
| | #20
| | COST=200
| | Prob=25.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #21
| | COST=400
| | Prob=50.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #22
| | COST=600
| | Prob=25.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #23
| COMPBID=800
| Prob=15.00
\-------[C]
|
| #24
| COST=200
| Prob=25.00
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #25
| COST=400
| Prob=50.00
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #26
| COST=600
| Prob=25.00
\-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
>>> tree.display_tree(maxdeep=0) # doctest: +NORMALIZE_WHITESPACE
|
| #0
\-------[D]
>>> tree.display_tree(maxdeep=1) # doctest: +NORMALIZE_WHITESPACE
|
| #0
\-------[D]
|
| #1
| BID=500
+-------[C]
|
| #14
| BID=700
\-------[C]
>>> tree.display_tree(maxdeep=2) # doctest: +NORMALIZE_WHITESPACE
|
| #0
\-------[D]
|
| #1
| BID=500
+-------[C]
| |
| | #2
| | COMPBID=400
| | Prob=35.00
| +-------[C]
| |
| | #6
| | COMPBID=600
| | Prob=50.00
| +-------[C]
| |
| | #10
| | COMPBID=800
| | Prob=15.00
| \-------[C]
|
| #14
| BID=700
\-------[C]
|
| #15
| COMPBID=400
| Prob=35.00
+-------[C]
|
| #19
| COMPBID=600
| Prob=50.00
+-------[C]
|
| #23
| COMPBID=800
| Prob=15.00
\-------[C]
>>> tree.display_tree(maxdeep=3) # doctest: +NORMALIZE_WHITESPACE
|
| #0
\-------[D]
|
| #1
| BID=500
+-------[C]
| |
| | #2
| | COMPBID=400
| | Prob=35.00
| +-------[C]
| | |
| | | #3
| | | COST=200
| | | Prob=25.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #4
| | | COST=400
| | | Prob=50.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #5
| | | COST=600
| | | Prob=25.00
| | \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #6
| | COMPBID=600
| | Prob=50.00
| +-------[C]
| | |
| | | #7
| | | COST=200
| | | Prob=25.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #8
| | | COST=400
| | | Prob=50.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #9
| | | COST=600
| | | Prob=25.00
| | \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #10
| | COMPBID=800
| | Prob=15.00
| \-------[C]
| |
| | #11
| | COST=200
| | Prob=25.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #12
| | COST=400
| | Prob=50.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #13
| | COST=600
| | Prob=25.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #14
| BID=700
\-------[C]
|
| #15
| COMPBID=400
| Prob=35.00
+-------[C]
| |
| | #16
| | COST=200
| | Prob=25.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #17
| | COST=400
| | Prob=50.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #18
| | COST=600
| | Prob=25.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #19
| COMPBID=600
| Prob=50.00
+-------[C]
| |
| | #20
| | COST=200
| | Prob=25.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #21
| | COST=400
| | Prob=50.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #22
| | COST=600
| | Prob=25.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #23
| COMPBID=800
| Prob=15.00
\-------[C]
|
| #24
| COST=200
| Prob=25.00
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #25
| COST=400
| Prob=50.00
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #26
| COST=600
| Prob=25.00
\-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
>>> tree.evaluate()
>>> tree.display_tree() # doctest: +NORMALIZE_WHITESPACE
|
| #0
| ExpVal=65.00
| (selected strategy)
\-------[D]
|
| #1
| BID=500
| ExpVal=65.00
| (selected strategy)
+-------[C]
| |
| | #2
| | COMPBID=400
| | Prob=35.00
| | ExpVal=0.00
| | (selected strategy)
| +-------[C]
| | |
| | | #3
| | | COST=200
| | | Prob=25.00
| | | PathProb=8.75
| | | ExpVal=0.00
| | | (selected strategy)
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #4
| | | COST=400
| | | Prob=50.00
| | | PathProb=17.50
| | | ExpVal=0.00
| | | (selected strategy)
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #5
| | | COST=600
| | | Prob=25.00
| | | PathProb=8.75
| | | ExpVal=0.00
| | | (selected strategy)
| | \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #6
| | COMPBID=600
| | Prob=50.00
| | ExpVal=100.00
| | (selected strategy)
| +-------[C]
| | |
| | | #7
| | | COST=200
| | | Prob=25.00
| | | PathProb=12.50
| | | ExpVal=300.00
| | | (selected strategy)
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #8
| | | COST=400
| | | Prob=50.00
| | | PathProb=25.00
| | | ExpVal=100.00
| | | (selected strategy)
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #9
| | | COST=600
| | | Prob=25.00
| | | PathProb=12.50
| | | ExpVal=-100.00
| | | (selected strategy)
| | \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #10
| | COMPBID=800
| | Prob=15.00
| | ExpVal=100.00
| | (selected strategy)
| \-------[C]
| |
| | #11
| | COST=200
| | Prob=25.00
| | PathProb=3.75
| | ExpVal=300.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #12
| | COST=400
| | Prob=50.00
| | PathProb=7.50
| | ExpVal=100.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #13
| | COST=600
| | Prob=25.00
| | PathProb=3.75
| | ExpVal=-100.00
| | (selected strategy)
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #14
| BID=700
| ExpVal=45.00
\-------[C]
|
| #15
| COMPBID=400
| Prob=35.00
| ExpVal=0.00
+-------[C]
| |
| | #16
| | COST=200
| | Prob=25.00
| | PathProb=0.00
| | ExpVal=0.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #17
| | COST=400
| | Prob=50.00
| | PathProb=0.00
| | ExpVal=0.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #18
| | COST=600
| | Prob=25.00
| | PathProb=0.00
| | ExpVal=0.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #19
| COMPBID=600
| Prob=50.00
| ExpVal=0.00
+-------[C]
| |
| | #20
| | COST=200
| | Prob=25.00
| | PathProb=0.00
| | ExpVal=0.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #21
| | COST=400
| | Prob=50.00
| | PathProb=0.00
| | ExpVal=0.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #22
| | COST=600
| | Prob=25.00
| | PathProb=0.00
| | ExpVal=0.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #23
| COMPBID=800
| Prob=15.00
| ExpVal=300.00
\-------[C]
|
| #24
| COST=200
| Prob=25.00
| PathProb=0.00
| ExpVal=500.00
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #25
| COST=400
| Prob=50.00
| PathProb=0.00
| ExpVal=300.00
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #26
| COST=600
| Prob=25.00
| PathProb=0.00
| ExpVal=100.00
\-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
>>> tree.display_tree(selected_strategy=True) # doctest: +NORMALIZE_WHITESPACE
|
| #0
| ExpVal=65.00
| (selected strategy)
\-------[D]
|
| #1
| BID=500
| ExpVal=65.00
| (selected strategy)
\-------[C]
|
| #2
| COMPBID=400
| Prob=35.00
| ExpVal=0.00
| (selected strategy)
+-------[C]
| |
| | #3
| | COST=200
| | Prob=25.00
| | PathProb=8.75
| | ExpVal=0.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #4
| | COST=400
| | Prob=50.00
| | PathProb=17.50
| | ExpVal=0.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #5
| | COST=600
| | Prob=25.00
| | PathProb=8.75
| | ExpVal=0.00
| | (selected strategy)
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #6
| COMPBID=600
| Prob=50.00
| ExpVal=100.00
| (selected strategy)
+-------[C]
| |
| | #7
| | COST=200
| | Prob=25.00
| | PathProb=12.50
| | ExpVal=300.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #8
| | COST=400
| | Prob=50.00
| | PathProb=25.00
| | ExpVal=100.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #9
| | COST=600
| | Prob=25.00
| | PathProb=12.50
| | ExpVal=-100.00
| | (selected strategy)
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #10
| COMPBID=800
| Prob=15.00
| ExpVal=100.00
| (selected strategy)
\-------[C]
|
| #11
| COST=200
| Prob=25.00
| PathProb=3.75
| ExpVal=300.00
| (selected strategy)
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #12
| COST=400
| Prob=50.00
| PathProb=7.50
| ExpVal=100.00
| (selected strategy)
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #13
| COST=600
| Prob=25.00
| PathProb=3.75
| ExpVal=-100.00
| (selected strategy)
\-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
>>> tree.compute_risk_profile()
>>> tree.display_tree() # doctest: +NORMALIZE_WHITESPACE
|
| #0
| ExpVal=65.00
| Risk Profile:
| Value Prob
| -100.00 16.25
| 0.00 35.00
| 100.00 32.50
| 300.00 16.25
| (selected strategy)
\-------[D]
|
| #1
| BID=500
| ExpVal=65.00
| Risk Profile:
| Value Prob
| -100.00 16.25
| 0.00 35.00
| 100.00 32.50
| 300.00 16.25
| (selected strategy)
+-------[C]
| |
| | #2
| | COMPBID=400
| | Prob=35.00
| | ExpVal=0.00
| | Risk Profile:
| | Value Prob
| | 0.00 35.00
| | (selected strategy)
| +-------[C]
| | |
| | | #3
| | | COST=200
| | | Prob=25.00
| | | PathProb=8.75
| | | ExpVal=0.00
| | | (selected strategy)
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #4
| | | COST=400
| | | Prob=50.00
| | | PathProb=17.50
| | | ExpVal=0.00
| | | (selected strategy)
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #5
| | | COST=600
| | | Prob=25.00
| | | PathProb=8.75
| | | ExpVal=0.00
| | | (selected strategy)
| | \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #6
| | COMPBID=600
| | Prob=50.00
| | ExpVal=100.00
| | Risk Profile:
| | Value Prob
| | -100.00 12.50
| | 100.00 25.00
| | 300.00 12.50
| | (selected strategy)
| +-------[C]
| | |
| | | #7
| | | COST=200
| | | Prob=25.00
| | | PathProb=12.50
| | | ExpVal=300.00
| | | (selected strategy)
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #8
| | | COST=400
| | | Prob=50.00
| | | PathProb=25.00
| | | ExpVal=100.00
| | | (selected strategy)
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #9
| | | COST=600
| | | Prob=25.00
| | | PathProb=12.50
| | | ExpVal=-100.00
| | | (selected strategy)
| | \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #10
| | COMPBID=800
| | Prob=15.00
| | ExpVal=100.00
| | Risk Profile:
| | Value Prob
| | -100.00 3.75
| | 100.00 7.50
| | 300.00 3.75
| | (selected strategy)
| \-------[C]
| |
| | #11
| | COST=200
| | Prob=25.00
| | PathProb=3.75
| | ExpVal=300.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #12
| | COST=400
| | Prob=50.00
| | PathProb=7.50
| | ExpVal=100.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #13
| | COST=600
| | Prob=25.00
| | PathProb=3.75
| | ExpVal=-100.00
| | (selected strategy)
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #14
| BID=700
| ExpVal=45.00
\-------[C]
|
| #15
| COMPBID=400
| Prob=35.00
| ExpVal=0.00
+-------[C]
| |
| | #16
| | COST=200
| | Prob=25.00
| | PathProb=0.00
| | ExpVal=0.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #17
| | COST=400
| | Prob=50.00
| | PathProb=0.00
| | ExpVal=0.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #18
| | COST=600
| | Prob=25.00
| | PathProb=0.00
| | ExpVal=0.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #19
| COMPBID=600
| Prob=50.00
| ExpVal=0.00
+-------[C]
| |
| | #20
| | COST=200
| | Prob=25.00
| | PathProb=0.00
| | ExpVal=0.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #21
| | COST=400
| | Prob=50.00
| | PathProb=0.00
| | ExpVal=0.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #22
| | COST=600
| | Prob=25.00
| | PathProb=0.00
| | ExpVal=0.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #23
| COMPBID=800
| Prob=15.00
| ExpVal=300.00
\-------[C]
|
| #24
| COST=200
| Prob=25.00
| PathProb=0.00
| ExpVal=500.00
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #25
| COST=400
| Prob=50.00
| PathProb=0.00
| ExpVal=300.00
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #26
| COST=600
| Prob=25.00
| PathProb=0.00
| ExpVal=100.00
\-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
Probabilistic senstitivity
>>> b500 = []
>>> b700 = []
>>> for p in range(0, 101, 10):
... tree.data[2]['branches'] = [(p, 200, 3), (0.0, 400, 3), (100-p, 600, 3)]
... tree.build_tree()
... tree.evaluate()
... b500.append(tree.tree[1]['ExpVal'])
... b700.append(tree.tree[14]['ExpVal'])
>>> tree.data[2]['branches'] = [(25, 200, 3), (50, 400, 3), (25, 600, 3)]
>>> tree.build_tree()
>>> tree.evaluate()
>>> b500
[-65.0, -39.0, -13.0, 13.0, 39.0, 65.0, 91.0, 117.0, 143.0, 169.0, 195.0]
>>> b700
[15.0, 21.0, 27.0, 33.0, 39.0, 45.0, 51.0, 57.0, 63.0, 69.0, 75.0]
>>> tree.use_utility_function(exponential=True, R=100)
>>> tree.evaluate()
>>> tree.display_tree() # doctest: +NORMALIZE_WHITESPACE
|
| #0
| ExpVal=45.00
| ExpUtl=0.13
| CE=14.18
| (selected strategy)
\-------[D]
|
| #1
| BID=500
| ExpVal=65.00
| ExpUtl=0.08
| CE=8.41
+-------[C]
| |
| | #2
| | COMPBID=400
| | Prob=35.00
| | ExpVal=0.00
| | ExpUtl=0.00
| | CE=-0.00
| +-------[C]
| | |
| | | #3
| | | COST=200
| | | Prob=25.00
| | | PathProb=0.00
| | | ExpVal=0.00
| | | ExpUtl=0.00
| | | CE=0.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #4
| | | COST=400
| | | Prob=50.00
| | | PathProb=0.00
| | | ExpVal=0.00
| | | ExpUtl=0.00
| | | CE=0.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #5
| | | COST=600
| | | Prob=25.00
| | | PathProb=0.00
| | | ExpVal=0.00
| | | ExpUtl=0.00
| | | CE=0.00
| | \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #6
| | COMPBID=600
| | Prob=50.00
| | ExpVal=100.00
| | ExpUtl=0.12
| | CE=13.24
| +-------[C]
| | |
| | | #7
| | | COST=200
| | | Prob=25.00
| | | PathProb=0.00
| | | ExpVal=300.00
| | | ExpUtl=0.95
| | | CE=300.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #8
| | | COST=400
| | | Prob=50.00
| | | PathProb=0.00
| | | ExpVal=100.00
| | | ExpUtl=0.63
| | | CE=100.00
| | +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| | |
| | | #9
| | | COST=600
| | | Prob=25.00
| | | PathProb=0.00
| | | ExpVal=-100.00
| | | ExpUtl=-1.72
| | | CE=-100.00
| | \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #10
| | COMPBID=800
| | Prob=15.00
| | ExpVal=100.00
| | ExpUtl=0.12
| | CE=13.24
| \-------[C]
| |
| | #11
| | COST=200
| | Prob=25.00
| | PathProb=0.00
| | ExpVal=300.00
| | ExpUtl=0.95
| | CE=300.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #12
| | COST=400
| | Prob=50.00
| | PathProb=0.00
| | ExpVal=100.00
| | ExpUtl=0.63
| | CE=100.00
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #13
| | COST=600
| | Prob=25.00
| | PathProb=0.00
| | ExpVal=-100.00
| | ExpUtl=-1.72
| | CE=-100.00
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #14
| BID=700
| ExpVal=45.00
| ExpUtl=0.13
| CE=14.18
| (selected strategy)
\-------[C]
|
| #15
| COMPBID=400
| Prob=35.00
| ExpVal=0.00
| ExpUtl=0.00
| CE=-0.00
| (selected strategy)
+-------[C]
| |
| | #16
| | COST=200
| | Prob=25.00
| | PathProb=8.75
| | ExpVal=0.00
| | ExpUtl=0.00
| | CE=0.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #17
| | COST=400
| | Prob=50.00
| | PathProb=17.50
| | ExpVal=0.00
| | ExpUtl=0.00
| | CE=0.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #18
| | COST=600
| | Prob=25.00
| | PathProb=8.75
| | ExpVal=0.00
| | ExpUtl=0.00
| | CE=0.00
| | (selected strategy)
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #19
| COMPBID=600
| Prob=50.00
| ExpVal=0.00
| ExpUtl=0.00
| CE=-0.00
| (selected strategy)
+-------[C]
| |
| | #20
| | COST=200
| | Prob=25.00
| | PathProb=12.50
| | ExpVal=0.00
| | ExpUtl=0.00
| | CE=0.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #21
| | COST=400
| | Prob=50.00
| | PathProb=25.00
| | ExpVal=0.00
| | ExpUtl=0.00
| | CE=0.00
| | (selected strategy)
| +-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
| |
| | #22
| | COST=600
| | Prob=25.00
| | PathProb=12.50
| | ExpVal=0.00
| | ExpUtl=0.00
| | CE=0.00
| | (selected strategy)
| \-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #23
| COMPBID=800
| Prob=15.00
| ExpVal=300.00
| ExpUtl=0.88
| CE=213.24
| (selected strategy)
\-------[C]
|
| #24
| COST=200
| Prob=25.00
| PathProb=3.75
| ExpVal=500.00
| ExpUtl=0.99
| CE=500.00
| (selected strategy)
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #25
| COST=400
| Prob=50.00
| PathProb=7.50
| ExpVal=300.00
| ExpUtl=0.95
| CE=300.00
| (selected strategy)
+-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
|
| #26
| COST=600
| Prob=25.00
| PathProb=3.75
| ExpVal=100.00
| ExpUtl=0.63
| CE=100.00
| (selected strategy)
\-------[T] (BID-COST) * (1 if BID < COMPBID else 0)
"""
if __name__ == "__main__":
import doctest
doctest.testmod()
| 35.074813
| 86
| 0.233867
| 3,021
| 42,195
| 3.253227
| 0.041708
| 0.071937
| 0.082214
| 0.102768
| 0.899369
| 0.883598
| 0.882886
| 0.872914
| 0.864978
| 0.847477
| 0
| 0.168847
| 0.628605
| 42,195
| 1,202
| 87
| 35.103993
| 0.458299
| 0.998033
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
f9faba76c38b3254765fe464a8344c08462c7026
| 22,407
|
py
|
Python
|
tests/TestCasXmiParser.py
|
TalnUPF/dkpro-pycas
|
a44928742755c11bad175c64de5c6e39455723a7
|
[
"Apache-2.0"
] | 5
|
2017-08-18T04:00:08.000Z
|
2018-06-01T12:57:21.000Z
|
tests/TestCasXmiParser.py
|
TalnUPF/dkpro-pycas
|
a44928742755c11bad175c64de5c6e39455723a7
|
[
"Apache-2.0"
] | 7
|
2018-05-14T10:44:47.000Z
|
2019-11-04T12:13:02.000Z
|
tests/TestCasXmiParser.py
|
TalnUPF/dkpro-pycas
|
a44928742755c11bad175c64de5c6e39455723a7
|
[
"Apache-2.0"
] | 4
|
2018-03-23T15:28:39.000Z
|
2019-03-26T14:52:11.000Z
|
'''
Created on Dec 12, 2016
unit test cases to check CasXmiParser functionality
@author: Dibyojyoti
'''
import unittest2 as unittest
from pycas.cas.parse.CasXmiParser import CasXmiParser
from lxml import etree
class TestCasXmiParserMethods(unittest.TestCase):
def test_witeXmiAsFile(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
#provide proper path before running
"""casxmi.witeXmiAsFile('C:/Users/Dibyojyoti/Desktop/scriptop/document1_op.txt.xmi')
casxmi1 = CasXmiParser()
casxmi1.setXmiAsFile('C:/Users/Dibyojyoti/Desktop/scriptop/document1_op.txt.xmi')
self.assertEqual(casxmi.getXmiAsString(),casxmi1.getXmiAsString())
"""
def test_root(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
casxmi1 = CasXmiParser()
self.assertEqual(casxmi1.getTag(casxmi.getRootElement()),'{http://www.omg.org/XMI}XMI')
self.assertEqual(casxmi1.getLocalname(casxmi.getRootElement()),'XMI')
self.assertEqual(casxmi1.getNamespace(casxmi.getRootElement()),'http://www.omg.org/XMI')
self.assertEqual(casxmi.getRootTagAsDict(), {'XMI': 'http://www.omg.org/XMI'})
self.assertEqual(casxmi.getRootAttribteNames(), ['{http://www.omg.org/XMI}version'])
self.assertEqual(casxmi.getRootAttributesAsDict(),{'{http://www.omg.org/XMI}version': '2.0'})
self.assertEqual(casxmi.getRootAttributesAsList(),[('{http://www.omg.org/XMI}version', '2.0')])
def test_casnull(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
str1='''<cas:NULL xmlns:cas="http:///uima/cas.ecore" xmlns:xmi="http://www.omg.org/XMI" xmlns:pos="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos.ecore" xmlns:tcas="http:///uima/tcas.ecore" xmlns:tweet="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos/tweet.ecore" xmlns:morph="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/morph.ecore" xmlns:type3="http:///de/tudarmstadt/ukp/dkpro/core/api/frequency/tfidf/type.ecore" xmlns:dependency="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/dependency.ecore" xmlns:type7="http:///de/tudarmstadt/ukp/dkpro/core/api/semantics/type.ecore" xmlns:type9="http:///de/tudarmstadt/ukp/dkpro/core/api/transform/type.ecore" xmlns:type="http:///de/tudarmstadt/ukp/dkpro/core/api/anomaly/type.ecore" xmlns:type8="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type.ecore" xmlns:type4="http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore" xmlns:type5="http:///de/tudarmstadt/ukp/dkpro/core/api/ner/type.ecore" xmlns:type6="http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore" xmlns:type2="http:///de/tudarmstadt/ukp/dkpro/core/api/coref/type.ecore" xmlns:constituent="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/constituent.ecore" xmlns:chunk="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/chunk.ecore" xmi:id="0"/>\n\n'''
self.assertEqual(casxmi.convertChildToString(casxmi.getCasNullChild()),str1)
def test_cassofa(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
str1='''<cas:Sofa xmlns:cas="http:///uima/cas.ecore" xmlns:xmi="http://www.omg.org/XMI" xmlns:pos="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos.ecore" xmlns:tcas="http:///uima/tcas.ecore" xmlns:tweet="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos/tweet.ecore" xmlns:morph="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/morph.ecore" xmlns:type3="http:///de/tudarmstadt/ukp/dkpro/core/api/frequency/tfidf/type.ecore" xmlns:dependency="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/dependency.ecore" xmlns:type7="http:///de/tudarmstadt/ukp/dkpro/core/api/semantics/type.ecore" xmlns:type9="http:///de/tudarmstadt/ukp/dkpro/core/api/transform/type.ecore" xmlns:type="http:///de/tudarmstadt/ukp/dkpro/core/api/anomaly/type.ecore" xmlns:type8="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type.ecore" xmlns:type4="http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore" xmlns:type5="http:///de/tudarmstadt/ukp/dkpro/core/api/ner/type.ecore" xmlns:type6="http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore" xmlns:type2="http:///de/tudarmstadt/ukp/dkpro/core/api/coref/type.ecore" xmlns:constituent="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/constituent.ecore" xmlns:chunk="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/chunk.ecore" xmi:id="878" sofaNum="1" sofaID="_InitialView" mimeType="text" sofaString="These steps install the basis system requirements needed to implement DKPro Core pipelines using the Java language. They need to be performed only once."/>\n\n'''
self.assertEqual(casxmi.convertChildToString(casxmi.getCasSofaChild()),str1)
def test_casview(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
str1='''<cas:View xmlns:cas="http:///uima/cas.ecore" xmlns:pos="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos.ecore" xmlns:tcas="http:///uima/tcas.ecore" xmlns:xmi="http://www.omg.org/XMI" xmlns:tweet="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos/tweet.ecore" xmlns:morph="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/morph.ecore" xmlns:type3="http:///de/tudarmstadt/ukp/dkpro/core/api/frequency/tfidf/type.ecore" xmlns:dependency="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/dependency.ecore" xmlns:type7="http:///de/tudarmstadt/ukp/dkpro/core/api/semantics/type.ecore" xmlns:type9="http:///de/tudarmstadt/ukp/dkpro/core/api/transform/type.ecore" xmlns:type="http:///de/tudarmstadt/ukp/dkpro/core/api/anomaly/type.ecore" xmlns:type8="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type.ecore" xmlns:type4="http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore" xmlns:type5="http:///de/tudarmstadt/ukp/dkpro/core/api/ner/type.ecore" xmlns:type6="http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore" xmlns:type2="http:///de/tudarmstadt/ukp/dkpro/core/api/coref/type.ecore" xmlns:constituent="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/constituent.ecore" xmlns:chunk="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/chunk.ecore" sofa="878" members="91 102 107 112 122 132 142 152 162 172 182 192 202 212 222 232 242 252 262 272 282 292 302 312 322 332 342 352 362 372 485 426 540 545 550 555 560 565 570 575 580 585 590 595 600 605 610 615 620 625 630 635 640 645 650 655 660 665 670 678 686 694 702 710 718 726 734 742 750 758 766 774 782 790 798 806 814 822 830 838 846 854 862 870"/>\n\n'''
self.assertEqual(casxmi.convertChildToString(casxmi.getCasViewChild()),str1)
"print(casxmi.getCasChildren())"
"print(casxmi.getNonCasChildren())"
def test_findChildByLocalname(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
str1 = '''<type6:Sentence xmlns:type6="http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore" xmlns:xmi="http://www.omg.org/XMI" xmlns:pos="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos.ecore" xmlns:tcas="http:///uima/tcas.ecore" xmlns:cas="http:///uima/cas.ecore" xmlns:tweet="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos/tweet.ecore" xmlns:morph="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/morph.ecore" xmlns:type3="http:///de/tudarmstadt/ukp/dkpro/core/api/frequency/tfidf/type.ecore" xmlns:dependency="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/dependency.ecore" xmlns:type7="http:///de/tudarmstadt/ukp/dkpro/core/api/semantics/type.ecore" xmlns:type9="http:///de/tudarmstadt/ukp/dkpro/core/api/transform/type.ecore" xmlns:type="http:///de/tudarmstadt/ukp/dkpro/core/api/anomaly/type.ecore" xmlns:type8="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type.ecore" xmlns:type4="http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore" xmlns:type5="http:///de/tudarmstadt/ukp/dkpro/core/api/ner/type.ecore" xmlns:type2="http:///de/tudarmstadt/ukp/dkpro/core/api/coref/type.ecore" xmlns:constituent="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/constituent.ecore" xmlns:chunk="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/chunk.ecore" xmi:id="102" sofa="878" begin="0" end="115"/>\n\n'''
self.assertEqual(casxmi.convertChildToString(casxmi.findChildByLocalname('Sentence')),str1)
def test_findChildByTag(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
str1='''<type6:Sentence xmlns:type6="http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore" xmlns:xmi="http://www.omg.org/XMI" xmlns:pos="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos.ecore" xmlns:tcas="http:///uima/tcas.ecore" xmlns:cas="http:///uima/cas.ecore" xmlns:tweet="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos/tweet.ecore" xmlns:morph="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/morph.ecore" xmlns:type3="http:///de/tudarmstadt/ukp/dkpro/core/api/frequency/tfidf/type.ecore" xmlns:dependency="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/dependency.ecore" xmlns:type7="http:///de/tudarmstadt/ukp/dkpro/core/api/semantics/type.ecore" xmlns:type9="http:///de/tudarmstadt/ukp/dkpro/core/api/transform/type.ecore" xmlns:type="http:///de/tudarmstadt/ukp/dkpro/core/api/anomaly/type.ecore" xmlns:type8="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type.ecore" xmlns:type4="http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore" xmlns:type5="http:///de/tudarmstadt/ukp/dkpro/core/api/ner/type.ecore" xmlns:type2="http:///de/tudarmstadt/ukp/dkpro/core/api/coref/type.ecore" xmlns:constituent="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/constituent.ecore" xmlns:chunk="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/chunk.ecore" xmi:id="102" sofa="878" begin="0" end="115"/>\n\n'''
self.assertEqual(casxmi.convertChildToString(
casxmi.findChildByTag('{http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore}Sentence')),str1)
def test_findChildByNamespace(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
str1='''<type6:Sentence xmlns:type6="http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore" xmlns:xmi="http://www.omg.org/XMI" xmlns:pos="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos.ecore" xmlns:tcas="http:///uima/tcas.ecore" xmlns:cas="http:///uima/cas.ecore" xmlns:tweet="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos/tweet.ecore" xmlns:morph="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/morph.ecore" xmlns:type3="http:///de/tudarmstadt/ukp/dkpro/core/api/frequency/tfidf/type.ecore" xmlns:dependency="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/dependency.ecore" xmlns:type7="http:///de/tudarmstadt/ukp/dkpro/core/api/semantics/type.ecore" xmlns:type9="http:///de/tudarmstadt/ukp/dkpro/core/api/transform/type.ecore" xmlns:type="http:///de/tudarmstadt/ukp/dkpro/core/api/anomaly/type.ecore" xmlns:type8="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type.ecore" xmlns:type4="http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore" xmlns:type5="http:///de/tudarmstadt/ukp/dkpro/core/api/ner/type.ecore" xmlns:type2="http:///de/tudarmstadt/ukp/dkpro/core/api/coref/type.ecore" xmlns:constituent="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/constituent.ecore" xmlns:chunk="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/chunk.ecore" xmi:id="102" sofa="878" begin="0" end="115"/>\n\n'''
self.assertEqual(casxmi.convertChildToString(
casxmi.findChildByNamespace('http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore')),str1)
def test_findChildrenByLocalname(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
str1='''<type6:Sentence xmlns:type6="http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore" xmlns:xmi="http://www.omg.org/XMI" xmlns:pos="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos.ecore" xmlns:tcas="http:///uima/tcas.ecore" xmlns:cas="http:///uima/cas.ecore" xmlns:tweet="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos/tweet.ecore" xmlns:morph="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/morph.ecore" xmlns:type3="http:///de/tudarmstadt/ukp/dkpro/core/api/frequency/tfidf/type.ecore" xmlns:dependency="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/dependency.ecore" xmlns:type7="http:///de/tudarmstadt/ukp/dkpro/core/api/semantics/type.ecore" xmlns:type9="http:///de/tudarmstadt/ukp/dkpro/core/api/transform/type.ecore" xmlns:type="http:///de/tudarmstadt/ukp/dkpro/core/api/anomaly/type.ecore" xmlns:type8="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type.ecore" xmlns:type4="http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore" xmlns:type5="http:///de/tudarmstadt/ukp/dkpro/core/api/ner/type.ecore" xmlns:type2="http:///de/tudarmstadt/ukp/dkpro/core/api/coref/type.ecore" xmlns:constituent="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/constituent.ecore" xmlns:chunk="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/chunk.ecore" xmi:id="102" sofa="878" begin="0" end="115"/>\n\n'''
self.assertEqual(casxmi.convertChildToString(casxmi.findChildrenByLocalname('Sentence')[0]),str1)
def test_findChildrenByTag(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
str1='''<type6:Sentence xmlns:type6="http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore" xmlns:xmi="http://www.omg.org/XMI" xmlns:pos="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos.ecore" xmlns:tcas="http:///uima/tcas.ecore" xmlns:cas="http:///uima/cas.ecore" xmlns:tweet="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos/tweet.ecore" xmlns:morph="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/morph.ecore" xmlns:type3="http:///de/tudarmstadt/ukp/dkpro/core/api/frequency/tfidf/type.ecore" xmlns:dependency="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/dependency.ecore" xmlns:type7="http:///de/tudarmstadt/ukp/dkpro/core/api/semantics/type.ecore" xmlns:type9="http:///de/tudarmstadt/ukp/dkpro/core/api/transform/type.ecore" xmlns:type="http:///de/tudarmstadt/ukp/dkpro/core/api/anomaly/type.ecore" xmlns:type8="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type.ecore" xmlns:type4="http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore" xmlns:type5="http:///de/tudarmstadt/ukp/dkpro/core/api/ner/type.ecore" xmlns:type2="http:///de/tudarmstadt/ukp/dkpro/core/api/coref/type.ecore" xmlns:constituent="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/constituent.ecore" xmlns:chunk="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/chunk.ecore" xmi:id="102" sofa="878" begin="0" end="115"/>\n\n'''
self.assertEqual(casxmi.convertChildToString(
casxmi.findChildrenByTag('{http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore}Sentence')[0]),str1)
def test_findChildrenByNamespace(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
str1='''<type6:Sentence xmlns:type6="http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore" xmlns:xmi="http://www.omg.org/XMI" xmlns:pos="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos.ecore" xmlns:tcas="http:///uima/tcas.ecore" xmlns:cas="http:///uima/cas.ecore" xmlns:tweet="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos/tweet.ecore" xmlns:morph="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/morph.ecore" xmlns:type3="http:///de/tudarmstadt/ukp/dkpro/core/api/frequency/tfidf/type.ecore" xmlns:dependency="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/dependency.ecore" xmlns:type7="http:///de/tudarmstadt/ukp/dkpro/core/api/semantics/type.ecore" xmlns:type9="http:///de/tudarmstadt/ukp/dkpro/core/api/transform/type.ecore" xmlns:type="http:///de/tudarmstadt/ukp/dkpro/core/api/anomaly/type.ecore" xmlns:type8="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type.ecore" xmlns:type4="http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore" xmlns:type5="http:///de/tudarmstadt/ukp/dkpro/core/api/ner/type.ecore" xmlns:type2="http:///de/tudarmstadt/ukp/dkpro/core/api/coref/type.ecore" xmlns:constituent="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/constituent.ecore" xmlns:chunk="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/chunk.ecore" xmi:id="102" sofa="878" begin="0" end="115"/>\n\n'''
self.assertEqual(casxmi.convertChildToString(
casxmi.findChildrenByNamespace('http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore')[0]),str1)
def test_getChildAttributesAsDict(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
self.assertEqual(casxmi.getChildAttributesAsDict(casxmi.findChildByLocalname('Sentence')),
{'end': '115', 'sofa': '878', '{http://www.omg.org/XMI}id': '102', 'begin': '0'})
def test_findChildByAttribute(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
str1='''<type4:DocumentMetaData xmlns:type4="http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore" xmlns:xmi="http://www.omg.org/XMI" xmlns:pos="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos.ecore" xmlns:tcas="http:///uima/tcas.ecore" xmlns:cas="http:///uima/cas.ecore" xmlns:tweet="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/pos/tweet.ecore" xmlns:morph="http:///de/tudarmstadt/ukp/dkpro/core/api/lexmorph/type/morph.ecore" xmlns:type3="http:///de/tudarmstadt/ukp/dkpro/core/api/frequency/tfidf/type.ecore" xmlns:dependency="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/dependency.ecore" xmlns:type7="http:///de/tudarmstadt/ukp/dkpro/core/api/semantics/type.ecore" xmlns:type9="http:///de/tudarmstadt/ukp/dkpro/core/api/transform/type.ecore" xmlns:type="http:///de/tudarmstadt/ukp/dkpro/core/api/anomaly/type.ecore" xmlns:type8="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type.ecore" xmlns:type5="http:///de/tudarmstadt/ukp/dkpro/core/api/ner/type.ecore" xmlns:type6="http:///de/tudarmstadt/ukp/dkpro/core/api/segmentation/type.ecore" xmlns:type2="http:///de/tudarmstadt/ukp/dkpro/core/api/coref/type.ecore" xmlns:constituent="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/constituent.ecore" xmlns:chunk="http:///de/tudarmstadt/ukp/dkpro/core/api/syntax/type/chunk.ecore" xmi:id="91" sofa="878" begin="0" end="152" language="en" documentTitle="document1.txt" documentId="document1.txt" documentUri="file:/C:/Users/Dibyojyoti/Desktop/scriptip/document1.txt" collectionId="file:/C:/Users/Dibyojyoti/Desktop/scriptip/" documentBaseUri="file:/C:/Users/Dibyojyoti/Desktop/scriptip/" isLastSegment="false"/>\n'''
self.assertEqual(etree.tostring(casxmi.findChildByAttribute('sofa')).decode("utf-8"),str1)
def test_getChildTagAsDict(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
self.assertEqual(casxmi.getRootElement().items()[0],('{http://www.omg.org/XMI}version', '2.0'))
self.assertEqual(casxmi.getChildTagAsDict(casxmi.getCasSofaChild()),{'Sofa': 'http:///uima/cas.ecore'})
def test_getChildAttribteNames(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
for child in casxmi.getRootElement():
self.assertEqual(casxmi.getTag(child),'{http:///uima/cas.ecore}NULL')
self.assertEqual(casxmi.getChildIndex(child), 0)
for name, value in casxmi.getChildAttributesAsList(child):
self.assertEqual(name,'{http://www.omg.org/XMI}id');
self.assertEqual(value,'0');
break
def test_others(self):
casxmi = CasXmiParser()
casxmi.setXmiAsFile('tests/testing_data/document1.txt.xmi')
"""
print('all attributes in root');
print(casxmi.getRootKeys);
for name, value in casxmi.getRootAttributesAsList():
if(casxmi.getNamespace(name) !=None):
print(casxmi.getNamespace(name),':',casxmi.getLocalname(name),' = ',value)
else:
print(name,'=',value);
"""
"""
elementType="{http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore}TagDescription"
"""
"""
elementType="{http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore}TagsetDescription"
"""
"""
elementType="{http:///de/tudarmstadt/ukp/dkpro/core/api/metadata/type.ecore}DocumentMetaData"
"""
"""
elementType="cas"
"""
"""
print('filter tags ');
for child in casxmi.getChildElementsFilterBytag(elementType):
print('******child element =',casxmi.getChildTag(child),' at index : ',casxmi.getChildIndex(child),' ******')
print('tagname: ',casxmi.getLocalname(child.tag),' namespace: ',casxmi.getNamespace(child.tag))
print(sorted(child.keys()));
for name, value in casxmi.getChildAttributesAsList(child):
if(casxmi.getNamespace(name) !=None):
print(casxmi.getNamespace(name),':',casxmi.getLocalname(name),' = ',value)
else:
print(name,'=',value);
print(' ')
"""
"""
print('return list of attribute name value pairs as a dict ');
list1 = []
loopvar = 0
for child in casxmi.getChildElementsFilterBytag(elementType):
print('******child element =',casxmi.getChildTag(child),' at index : ',casxmi.getChildIndex(child),' ******')
attributes = dict(child.attrib)
list1.append(attributes)
"print(attributes)"
loopvar = loopvar + 1;
print(list1)
for listitem in list:
print(listitem)
for key, value in listitem.items():
print(key,value)
"""
if __name__ == '__main__':
unittest.main()
| 126.59322
| 1,690
| 0.723702
| 3,115
| 22,407
| 5.191974
| 0.097913
| 0.09893
| 0.165028
| 0.194151
| 0.835714
| 0.830211
| 0.819885
| 0.79979
| 0.793668
| 0.786558
| 0
| 0.024395
| 0.088945
| 22,407
| 176
| 1,691
| 127.3125
| 0.767855
| 0.005802
| 0
| 0.424242
| 0
| 0.121212
| 0.787953
| 0.047825
| 0
| 0
| 0
| 0
| 0.242424
| 1
| 0.161616
| false
| 0
| 0.030303
| 0
| 0.20202
| 0.020202
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e61e4c6bada1c5a89ca50610d0b06100421e2504
| 14,975
|
py
|
Python
|
tests/test_dataframe.py
|
questdb/pykit
|
c8aac35ab57b88d422f40126380f11f1d1e2d143
|
[
"Apache-2.0"
] | 7
|
2021-10-04T12:28:17.000Z
|
2022-01-13T16:41:47.000Z
|
tests/test_dataframe.py
|
questdb/pykit
|
c8aac35ab57b88d422f40126380f11f1d1e2d143
|
[
"Apache-2.0"
] | null | null | null |
tests/test_dataframe.py
|
questdb/pykit
|
c8aac35ab57b88d422f40126380f11f1d1e2d143
|
[
"Apache-2.0"
] | 1
|
2022-02-10T05:53:24.000Z
|
2022-02-10T05:53:24.000Z
|
#
# ___ _ ____ ____
# / _ \ _ _ ___ ___| |_| _ \| __ )
# | | | | | | |/ _ \/ __| __| | | | _ \
# | |_| | |_| | __/\__ \ |_| |_| | |_) |
# \__\_\\__,_|\___||___/\__|____/|____/
#
# Copyright (c) 2014-2019 Appsicle
# Copyright (c) 2019-2020 QuestDB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import math
import os
import numpy as np
import pandas as pd
from pykit import (
create_table,
insert_values,
drop_table,
to_timestamp,
df_from_table
)
from tests.util import BaseTestTest
class DataFrameFromTablesTest(BaseTestTest):
    """Integration tests for ``df_from_table``.

    Each test creates a QuestDB table, inserts known rows, builds a pandas
    DataFrame directly over the table's memory and checks content, shape and
    the read-only nature of the underlying buffers.  Memory snapshots taken
    around DataFrame construction are reported from ``finally`` blocks.

    Fix applied throughout: the snapshot variables read inside ``finally``
    are now pre-bound to ``None`` before entering ``try``.  Previously, if an
    early statement (e.g. ``insert_values``) raised, the ``finally`` clause
    hit a ``NameError`` on the unbound snapshot name and masked the real
    failure.
    """

    def test_read_only(self):
        """Columns mapped over table memory must reject in-place writes."""

        def assert_read_only(iloc):
            # NumPy raises ValueError on assignment into a read-only buffer.
            try:
                iloc[0] = 999  # Should be non-editable
                self.fail("MUST BE READ ONLY (1)")
            except ValueError as e:
                self.assertEqual('assignment destination is read-only', str(e))

        table_name = 'test_read_only'
        columns = (('int', 'INT'), ('double', 'DOUBLE'), ('ts', 'TIMESTAMP'))
        drop_table(table_name)
        create_table(table_name, columns, designated='ts')
        snapshot_before_df = None  # pre-bound: see class docstring
        try:
            insert_values(
                table_name,
                columns,
                (0, 1.000001, to_timestamp('2021-10-01 02:00:00.123456')),
                (1, 2.002002, to_timestamp('2021-10-01 02:01:00.123456')),
                (2, 4.404404, to_timestamp('2021-10-02 02:02:00.123456')))
            snapshot_before_df = self.take_mem_snapshot()
            df = df_from_table(table_name, columns)
            assert_read_only(df["int"].iloc)
            assert_read_only(df["double"].iloc)
            assert_read_only(df.index.array)
            # The designated timestamp becomes the index, so looking it up
            # as a regular column must fail with KeyError.
            try:
                df["ts"].iloc
            except KeyError as ke:
                self.assertEqual("'ts'", str(ke))
        finally:
            if snapshot_before_df is not None:
                self.report_mem_snapshot_diff(snapshot_before_df)
            drop_table(table_name)

    def test_no_partitions(self):
        """Read a non-partitioned table; designated ts becomes the index."""
        table_name = 'test_no_partitions'
        columns = (
            ('int', 'INT'),
            ('double', 'DOUBLE'),
            ('ts', 'TIMESTAMP'))
        drop_table(table_name)
        create_table(table_name, columns, designated='ts', partition_by=None)
        snapshot_before_df = None  # pre-bound: see class docstring
        try:
            insert_values(
                table_name,
                columns,
                (0, 1.000001, to_timestamp('2021-10-01 02:00:00.123456')),
                (1, 2.002002, to_timestamp('2021-10-01 02:01:00.123456')),
                (2, 4.404404, to_timestamp('2021-10-02 02:02:00.123456')),
                (3, 22 / 7, to_timestamp('2021-10-02 02:03:00.123456')),
                (4, 0.798117, to_timestamp('2021-10-03 02:04:00.123456')),
                (5, math.sqrt(2), to_timestamp('2021-10-03 02:05:00.123456')),
                (6, math.sin(math.radians(45.0)), to_timestamp('2021-10-03 02:06:00.123456'))
            )
            self.assert_table_content(
                table_name,
                '(0, 1.000001, datetime.datetime(2021, 10, 1, 2, 0, 0, 123456))' + os.linesep +
                '(1, 2.002002, datetime.datetime(2021, 10, 1, 2, 1, 0, 123456))' + os.linesep +
                '(2, 4.4044039999999995, datetime.datetime(2021, 10, 2, 2, 2, 0, 123456))' + os.linesep +
                '(3, 3.1428571428571432, datetime.datetime(2021, 10, 2, 2, 3, 0, 123456))' + os.linesep +
                '(4, 0.798117, datetime.datetime(2021, 10, 3, 2, 4, 0, 123456))' + os.linesep +
                '(5, 1.4142135623730951, datetime.datetime(2021, 10, 3, 2, 5, 0, 123456))' + os.linesep +
                '(6, 0.7071067811865475, datetime.datetime(2021, 10, 3, 2, 6, 0, 123456))' + os.linesep)
            snapshot_before_df = self.take_mem_snapshot()
            df = df_from_table(table_name, columns)
            pd.set_option('display.width', 800)
            pd.set_option('max_columns', 4)
            self.assertEqual(
                ' int double' + os.linesep +
                'ts ' + os.linesep +
                '1633053600123456 0 1.000001' + os.linesep +
                '1633053660123456 1 2.002002' + os.linesep +
                '1633140120123456 2 4.404404' + os.linesep +
                '1633140180123456 3 3.142857' + os.linesep +
                '1633226640123456 4 0.798117' + os.linesep +
                '1633226700123456 5 1.414214' + os.linesep +
                '1633226760123456 6 0.707107',
                str(df))
            self.assertEqual("Index(['int', 'double'], dtype='object')", str(df.columns))
            self.assertEqual((7, 2), df.shape)
            self.assertEqual(7, len(df))
        finally:
            if snapshot_before_df is not None:
                self.report_mem_snapshot_diff(snapshot_before_df)
            drop_table(table_name)

    def test_with_partitions(self):
        """Read a DAY-partitioned table; rows come back in ts order across
        partitions."""
        table_name = 'test_with_partitions'
        columns = (
            ('int', 'INT'),
            ('double', 'DOUBLE'),
            ('ts', 'TIMESTAMP'))
        drop_table(table_name)
        create_table(table_name, columns, designated='ts', partition_by='DAY')
        snapshot_before_df = None  # pre-bound: see class docstring
        try:
            insert_values(
                table_name,
                columns,
                (0, 1.000001, to_timestamp('2021-10-01 02:00:00.123456')),
                (1, 2.002002, to_timestamp('2021-10-01 02:01:00.123456')),
                (2, 4.404404, to_timestamp('2021-10-02 02:02:00.123456')),
                (3, 22 / 7, to_timestamp('2021-10-01 02:03:00.123456')),
                (4, 0.798117, to_timestamp('2021-10-02 02:04:00.123456')),
                (5, math.sqrt(2), to_timestamp('2021-10-03 02:05:00.123456')),
                (6, math.sin(math.radians(45.0)), to_timestamp('2021-10-01 02:06:00.123456'))
            )
            self.assert_table_content(
                table_name,
                '(0, 1.000001, datetime.datetime(2021, 10, 1, 2, 0, 0, 123456))' + os.linesep +
                '(1, 2.002002, datetime.datetime(2021, 10, 1, 2, 1, 0, 123456))' + os.linesep +
                '(3, 3.1428571428571432, datetime.datetime(2021, 10, 1, 2, 3, 0, 123456))' + os.linesep +
                '(6, 0.7071067811865475, datetime.datetime(2021, 10, 1, 2, 6, 0, 123456))' + os.linesep +
                '(2, 4.4044039999999995, datetime.datetime(2021, 10, 2, 2, 2, 0, 123456))' + os.linesep +
                '(4, 0.798117, datetime.datetime(2021, 10, 2, 2, 4, 0, 123456))' + os.linesep +
                '(5, 1.4142135623730951, datetime.datetime(2021, 10, 3, 2, 5, 0, 123456))' + os.linesep)
            snapshot_before_df = self.take_mem_snapshot()
            df = df_from_table(table_name, columns)
            pd.set_option('display.width', 800)
            pd.set_option('max_columns', 4)
            self.assertEqual(
                ' int double' + os.linesep +
                'ts ' + os.linesep +
                '1633053600123456 0 1.000001' + os.linesep +
                '1633053660123456 1 2.002002' + os.linesep +
                '1633053780123456 3 3.142857' + os.linesep +
                '1633053960123456 6 0.707107' + os.linesep +
                '1633140120123456 2 4.404404' + os.linesep +
                '1633140240123456 4 0.798117' + os.linesep +
                '1633226700123456 5 1.414214',
                str(df))
            self.assertEqual("Index(['int', 'double'], dtype='object')", str(df.columns))
            self.assertEqual((7, 2), df.shape)
            self.assertEqual(7, len(df))
        finally:
            if snapshot_before_df is not None:
                self.report_mem_snapshot_diff(snapshot_before_df)
            drop_table(table_name)

    def test_no_index(self):
        """No designated timestamp: ts stays a column and a default 'Idx'
        range index is used."""
        table_name = 'test_no_index'
        columns = (
            ('int', 'INT'),
            ('double', 'DOUBLE'),
            ('ts', 'TIMESTAMP'))
        drop_table(table_name)
        create_table(table_name, columns)
        snapshot_before_df = None  # pre-bound: see class docstring
        try:
            insert_values(
                table_name,
                columns,
                (0, 1.000001, to_timestamp('2021-10-01 02:00:00.123456')),
                (1, 2.002002, to_timestamp('2021-10-01 02:01:00.123456')),
                (2, 4.404404, to_timestamp('2021-10-02 02:02:00.123456')),
                (3, 22 / 7, to_timestamp('2021-10-02 02:03:00.123456')),
                (4, 0.798117, to_timestamp('2021-10-03 02:04:00.123456')),
                (5, math.sqrt(2), to_timestamp('2021-10-03 02:05:00.123456')),
                (6, math.sin(math.radians(45.0)), to_timestamp('2021-10-03 02:06:00.123456'))
            )
            self.assert_table_content(
                table_name,
                '(0, 1.000001, datetime.datetime(2021, 10, 1, 2, 0, 0, 123456))' + os.linesep +
                '(1, 2.002002, datetime.datetime(2021, 10, 1, 2, 1, 0, 123456))' + os.linesep +
                '(2, 4.4044039999999995, datetime.datetime(2021, 10, 2, 2, 2, 0, 123456))' + os.linesep +
                '(3, 3.1428571428571432, datetime.datetime(2021, 10, 2, 2, 3, 0, 123456))' + os.linesep +
                '(4, 0.798117, datetime.datetime(2021, 10, 3, 2, 4, 0, 123456))' + os.linesep +
                '(5, 1.4142135623730951, datetime.datetime(2021, 10, 3, 2, 5, 0, 123456))' + os.linesep +
                '(6, 0.7071067811865475, datetime.datetime(2021, 10, 3, 2, 6, 0, 123456))' + os.linesep)
            snapshot_before_df = self.take_mem_snapshot()
            df = df_from_table(table_name, columns)
            pd.set_option('display.width', 800)
            pd.set_option('max_columns', 4)
            self.assertEqual(
                ' int double ts' + os.linesep +
                'Idx ' + os.linesep +
                '0 0 1.000001 1633053600123456' + os.linesep +
                '1 1 2.002002 1633053660123456' + os.linesep +
                '2 2 4.404404 1633140120123456' + os.linesep +
                '3 3 3.142857 1633140180123456' + os.linesep +
                '4 4 0.798117 1633226640123456' + os.linesep +
                '5 5 1.414214 1633226700123456' + os.linesep +
                '6 6 0.707107 1633226760123456',
                str(df))
            self.assertEqual("Index(['int', 'double', 'ts'], dtype='object')", str(df.columns))
            self.assertEqual((7, 3), df.shape)
            self.assertEqual(7, len(df))
        finally:
            if snapshot_before_df is not None:
                self.report_mem_snapshot_diff(snapshot_before_df)
            drop_table(table_name)

    def test_user_index(self):
        """A caller-supplied ``usr_index`` (RangeIndex step 2) replaces the
        default range index."""
        table_name = 'test_user_index'
        columns = (
            ('int', 'INT'),
            ('double', 'DOUBLE'),
            ('ts', 'TIMESTAMP'))
        drop_table(table_name)
        create_table(table_name, columns)
        snapshot_before_df = None  # pre-bound: see class docstring
        try:
            insert_values(
                table_name,
                columns,
                (0, 1.000001, to_timestamp('2021-10-01 02:00:00.123456')),
                (1, 2.002002, to_timestamp('2021-10-01 02:01:00.123456')),
                (2, 4.404404, to_timestamp('2021-10-02 02:02:00.123456')),
                (3, 22 / 7, to_timestamp('2021-10-02 02:03:00.123456')),
                (4, 0.798117, to_timestamp('2021-10-03 02:04:00.123456')),
                (5, math.sqrt(2), to_timestamp('2021-10-03 02:05:00.123456')),
                (6, math.sin(math.radians(45.0)), to_timestamp('2021-10-03 02:06:00.123456'))
            )
            self.assert_table_content(
                table_name,
                '(0, 1.000001, datetime.datetime(2021, 10, 1, 2, 0, 0, 123456))' + os.linesep +
                '(1, 2.002002, datetime.datetime(2021, 10, 1, 2, 1, 0, 123456))' + os.linesep +
                '(2, 4.4044039999999995, datetime.datetime(2021, 10, 2, 2, 2, 0, 123456))' + os.linesep +
                '(3, 3.1428571428571432, datetime.datetime(2021, 10, 2, 2, 3, 0, 123456))' + os.linesep +
                '(4, 0.798117, datetime.datetime(2021, 10, 3, 2, 4, 0, 123456))' + os.linesep +
                '(5, 1.4142135623730951, datetime.datetime(2021, 10, 3, 2, 5, 0, 123456))' + os.linesep +
                '(6, 0.7071067811865475, datetime.datetime(2021, 10, 3, 2, 6, 0, 123456))' + os.linesep)
            snapshot_before_df = self.take_mem_snapshot()
            df = df_from_table(table_name,
                               columns,
                               usr_index=pd.RangeIndex(start=0, stop=14, step=2, dtype=np.int32, name='Idx'))
            pd.set_option('display.width', 800)
            pd.set_option('max_columns', 4)
            self.assertEqual(
                ' int double ts' + os.linesep +
                'Idx ' + os.linesep +
                '0 0 1.000001 1633053600123456' + os.linesep +
                '2 1 2.002002 1633053660123456' + os.linesep +
                '4 2 4.404404 1633140120123456' + os.linesep +
                '6 3 3.142857 1633140180123456' + os.linesep +
                '8 4 0.798117 1633226640123456' + os.linesep +
                '10 5 1.414214 1633226700123456' + os.linesep +
                '12 6 0.707107 1633226760123456',
                str(df))
            self.assertEqual("Index(['int', 'double', 'ts'], dtype='object')", str(df.columns))
            self.assertEqual((7, 3), df.shape)
            self.assertEqual(7, len(df))
        finally:
            if snapshot_before_df is not None:
                self.report_mem_snapshot_diff(snapshot_before_df)
            drop_table(table_name)

    def test_large_table(self):
        """Smoke test: build a DataFrame over a 1M-row random table and
        report the memory cost of each phase."""
        table_name = 'test_large_table'
        columns = (
            ('long', 'LONG'),
            ('int', 'INT'),
            ('boolean', 'BOOLEAN'),
            ('date', 'DATE'),
            ('double', 'DOUBLE'),
            ('ts', 'TIMESTAMP'))
        snapshot_start = self.take_mem_snapshot()
        drop_table(table_name)
        self.create_rnd_table(table_name, num_rows=1000000)
        # Fix: previously snapshot_after_df was only bound mid-try, so a
        # failure in df_from_table raised NameError inside finally.
        snapshot_after_df = None
        try:
            snapshot_before_df = self.report_mem_snapshot_diff(snapshot_start, '\nCREATE TABLE')
            df = df_from_table(table_name, columns)
            snapshot_after_df = self.report_mem_snapshot_diff(snapshot_before_df, 'DF FROM TABLE')
            pd.set_option('display.width', 800)
            pd.set_option('max_columns', len(columns))
            print(df.tail())
            print(df.describe())
        finally:
            if snapshot_after_df is not None:
                self.report_mem_snapshot_diff(snapshot_after_df, 'SHOW AND TELL')
            drop_table(table_name)
| 49.586093
| 109
| 0.527746
| 1,789
| 14,975
| 4.250419
| 0.117943
| 0.071015
| 0.061152
| 0.069306
| 0.807601
| 0.784456
| 0.739348
| 0.725408
| 0.710153
| 0.699369
| 0
| 0.240654
| 0.333756
| 14,975
| 301
| 110
| 49.750831
| 0.521499
| 0.054558
| 0
| 0.682836
| 0
| 0.104478
| 0.327176
| 0.045577
| 0
| 0
| 0
| 0
| 0.097015
| 1
| 0.026119
| false
| 0
| 0.022388
| 0
| 0.052239
| 0.007463
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e63e779ae4fe9f83307508fb8fdddb463cb35bd6
| 5,439
|
py
|
Python
|
django_ecommerce/models.py
|
LarryNgugi/Ecommerce-System
|
ca3729071e6c8e21538940cdbab0691c3b29b166
|
[
"MIT"
] | null | null | null |
django_ecommerce/models.py
|
LarryNgugi/Ecommerce-System
|
ca3729071e6c8e21538940cdbab0691c3b29b166
|
[
"MIT"
] | null | null | null |
django_ecommerce/models.py
|
LarryNgugi/Ecommerce-System
|
ca3729071e6c8e21538940cdbab0691c3b29b166
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Category(models.Model):
    # Product category; referenced by Product.category below.
    name = models.CharField(max_length=50, blank=True, null=True)
    created_at = models.DateTimeField(auto_now_add=True)  # set once at insert
    updated_at = models.DateTimeField(auto_now=True)  # refreshed on every save
class Seller(models.Model):
    # Merchant account; referenced by Product.seller.
    name = models.CharField(max_length=50, blank=True, null=True)
    business_name = models.CharField(max_length=100, blank=False, null=False)
    business_reg_no = models.CharField(max_length=50, blank=False, null=False)
    # NOTE(review): IntegerField drops leading zeros and '+' prefixes in
    # phone numbers; CharField is the usual choice — confirm intent.
    # Also blank=False with null=True is inconsistent (required in forms,
    # optional in DB).
    phone_number = models.IntegerField(blank=False, null=True)
    email = models.EmailField(max_length=50, blank=False, null=False)
    external_url = models.SlugField(max_length=100, blank=True, null=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class Customer(models.Model):
    # Shopper account; referenced by CustomerAddress, Order, Review, Wishlist.
    name = models.CharField(max_length=50, blank=True, null=True)
    email = models.EmailField(max_length=50, blank=False, null=False)
    # NOTE(review): stores the password as a plain CharField — this is a
    # plain-text credential; Django's auth user model / password hashing
    # should be used instead. Flagging, not changing, as it alters schema.
    password = models.CharField(max_length=50, blank=False, null=False)
    # NOTE(review): same IntegerField-for-phone concern as Seller.
    phone_number = models.IntegerField(blank=False, null=True)
    image = models.ImageField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class Product(models.Model):
    # Item for sale, owned by a Seller and classified by a Category.
    name = models.CharField(max_length=50, blank=True, null=True)
    # NOTE(review): blank=True with null=False means the form may omit the
    # category but the DB still requires it — confirm which is intended.
    category = models.ForeignKey(Category, on_delete=models.CASCADE, blank=True, null=False)
    image_url = models.ImageField()
    price = models.IntegerField(blank=False, null=False)
    # NOTE(review): inventory as CharField — an IntegerField would allow
    # arithmetic/validation; confirm whether free-form values are needed.
    inventory = models.CharField(max_length=50, blank=False, null=False)
    seller = models.ForeignKey(Seller, on_delete=models.CASCADE, blank=False, null=False)
    description = models.TextField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class CustomerAddress(models.Model):
    # Delivery address belonging to a Customer; used by Shipping.
    customerId = models.ForeignKey(Customer, on_delete=models.CASCADE, blank=False, null=False)
    location = models.CharField(max_length=100, blank=True, null=False)
    pin = models.IntegerField(blank=False, null=False)  # postal/PIN code
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class Order(models.Model):
    # Customer purchase; line items live in OrderProduct.
    customerId = models.ForeignKey(Customer, on_delete=models.CASCADE, blank=False, null=False)
    orderNumber = models.CharField(max_length=20, blank=False, null=True)
    cost = models.IntegerField(blank=False, null=False)
    status = models.CharField(max_length=50, null=False, blank=True)
    shippingCost = models.IntegerField(blank=True, null=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class OrderProduct(models.Model):
    # Join table: one product line within an order.
    productId = models.ForeignKey(Product, on_delete=models.CASCADE, blank=False, null=True)
    orderId = models.ForeignKey(Order, on_delete=models.CASCADE, blank=False, null=True)
    # NOTE(review): quantity as CharField — an IntegerField seems intended.
    quantity = models.CharField(max_length=20, blank=False, null=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class Shipping(models.Model):
    # Links an order to the customer address it ships to.
    orderId = models.ForeignKey(Order, on_delete=models.CASCADE, blank=False, null=True)
    customerAddressId = models.ForeignKey(CustomerAddress, on_delete=models.CASCADE, blank=False, null=True)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class Payment(models.Model):
    # Payment record for an order.
    mode = models.CharField(max_length=10, blank=False, null=False)  # e.g. payment method
    amount = models.IntegerField(blank=False, null=False)
    orderId = models.ForeignKey(Order, on_delete=models.CASCADE, blank=False, null=True)
    invoiceNumber = models.CharField(max_length=10, blank=False, null=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class Review(models.Model):
    # Customer review of a product.
    # rating = models.
    # TODO(review): rating field above was left unfinished — decide on a
    # numeric rating field or remove the stub.
    message = models.TextField()
    productId = models.ForeignKey(Product, on_delete=models.CASCADE, blank=False, null=True)
    customerId = models.ForeignKey(Customer, on_delete=models.CASCADE, blank=False, null=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class Wishlist(models.Model):
    # Product saved by a customer for later.
    productId = models.ForeignKey(Product, on_delete=models.CASCADE, blank=False, null=True)
    customerId = models.ForeignKey(Customer, on_delete=models.CASCADE, blank=False, null=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class Offer(models.Model):
    # Time-bounded discount on a product.
    productId = models.ForeignKey(Product, on_delete=models.CASCADE, blank=False, null=True)
    offerAmount = models.IntegerField(blank=False, null=False)
    startDate = models.DateTimeField(blank=False, null=True)
    endDate = models.DateTimeField(blank=False, null=False)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
class Voucher(models.Model):
    # Discount voucher tied to a product.
    tag = models.CharField(max_length=50, blank=True, null=True)
    productId = models.ForeignKey(Product, on_delete=models.CASCADE, blank=False, null=True)
    amountDeducted = models.IntegerField(default=0, blank=True, null=False)
    status = models.CharField(max_length=50, null=False, blank=True)
    limit = models.DateTimeField(blank=False, null=False)  # expiry date of the voucher
| 46.487179
| 108
| 0.760985
| 718
| 5,439
| 5.623955
| 0.122563
| 0.0842
| 0.11788
| 0.148588
| 0.82838
| 0.812779
| 0.73898
| 0.73898
| 0.723873
| 0.667162
| 0
| 0.008833
| 0.125758
| 5,439
| 116
| 109
| 46.887931
| 0.840379
| 0.007722
| 0
| 0.522727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.011364
| 0.011364
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
051c14e9eb12e3318b8f214e8a3a16ee4d08332e
| 3,880
|
py
|
Python
|
properties/migrations/0012_auto_20200706_0055.py
|
Zayanto/Protocol-CRM
|
c81489d69de581d8216e20f7dd80089116f85c7b
|
[
"MIT"
] | null | null | null |
properties/migrations/0012_auto_20200706_0055.py
|
Zayanto/Protocol-CRM
|
c81489d69de581d8216e20f7dd80089116f85c7b
|
[
"MIT"
] | null | null | null |
properties/migrations/0012_auto_20200706_0055.py
|
Zayanto/Protocol-CRM
|
c81489d69de581d8216e20f7dd80089116f85c7b
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.7 on 2020-07-06 00:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 3.0.7).

    Relaxes 19 fields on ``stageopportunity`` to ``blank=True, null=True``
    (CharFields to max_length=200, numeric fields unchanged in type).
    Applied migrations must not be edited beyond comments.
    """

    dependencies = [
        ('properties', '0011_auto_20200701_1936'),
    ]

    operations = [
        migrations.AlterField(
            model_name='stageopportunity',
            name='address',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='apartament_number',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='asking_price',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='bathrooms',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='bedrooms',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='build_sqm',
            field=models.DecimalField(blank=True, decimal_places=1, max_digits=10, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='building',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='city',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='entrance',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='floor',
            field=models.DecimalField(blank=True, decimal_places=1, max_digits=10, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='kitchen',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='number_of_rooms',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='owner',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='potential_rent',
            field=models.DecimalField(blank=True, decimal_places=1, max_digits=10, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='reper',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='residence_complex',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='usable_sqm',
            field=models.DecimalField(blank=True, decimal_places=1, max_digits=10, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='vecinatati',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AlterField(
            model_name='stageopportunity',
            name='zipcode',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
    ]
| 35.59633
| 94
| 0.582732
| 360
| 3,880
| 6.144444
| 0.194444
| 0.17179
| 0.214738
| 0.249096
| 0.857143
| 0.857143
| 0.834991
| 0.834991
| 0.834991
| 0.834991
| 0
| 0.028096
| 0.302835
| 3,880
| 108
| 95
| 35.925926
| 0.789649
| 0.011598
| 0
| 0.745098
| 1
| 0
| 0.134099
| 0.006001
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009804
| 0
| 0.039216
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0529844d8687e3e9e48e19eef191451d38f910b2
| 2,794
|
py
|
Python
|
test_MovingAverage.py
|
eugenexlin/nice-face
|
c10bac3420414715aee9ed6f6b496e78bcb81f76
|
[
"MIT"
] | null | null | null |
test_MovingAverage.py
|
eugenexlin/nice-face
|
c10bac3420414715aee9ed6f6b496e78bcb81f76
|
[
"MIT"
] | null | null | null |
test_MovingAverage.py
|
eugenexlin/nice-face
|
c10bac3420414715aee9ed6f6b496e78bcb81f76
|
[
"MIT"
] | null | null | null |
import sys
import numpy as np
from nicefacelib.src.utils import MovingAverage
def test_MovingAverage_1():
    """Window of 3: same push/expect sequence as before, table-driven."""
    avg = MovingAverage.MovingAverage(3)
    assert avg.current() == 0
    steps = ((3, 3), (6, 4), (6, 5), (6, 6), (-6, 2), (-6, -2), (-6, -6))
    for pushed, expected in steps:
        avg.push(pushed)
        assert avg.current() == expected
def test_MovingAverage_2():
    """Window of 5: pushing zeros decays current() by one per step."""
    avg = MovingAverage.MovingAverage(5)
    assert avg.current() == 0
    avg.push(5)
    assert avg.current() == 5
    for expected in (4, 3, 2, 1, 0):
        avg.push(0)
        assert avg.current() == expected
def test_MovingAverage_ForceVal():
    """forceVal() snaps current() to the given value immediately."""
    avg = MovingAverage.MovingAverage(3)
    assert avg.current() == 0
    avg.push(3)
    assert avg.current() == 3
    avg.push(6)
    assert avg.current() == 4
    avg.forceVal(6)
    assert avg.current() == 6
    for expected in (2, -2, -6):
        avg.push(-6)
        assert avg.current() == expected
def test_MovingAverage_TestArr():
    """2-element list inputs, window of 3: same push sequence as before."""
    avg = MovingAverage.MovingAverage(3, 2)
    avg.forceVal([0, 0])
    np.testing.assert_array_equal(avg.current(), [0, 0])
    for expected in ([1, 2], [2, 4], [3, 6]):
        avg.push([3, 6])
        np.testing.assert_array_equal(avg.current(), expected)
def test_MovingAverage_TestTuple():
    """Same sequence as TestArr but with tuple inputs/expectations."""
    avg = MovingAverage.MovingAverage(3, 2)
    avg.forceVal((0, 0))
    np.testing.assert_array_equal(avg.current(), (0, 0))
    for expected in ((1, 2), (2, 4), (3, 6)):
        avg.push((3, 6))
        np.testing.assert_array_equal(avg.current(), expected)
def test_MovingAverage_TestFloat():
    """Float averaging with window 5: each push adds a fifth of the input."""
    avg = MovingAverage.MovingAverage(5, 2)
    avg.forceVal([0.0, 0.0])
    np.testing.assert_array_equal(avg.current(), [0.0, 0.0])
    for expected in ([0.2, 0.4], [0.4, 0.8], [0.6, 1.2]):
        avg.push([1, 2])
        np.testing.assert_array_equal(avg.current(), expected)
def test_MovingAverage_TestArr2():
    """First push lands directly; subsequent zero pushes decay the value."""
    avg = MovingAverage.MovingAverage(3, 2)
    avg.push([3, 6])
    np.testing.assert_array_equal(avg.current(), [3, 6])
    for expected in ([2, 4], [1, 2]):
        avg.push([0, 0])
        np.testing.assert_array_equal(avg.current(), expected)
| 29.723404
| 61
| 0.625984
| 422
| 2,794
| 4.040284
| 0.075829
| 0.217009
| 0.206452
| 0.175953
| 0.852786
| 0.844575
| 0.717889
| 0.713783
| 0.713783
| 0.71261
| 0
| 0.055655
| 0.196135
| 2,794
| 94
| 62
| 29.723404
| 0.703473
| 0
| 0
| 0.693182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.420455
| 1
| 0.079545
| false
| 0
| 0.034091
| 0
| 0.113636
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e9a8335b908064b77fad57b207895147750c0636
| 1,537
|
py
|
Python
|
tests/test_graphs_Edge.py
|
josiah-wolf-oberholtzer/uqbar
|
96f86eb6264b0677a9e2931a527769640e5658b6
|
[
"MIT"
] | 7
|
2018-12-02T05:59:54.000Z
|
2021-12-28T22:40:18.000Z
|
tests/test_graphs_Edge.py
|
josiah-wolf-oberholtzer/uqbar
|
96f86eb6264b0677a9e2931a527769640e5658b6
|
[
"MIT"
] | 16
|
2017-12-28T22:08:09.000Z
|
2022-02-26T14:47:23.000Z
|
tests/test_graphs_Edge.py
|
josiah-wolf-oberholtzer/uqbar
|
96f86eb6264b0677a9e2931a527769640e5658b6
|
[
"MIT"
] | 5
|
2020-03-28T14:57:47.000Z
|
2022-02-01T10:02:18.000Z
|
import unittest
import uqbar.graphs
from uqbar.strings import normalize
class TestCase(unittest.TestCase):
    """Exercises uqbar.graphs.Edge construction and formatting."""

    @staticmethod
    def _node_pair():
        # Fresh "foo"/"bar" node pair; Edge.attach mutates the nodes, so a
        # new pair is built for every attachment.
        return uqbar.graphs.Node(name="foo"), uqbar.graphs.Node(name="bar")

    def test___init__(self):
        node_a, node_b = self._node_pair()
        uqbar.graphs.Edge().attach(node_a, node_b)

    def test___format___str(self):
        node_a, node_b = self._node_pair()
        edge = uqbar.graphs.Edge().attach(node_a, node_b)
        assert format(edge) == repr(edge)
        node_a, node_b = self._node_pair()
        attributes = uqbar.graphs.Attributes(
            mode="edge", color="blue", style=["dotted"]
        )
        edge = uqbar.graphs.Edge(attributes=attributes).attach(node_a, node_b)
        assert format(edge) == repr(edge)

    def test___format___graphviz(self):
        node_a, node_b = self._node_pair()
        edge = uqbar.graphs.Edge().attach(node_a, node_b)
        assert format(edge, "graphviz") == "foo -> bar;"
        node_a, node_b = self._node_pair()
        attributes = uqbar.graphs.Attributes(
            mode="edge", color="blue", style=["dotted"]
        )
        edge = uqbar.graphs.Edge(attributes=attributes).attach(node_a, node_b)
        assert format(edge, "graphviz") == normalize(
            """
            foo -> bar [color=blue,
                style=dotted];
            """
        )
| 34.155556
| 78
| 0.601171
| 193
| 1,537
| 4.595855
| 0.165803
| 0.223224
| 0.169109
| 0.214205
| 0.797069
| 0.797069
| 0.797069
| 0.797069
| 0.76212
| 0.744081
| 0
| 0
| 0.25309
| 1,537
| 44
| 79
| 34.931818
| 0.772648
| 0
| 0
| 0.606061
| 0
| 0
| 0.058419
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 1
| 0.090909
| false
| 0
| 0.090909
| 0
| 0.212121
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e9abe86fad5b63d6ce80a83dd14c0d3e37722352
| 8,332
|
py
|
Python
|
port/modules/font/dvsm_12.py
|
diskman88/mpython-desktop-robot
|
01cd15fbeeba521ab874cf66f94d3909c4f8c39a
|
[
"MIT"
] | 53
|
2018-10-15T12:01:24.000Z
|
2019-11-22T09:31:02.000Z
|
port/modules/font/dvsm_12.py
|
diskman88/mpython-desktop-robot
|
01cd15fbeeba521ab874cf66f94d3909c4f8c39a
|
[
"MIT"
] | 10
|
2018-10-17T13:42:19.000Z
|
2019-11-25T06:42:40.000Z
|
port/modules/font/dvsm_12.py
|
diskman88/mpython-desktop-robot
|
01cd15fbeeba521ab874cf66f94d3909c4f8c39a
|
[
"MIT"
] | 26
|
2018-12-04T03:53:39.000Z
|
2019-11-22T03:40:05.000Z
|
# Code generated by font-to-py.py.
# Font: dsm.ttf
version = '0.26'
def height():
return 12
def max_width():
return 7
def hmap():
return True
def reverse():
return False
def monospaced():
return False
def min_ch():
return 32
def max_ch():
return 126
_font =\
b'\x07\x00\x00\xe0\x10\x30\x60\x40\x40\x00\x40\x00\x00\x00\x07\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x80'\
b'\x80\x80\x80\x80\x80\x00\x80\x00\x00\x00\x07\x00\x00\xa0\xa0\xa0'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x14\x24\x7e\x28\x28'\
b'\xfc\x48\x50\x00\x00\x00\x07\x00\x00\x20\x78\xa0\xa0\x70\x28\x28'\
b'\xf0\x20\x20\x00\x07\x00\x00\xe0\xa0\xe4\x18\x20\xdc\x14\x1c\x00'\
b'\x00\x00\x07\x00\x00\x70\x40\x40\x60\xb4\x94\x88\x7c\x00\x00\x00'\
b'\x07\x00\x00\x80\x80\x80\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00'\
b'\x40\x80\x80\x80\x80\x80\x80\x80\x80\x40\x00\x00\x07\x00\x80\x80'\
b'\x40\x40\x40\x40\x40\x40\x80\x80\x00\x00\x07\x00\x00\x20\xa8\x70'\
b'\x70\xa8\x20\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x20\x20\xf8'\
b'\x20\x20\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x40'\
b'\x40\x80\x00\x00\x07\x00\x00\x00\x00\x00\x00\xe0\x00\x00\x00\x00'\
b'\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x80\x80\x00\x00\x00'\
b'\x07\x00\x00\x08\x10\x10\x20\x20\x20\x40\x40\x80\x00\x00\x07\x00'\
b'\x00\x78\xcc\x84\x94\x84\x84\xcc\x78\x00\x00\x00\x07\x00\x00\xe0'\
b'\x20\x20\x20\x20\x20\x20\xf8\x00\x00\x00\x07\x00\x00\x78\x84\x04'\
b'\x0c\x18\x30\x40\xfc\x00\x00\x00\x07\x00\x00\x78\x84\x04\x78\x0c'\
b'\x04\x84\x78\x00\x00\x00\x07\x00\x00\x18\x18\x28\x48\xc8\xfc\x08'\
b'\x08\x00\x00\x00\x07\x00\x00\xf8\x80\x80\xf8\x0c\x04\x04\xf8\x00'\
b'\x00\x00\x07\x00\x00\x3c\x40\x80\xb8\xc4\x84\x84\x78\x00\x00\x00'\
b'\x07\x00\x00\xfc\x08\x08\x10\x10\x20\x20\x40\x00\x00\x00\x07\x00'\
b'\x00\x78\x84\x84\x78\x84\x84\x84\x78\x00\x00\x00\x07\x00\x00\x78'\
b'\x84\x84\x84\x7c\x04\x08\xf0\x00\x00\x00\x07\x00\x00\x00\x00\x80'\
b'\x80\x00\x00\x80\x80\x00\x00\x00\x07\x00\x00\x00\x00\x40\x40\x00'\
b'\x00\x40\x40\x80\x00\x00\x07\x00\x00\x00\x00\x04\x38\xc0\x70\x0c'\
b'\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\xfc\x00\xfc\x00\x00\x00'\
b'\x00\x00\x07\x00\x00\x00\x00\x80\x70\x0c\x38\xc0\x00\x00\x00\x00'\
b'\x07\x00\x00\xe0\x10\x30\x60\x40\x40\x00\x40\x00\x00\x00\x07\x00'\
b'\x00\x38\x4c\x84\x9c\xa4\xa4\x9c\xc0\x40\x38\x00\x07\x00\x00\x30'\
b'\x30\x30\x48\x48\x78\x84\x84\x00\x00\x00\x07\x00\x00\xf8\x84\x84'\
b'\xf8\x84\x84\x84\xf8\x00\x00\x00\x07\x00\x00\x38\x44\x80\x80\x80'\
b'\x80\x44\x38\x00\x00\x00\x07\x00\x00\xf0\x88\x84\x84\x84\x84\x88'\
b'\xf0\x00\x00\x00\x07\x00\x00\xfc\x80\x80\xfc\x80\x80\x80\xfc\x00'\
b'\x00\x00\x07\x00\x00\xfc\x80\x80\xfc\x80\x80\x80\x80\x00\x00\x00'\
b'\x07\x00\x00\x38\x44\x80\x80\x8c\x84\x44\x38\x00\x00\x00\x07\x00'\
b'\x00\x84\x84\x84\xfc\x84\x84\x84\x84\x00\x00\x00\x07\x00\x00\xf8'\
b'\x20\x20\x20\x20\x20\x20\xf8\x00\x00\x00\x07\x00\x00\x38\x08\x08'\
b'\x08\x08\x08\x88\x70\x00\x00\x00\x07\x00\x00\x88\x90\xa0\xc0\xa0'\
b'\x90\x88\x84\x00\x00\x00\x07\x00\x00\x80\x80\x80\x80\x80\x80\x80'\
b'\xfc\x00\x00\x00\x07\x00\x00\x84\xcc\xcc\xb4\xb4\x84\x84\x84\x00'\
b'\x00\x00\x07\x00\x00\x84\xc4\xa4\xa4\x94\x94\x8c\x84\x00\x00\x00'\
b'\x07\x00\x00\x78\xcc\x84\x84\x84\x84\xcc\x78\x00\x00\x00\x07\x00'\
b'\x00\xf8\x84\x84\x84\xf8\x80\x80\x80\x00\x00\x00\x07\x00\x00\x78'\
b'\xcc\x84\x84\x84\x84\xcc\x78\x0c\x00\x00\x07\x00\x00\xf8\x84\x84'\
b'\x84\xf8\x88\x84\x82\x00\x00\x00\x07\x00\x00\x78\x84\x80\xf0\x0c'\
b'\x04\x84\x78\x00\x00\x00\x07\x00\x00\xfe\x10\x10\x10\x10\x10\x10'\
b'\x10\x00\x00\x00\x07\x00\x00\x84\x84\x84\x84\x84\x84\x84\x78\x00'\
b'\x00\x00\x07\x00\x00\x84\x84\x48\x48\x48\x30\x30\x30\x00\x00\x00'\
b'\x07\x00\x00\x82\x92\x92\xaa\x6c\x6c\x44\x44\x00\x00\x00\x07\x00'\
b'\x00\x84\x48\x48\x30\x30\x48\x48\x84\x00\x00\x00\x07\x00\x00\xc6'\
b'\x44\x28\x38\x10\x10\x10\x10\x00\x00\x00\x07\x00\x00\xfc\x08\x08'\
b'\x10\x20\x60\x40\xfc\x00\x00\x00\x07\x00\xc0\x80\x80\x80\x80\x80'\
b'\x80\x80\x80\xc0\x00\x00\x07\x00\x00\x80\x40\x40\x20\x20\x20\x10'\
b'\x10\x08\x00\x00\x07\x00\xc0\x40\x40\x40\x40\x40\x40\x40\x40\xc0'\
b'\x00\x00\x07\x00\x00\x30\x48\x84\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\x00\x00\x07\x00'\
b'\x40\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00'\
b'\x00\xf0\x08\x78\x88\x88\x78\x00\x00\x00\x07\x00\x80\x80\x80\xf0'\
b'\x88\x88\x88\x88\xf0\x00\x00\x00\x07\x00\x00\x00\x00\x78\xc0\x80'\
b'\x80\xc0\x78\x00\x00\x00\x07\x00\x08\x08\x08\x78\x88\x88\x88\x88'\
b'\x78\x00\x00\x00\x07\x00\x00\x00\x00\x70\x88\xf8\x80\x80\x78\x00'\
b'\x00\x00\x07\x00\x18\x20\x20\xf8\x20\x20\x20\x20\x20\x00\x00\x00'\
b'\x07\x00\x00\x00\x00\x78\x88\x88\x88\x88\x78\x08\x70\x00\x07\x00'\
b'\x80\x80\x80\xb0\xc8\x88\x88\x88\x88\x00\x00\x00\x07\x00\x20\x00'\
b'\x00\xe0\x20\x20\x20\x20\xf8\x00\x00\x00\x07\x00\x20\x00\x00\xe0'\
b'\x20\x20\x20\x20\x20\x20\xc0\x00\x07\x00\x80\x80\x80\x90\xa0\xc0'\
b'\xa0\x90\x88\x00\x00\x00\x07\x00\xe0\x20\x20\x20\x20\x20\x20\x20'\
b'\x18\x00\x00\x00\x07\x00\x00\x00\x00\xf8\xa8\xa8\xa8\xa8\xa8\x00'\
b'\x00\x00\x07\x00\x00\x00\x00\xb0\xc8\x88\x88\x88\x88\x00\x00\x00'\
b'\x07\x00\x00\x00\x00\x70\x88\x88\x88\x88\x70\x00\x00\x00\x07\x00'\
b'\x00\x00\x00\xf0\x88\x88\x88\x88\xf0\x80\x80\x00\x07\x00\x00\x00'\
b'\x00\x78\x88\x88\x88\x88\x78\x08\x08\x00\x07\x00\x00\x00\x00\xf0'\
b'\x90\x80\x80\x80\x80\x00\x00\x00\x07\x00\x00\x00\x00\x78\x80\xe0'\
b'\x18\x08\xf0\x00\x00\x00\x07\x00\x00\x20\x20\xf8\x20\x20\x20\x20'\
b'\x38\x00\x00\x00\x07\x00\x00\x00\x00\x88\x88\x88\x88\x88\x78\x00'\
b'\x00\x00\x07\x00\x00\x00\x00\x88\x88\x50\x50\x50\x20\x00\x00\x00'\
b'\x07\x00\x00\x00\x00\x82\x82\x54\x54\x28\x28\x00\x00\x00\x07\x00'\
b'\x00\x00\x00\xd8\x50\x20\x20\x50\xd8\x00\x00\x00\x07\x00\x00\x00'\
b'\x00\x88\x90\x50\x50\x60\x20\x40\xc0\x00\x07\x00\x00\x00\x00\xf8'\
b'\x10\x30\x60\x40\xf8\x00\x00\x00\x07\x00\x38\x20\x20\x20\xc0\x20'\
b'\x20\x20\x20\x38\x00\x00\x07\x00\x80\x80\x80\x80\x80\x80\x80\x80'\
b'\x80\x80\x80\x00\x07\x00\xe0\x20\x20\x20\x18\x20\x20\x20\x20\xe0'\
b'\x00\x00\x07\x00\x00\x00\x00\x00\x00\xe0\x1c\x00\x00\x00\x00\x00'\
_index =\
b'\x00\x00\x0e\x00\x0e\x00\x1c\x00\x1c\x00\x2a\x00\x2a\x00\x38\x00'\
b'\x38\x00\x46\x00\x46\x00\x54\x00\x54\x00\x62\x00\x62\x00\x70\x00'\
b'\x70\x00\x7e\x00\x7e\x00\x8c\x00\x8c\x00\x9a\x00\x9a\x00\xa8\x00'\
b'\xa8\x00\xb6\x00\xb6\x00\xc4\x00\xc4\x00\xd2\x00\xd2\x00\xe0\x00'\
b'\xe0\x00\xee\x00\xee\x00\xfc\x00\xfc\x00\x0a\x01\x0a\x01\x18\x01'\
b'\x18\x01\x26\x01\x26\x01\x34\x01\x34\x01\x42\x01\x42\x01\x50\x01'\
b'\x50\x01\x5e\x01\x5e\x01\x6c\x01\x6c\x01\x7a\x01\x7a\x01\x88\x01'\
b'\x88\x01\x96\x01\x96\x01\xa4\x01\xa4\x01\xb2\x01\xb2\x01\xc0\x01'\
b'\xc0\x01\xce\x01\xce\x01\xdc\x01\xdc\x01\xea\x01\xea\x01\xf8\x01'\
b'\xf8\x01\x06\x02\x06\x02\x14\x02\x14\x02\x22\x02\x22\x02\x30\x02'\
b'\x30\x02\x3e\x02\x3e\x02\x4c\x02\x4c\x02\x5a\x02\x5a\x02\x68\x02'\
b'\x68\x02\x76\x02\x76\x02\x84\x02\x84\x02\x92\x02\x92\x02\xa0\x02'\
b'\xa0\x02\xae\x02\xae\x02\xbc\x02\xbc\x02\xca\x02\xca\x02\xd8\x02'\
b'\xd8\x02\xe6\x02\xe6\x02\xf4\x02\xf4\x02\x02\x03\x02\x03\x10\x03'\
b'\x10\x03\x1e\x03\x1e\x03\x2c\x03\x2c\x03\x3a\x03\x3a\x03\x48\x03'\
b'\x48\x03\x56\x03\x56\x03\x64\x03\x64\x03\x72\x03\x72\x03\x80\x03'\
b'\x80\x03\x8e\x03\x8e\x03\x9c\x03\x9c\x03\xaa\x03\xaa\x03\xb8\x03'\
b'\xb8\x03\xc6\x03\xc6\x03\xd4\x03\xd4\x03\xe2\x03\xe2\x03\xf0\x03'\
b'\xf0\x03\xfe\x03\xfe\x03\x0c\x04\x0c\x04\x1a\x04\x1a\x04\x28\x04'\
b'\x28\x04\x36\x04\x36\x04\x44\x04\x44\x04\x52\x04\x52\x04\x60\x04'\
b'\x60\x04\x6e\x04\x6e\x04\x7c\x04\x7c\x04\x8a\x04\x8a\x04\x98\x04'\
b'\x98\x04\xa6\x04\xa6\x04\xb4\x04\xb4\x04\xc2\x04\xc2\x04\xd0\x04'\
b'\xd0\x04\xde\x04\xde\x04\xec\x04\xec\x04\xfa\x04\xfa\x04\x08\x05'\
b'\x08\x05\x16\x05\x16\x05\x24\x05\x24\x05\x32\x05\x32\x05\x40\x05'\
_mvfont = memoryview(_font)
def get_ch(ch):
    """Look up the glyph for character *ch*.

    Returns a ``(bitmap, height, width)`` tuple where ``bitmap`` is a
    zero-copy memoryview slice of the font data and the height is the
    fixed value 12.  Characters outside printable ASCII (32..126) fall
    back to glyph code 63.
    """
    code = ord(ch)
    # Printable ASCII codes are shifted up by one into the glyph table;
    # anything else maps to the fallback glyph (63).
    code = code + 1 if 32 <= code <= 126 else 63
    entry = 4 * (code - 32)
    # Each index entry holds two little-endian 16-bit offsets: where this
    # glyph starts and where the next one starts.
    start = int.from_bytes(_index[entry:entry + 2], 'little')
    end = int.from_bytes(_index[entry + 2:entry + 4], 'little')
    # The first two bytes of a glyph record encode its pixel width.
    width = int.from_bytes(_font[start:start + 2], 'little')
    return _mvfont[start + 2:end], 12, width
| 55.919463
| 78
| 0.690711
| 1,949
| 8,332
| 2.941508
| 0.082606
| 0.358974
| 0.277865
| 0.169545
| 0.575615
| 0.525379
| 0.468864
| 0.34345
| 0.270888
| 0.153672
| 0
| 0.401569
| 0.051368
| 8,332
| 148
| 79
| 56.297297
| 0.32376
| 0.005521
| 0
| 0.029851
| 1
| 0.80597
| 0.852366
| 0.849662
| 0
| 1
| 0
| 0
| 0
| 1
| 0.059701
| false
| 0
| 0
| 0.052239
| 0.119403
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e9ac951d628726cd92821ee182b99c869f2d19da
| 27,823
|
py
|
Python
|
tests/layers/test_conv_gradfix.py
|
kynk94/torch-firewood
|
8ecd03c166bcadaae22a6cb2c1457a82f2c644eb
|
[
"MIT"
] | 1
|
2022-03-26T12:51:27.000Z
|
2022-03-26T12:51:27.000Z
|
tests/layers/test_conv_gradfix.py
|
kynk94/torch-firewood
|
8ecd03c166bcadaae22a6cb2c1457a82f2c644eb
|
[
"MIT"
] | null | null | null |
tests/layers/test_conv_gradfix.py
|
kynk94/torch-firewood
|
8ecd03c166bcadaae22a6cb2c1457a82f2c644eb
|
[
"MIT"
] | null | null | null |
import itertools
import random
from typing import Type, Union
import pytest
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
from firewood.layers import conv_gradfix
from tests.helpers.runif import runif
from tests.helpers.utils import gen_params
from tests.stylegan3.torch_utils.ops import conv2d_gradfix
def test_conv_output_shape():
    """Fuzz GFixConv2d with random kernel/stride/padding combinations and
    check the produced output shape against the closed-form expectation.
    """
    lr = 1e-2
    C = 1
    # NOTE(review): the int 1 mixed into the string modes presumably
    # exercises non-string padding_mode handling — confirm it is intentional.
    padding_modes = ["zeros", 1, "reflect", "replicate", "circular"]
    for i in range(200):
        K = random.randint(1, 10)  # kernel size
        S = random.randint(1, 3)  # stride
        P_len = random.choice((2, 4))  # 2 = per-axis padding, 4 = per-side
        P = tuple(random.randint(0, 3) for _ in range(P_len))
        # Lower bound keeps the padded input large enough for a valid conv.
        E = random.randint(S * K + sum(P), 100)
        # Expected spatial sizes; the `* 4 // P_len` factor normalizes the
        # summed padding so one formula covers both the 2- and 4-element forms.
        O_H = (E - K + sum(P[: P_len // 2]) * 4 // P_len) // S + 1
        O_W = (E - K + sum(P[P_len // 2 :]) * 4 // P_len) // S + 1
        input_shape = (1, C, E, E)
        output_shape = (1, C, O_H, O_W)
        padding_mode = padding_modes[i % len(padding_modes)]
        input = torch.randn(input_shape)
        conv = conv_gradfix.GFixConv2d(
            C, C, K, S, P, bias=True, padding_mode=padding_mode
        )
        # One full optimization step so backward is exercised as well.
        optimizer = torch.optim.Adam(conv.parameters(), lr=lr)
        optimizer.zero_grad()
        output: Tensor = conv(input)
        loss = F.mse_loss(output, torch.zeros_like(output))
        loss.backward()
        optimizer.step()
        assert (
            output.shape == output_shape
        ), "{} != {} for input: {}, kernel: {} stride: {} padding: {} padding_mode: {} reversed_padding: {}".format(
            tuple(output.shape),
            output_shape,
            input_shape,
            K,
            S,
            P,
            padding_mode,
            conv._reversed_padding_repeated_twice,
        )
def test_conv_transpose_output_shape():
    """Fuzz GFixConvTranspose2d with random kernel/stride/padding and check
    the output shape against the transposed-convolution closed form.
    """
    lr = 1e-2
    C = 1
    for i in range(200):
        K = random.randint(1, 10)  # kernel size
        S = random.randint(1, 3)  # stride
        P_len = random.choice((2, 4))  # 2 = per-axis padding, 4 = per-side
        P = tuple(random.randint(0, 3) for _ in range(P_len))
        E = random.randint(K + sum(P) + 10, 100)
        # Unpadded transposed-conv size; padding is then subtracted per axis.
        # The `* 4 // P_len` factor normalizes the 2- and 4-element forms.
        O = (E - 1) * S + K
        O_H = O - sum(P[: P_len // 2]) * 4 // P_len
        O_W = O - sum(P[P_len // 2 :]) * 4 // P_len
        input_shape = (1, C, E, E)
        output_shape = (1, C, O_H, O_W)
        input = torch.randn(input_shape)
        conv = conv_gradfix.GFixConvTranspose2d(C, C, K, S, P, bias=True)
        # One full optimization step so backward is exercised as well.
        optimizer = torch.optim.Adam(conv.parameters(), lr=lr)
        optimizer.zero_grad()
        output: Tensor = conv(input)
        loss = F.mse_loss(output, torch.zeros_like(output))
        loss.backward()
        optimizer.step()
        assert (
            output.shape == output_shape
        ), "{} != {} for input: {}, kernel: {} stride: {} padding: {} reversed_padding: {}".format(
            tuple(output.shape),
            output_shape,
            input_shape,
            K,
            S,
            P,
            conv._reversed_padding_repeated_twice,
        )
@pytest.mark.parametrize(
    *gen_params(
        ["rank", "stride", "padding"],
        itertools.product([1, 2, 3], [1, 2], [0, "same"]),
    )
)
def test_no_weight_gradients_in_gfix_conv(
    rank: int, stride: int, padding: Union[str, int]
) -> None:
    """Weight gradients taken inside no_weight_gradients_in_gfix_conv()
    must differ from those of an identically-parameterized conv computed
    outside the context manager.
    """
    in_channels = 3
    out_channels = 10
    kernel_size = 5
    embedding_size = 10
    # Two independent leaves sharing the same random values.
    sample = torch.randn(
        size=(2, in_channels, *(embedding_size,) * rank), requires_grad=True
    )
    sample_twin = sample.detach().requires_grad_()
    operation: Type[conv_gradfix._GFixConvNd] = getattr(
        conv_gradfix, f"GFixConv{rank}d"
    )
    conv_kwargs = dict(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=True,
    )
    conv = operation(**conv_kwargs)
    conv_twin = operation(**conv_kwargs)
    # Share parameters so the only difference is the context manager.
    conv_twin.weight.data = conv.weight.data
    conv_twin.bias.data = conv.bias.data
    out: Tensor = conv(sample)
    out_twin: Tensor = conv_twin(sample_twin)
    with conv_gradfix.no_weight_gradients_in_gfix_conv():
        grad_in_ctx = torch.autograd.grad(
            outputs=[out.square().sum() + conv.weight.square().sum()],
            inputs=[conv.weight],
            create_graph=True,
        )[0]
    grad_outside_ctx = torch.autograd.grad(
        outputs=[out_twin.square().sum() + conv_twin.weight.square().sum()],
        inputs=[conv_twin.weight],
        create_graph=True,
    )[0]
    assert not torch.allclose(
        grad_in_ctx, grad_outside_ctx, atol=1e-1
    ), f"Forward weight_grad should be different. l1: {F.l1_loss(grad_in_ctx, grad_outside_ctx)}"
@pytest.mark.parametrize(*gen_params(["rank"], [1, 2, 3]))
def test_conv_same_padding_gradient_with_nn(rank: int) -> None:
    """For kernel 5 / stride 2, padding="same" must behave exactly like an
    explicit symmetric padding of 2: same output shape, same forward
    values, and same double-backward weight gradient.
    """
    in_channels = 3
    out_channels = 10
    kernel_size = 5
    stride = 2
    embedding_size = 7
    # Two independent leaves sharing the same random values.
    sample = torch.randn(
        size=(2, in_channels, *(embedding_size,) * rank), requires_grad=True
    )
    sample_twin = sample.detach().requires_grad_()
    operation: Type[conv_gradfix._GFixConvNd] = getattr(
        conv_gradfix, f"GFixConv{rank}d"
    )
    base_kwargs = dict(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        bias=True,
    )
    same_conv = operation(padding="same", **base_kwargs)
    int_conv = operation(padding=2, **base_kwargs)
    # Share parameters so both convolutions are numerically identical.
    int_conv.weight.data = same_conv.weight.data
    int_conv.bias.data = same_conv.bias.data
    out_same: Tensor = same_conv(sample)
    out_int: Tensor = int_conv(sample_twin)
    assert (
        out_same.shape == out_int.shape
    ), f"Forward output shape mismatch. {out_same.shape} != {out_int.shape}"
    assert torch.allclose(
        out_same, out_int
    ), f"Forward result mismatch. l1: {F.l1_loss(out_same, out_int)}"
    grad_same = torch.autograd.grad(
        outputs=[out_same.square().sum() + same_conv.weight.square().sum()],
        inputs=[same_conv.weight],
        create_graph=True,
    )[0]
    grad_int = torch.autograd.grad(
        outputs=[out_int.square().sum() + int_conv.weight.square().sum()],
        inputs=[int_conv.weight],
        create_graph=True,
    )[0]
    assert torch.allclose(
        grad_same, grad_int
    ), f"Forward weight_grad mismatch. l1: {F.l1_loss(grad_same, grad_int)}"
@pytest.mark.parametrize(
    *gen_params(
        ["rank", "stride", "padding"],
        itertools.product([1, 2, 3], [1, 2], [1, 2]),
    )
)
def test_conv_with_nn_cpu(rank: int, stride: int, padding: int) -> None:
    """GFixConvNd must match torch.nn.ConvNd on CPU: forward output,
    double-backward weight gradient, and all parameters after one Adam step.
    """
    lr = 1e-2
    in_channels = 3
    out_channels = 10
    kernel_size = 5
    embedding_size = 10
    x_custom = torch.randn(
        size=(2, in_channels, *(embedding_size,) * rank), requires_grad=True
    )
    # Independent leaf with the same values so each conv gets its own grads.
    x_original = x_custom.detach().requires_grad_()
    custom_operation: Type[conv_gradfix._GFixConvNd] = getattr(
        conv_gradfix, f"GFixConv{rank}d"
    )
    nn_operation: Type[nn.modules.conv._ConvNd] = getattr(nn, f"Conv{rank}d")
    custom_conv = custom_operation(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=True,
    )
    nn_conv = nn_operation(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=True,
    )
    # Share parameters so both branches start numerically identical.
    nn_conv.weight.data = custom_conv.weight.data
    nn_conv.bias.data = custom_conv.bias.data
    custom_parameters = [x_custom, *custom_conv.parameters()]
    original_parameters = [x_original, *nn_conv.parameters()]
    optimizer_custom = torch.optim.Adam(custom_parameters, lr=lr)
    optimizer_original = torch.optim.Adam(original_parameters, lr=lr)
    optimizer_custom.zero_grad()
    optimizer_original.zero_grad()
    y_custom: Tensor = custom_conv(x_custom)
    y_original: Tensor = nn_conv(x_original)
    loss_custom = y_custom.square().sum()
    loss_original = y_original.square().sum()
    # create_graph=True so these grads can feed a second (double) backward.
    weight_grad_custom = torch.autograd.grad(
        outputs=[loss_custom], inputs=[custom_conv.weight], create_graph=True
    )[0]
    weight_grad_original = torch.autograd.grad(
        outputs=[loss_original], inputs=[nn_conv.weight], create_graph=True
    )[0]
    absolute_tolerence = 1e-6 * 10**rank  # looser tolerance for higher ranks
    assert torch.allclose(
        y_custom, y_original
    ), f"Forward output mismatch. l1: {F.l1_loss(y_custom, y_original)}"
    assert torch.allclose(
        weight_grad_custom, weight_grad_original, atol=absolute_tolerence
    ), f"Forward weight_grad mismatch. l1: {F.l1_loss(weight_grad_custom, weight_grad_original)}"
    # Penalizing the gradient itself forces a double-backward through conv.
    loss_custom += weight_grad_custom.square().sum()
    loss_original += weight_grad_original.square().sum()
    loss_custom.backward()
    loss_original.backward()
    optimizer_custom.step()
    optimizer_original.step()
    assert torch.allclose(
        x_custom, x_original
    ), f"Backward input mismatch. l1: {F.l1_loss(x_custom, x_original)}"
    assert torch.allclose(
        custom_conv.weight, nn_conv.weight
    ), f"Backward weight mismatch. l1: {F.l1_loss(custom_conv.weight, nn_conv.weight)}"
    assert torch.allclose(
        custom_conv.bias, nn_conv.bias
    ), f"Backward bias mismatch. l1: {F.l1_loss(custom_conv.bias, nn_conv.bias)}"
@runif(min_gpus=1)
@pytest.mark.parametrize(
    *gen_params(
        ["rank", "stride", "padding"],
        itertools.product(range(1, 4), range(1, 3), range(2)),
    )
)
def test_conv_with_nn_gpu(rank: int, stride: int, padding: int) -> None:
    """CUDA counterpart of test_conv_with_nn_cpu: GFixConvNd must match
    torch.nn.ConvNd for forward output, double-backward weight gradient,
    and all parameters after one Adam step.
    """
    lr = 1e-2
    in_channels = 3
    out_channels = 10
    kernel_size = 5
    embedding_size = 10
    x_custom = torch.randn(
        size=(2, in_channels, *(embedding_size,) * rank),
        requires_grad=True,
        device="cuda",
    )
    # Independent leaf with the same values so each conv gets its own grads.
    x_original = x_custom.detach().requires_grad_()
    custom_operation: Type[conv_gradfix._GFixConvNd] = getattr(
        conv_gradfix, f"GFixConv{rank}d"
    )
    nn_operation: Type[nn.modules.conv._ConvNd] = getattr(nn, f"Conv{rank}d")
    custom_conv = custom_operation(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=True,
    ).cuda()
    nn_conv = nn_operation(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=True,
    ).cuda()
    # Share parameters so both branches start numerically identical.
    nn_conv.weight.data = custom_conv.weight.data
    nn_conv.bias.data = custom_conv.bias.data
    custom_parameters = [x_custom, *custom_conv.parameters()]
    original_parameters = [x_original, *nn_conv.parameters()]
    optimizer_custom = torch.optim.Adam(custom_parameters, lr=lr)
    optimizer_original = torch.optim.Adam(original_parameters, lr=lr)
    optimizer_custom.zero_grad()
    optimizer_original.zero_grad()
    y_custom: Tensor = custom_conv(x_custom)
    y_original: Tensor = nn_conv(x_original)
    loss_custom = y_custom.square().sum()
    loss_original = y_original.square().sum()
    # create_graph=True so these grads can feed a second (double) backward.
    weight_grad_custom = torch.autograd.grad(
        outputs=[loss_custom], inputs=[custom_conv.weight], create_graph=True
    )[0]
    weight_grad_original = torch.autograd.grad(
        outputs=[loss_original], inputs=[nn_conv.weight], create_graph=True
    )[0]
    absolute_tolerence = 1e-7 * 10**rank  # looser tolerance for higher ranks
    assert torch.allclose(
        y_custom, y_original
    ), f"Forward output mismatch. l1: {F.l1_loss(y_custom, y_original)}"
    assert torch.allclose(
        weight_grad_custom, weight_grad_original, atol=absolute_tolerence
    ), f"Forward weight_grad mismatch. l1: {F.l1_loss(weight_grad_custom, weight_grad_original)}"
    # Penalizing the gradient itself forces a double-backward through conv.
    loss_custom += weight_grad_custom.square().sum()
    loss_original += weight_grad_original.square().sum()
    loss_custom.backward()
    loss_original.backward()
    optimizer_custom.step()
    optimizer_original.step()
    assert torch.allclose(
        x_custom, x_original
    ), f"Backward input mismatch. l1: {F.l1_loss(x_custom, x_original)}"
    assert torch.allclose(
        custom_conv.weight, nn_conv.weight
    ), f"Backward weight mismatch. l1: {F.l1_loss(custom_conv.weight, nn_conv.weight)}"
    assert torch.allclose(
        custom_conv.bias, nn_conv.bias
    ), f"Backward bias mismatch. l1: {F.l1_loss(custom_conv.bias, nn_conv.bias)}"
@pytest.mark.parametrize(
    *gen_params(
        ["rank", "stride", "padding"],
        itertools.product(range(1, 4), range(1, 3), range(2)),
    )
)
def test_conv_transpose_with_nn_cpu(
    rank: int, stride: int, padding: int
) -> None:
    """GFixConvTransposeNd must match torch.nn.ConvTransposeNd on CPU:
    forward output, double-backward weight gradient, and all parameters
    after one Adam step.
    """
    lr = 1e-2
    in_channels = 3
    out_channels = 10
    kernel_size = 5
    embedding_size = 10
    x_custom = torch.randn(
        size=(2, in_channels, *(embedding_size,) * rank), requires_grad=True
    )
    # Independent leaf with the same values so each conv gets its own grads.
    x_original = x_custom.detach().requires_grad_()
    custom_operation: Type[conv_gradfix._GFixConvNd] = getattr(
        conv_gradfix, f"GFixConvTranspose{rank}d"
    )
    nn_operation: Type[nn.modules.conv._ConvTransposeNd] = getattr(
        nn, f"ConvTranspose{rank}d"
    )
    custom_conv = custom_operation(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=True,
    )
    nn_conv = nn_operation(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=True,
    )
    # Share parameters so both branches start numerically identical.
    nn_conv.weight.data = custom_conv.weight.data
    nn_conv.bias.data = custom_conv.bias.data
    custom_parameters = [x_custom, *custom_conv.parameters()]
    original_parameters = [x_original, *nn_conv.parameters()]
    optimizer_custom = torch.optim.Adam(custom_parameters, lr=lr)
    optimizer_original = torch.optim.Adam(original_parameters, lr=lr)
    optimizer_custom.zero_grad()
    optimizer_original.zero_grad()
    y_custom: Tensor = custom_conv(x_custom)
    y_original: Tensor = nn_conv(x_original)
    loss_custom = y_custom.square().sum()
    loss_original = y_original.square().sum()
    # create_graph=True so these grads can feed a second (double) backward.
    weight_grad_custom = torch.autograd.grad(
        outputs=[loss_custom], inputs=[custom_conv.weight], create_graph=True
    )[0]
    weight_grad_original = torch.autograd.grad(
        outputs=[loss_original], inputs=[nn_conv.weight], create_graph=True
    )[0]
    absolute_tolerence = 1e-7 * 10**rank  # looser tolerance for higher ranks
    assert torch.allclose(
        y_custom, y_original, atol=absolute_tolerence
    ), f"Forward output mismatch. l1: {F.l1_loss(y_custom, y_original)}"
    assert torch.allclose(
        weight_grad_custom, weight_grad_original, atol=absolute_tolerence
    ), f"Forward weight_grad mismatch. l1: {F.l1_loss(weight_grad_custom, weight_grad_original)}"
    # Penalizing the gradient itself forces a double-backward through conv.
    loss_custom += weight_grad_custom.square().sum()
    loss_original += weight_grad_original.square().sum()
    loss_custom.backward()
    loss_original.backward()
    optimizer_custom.step()
    optimizer_original.step()
    assert torch.allclose(
        x_custom, x_original
    ), f"Backward input mismatch. l1: {F.l1_loss(x_custom, x_original)}"
    assert torch.allclose(
        custom_conv.weight, nn_conv.weight
    ), f"Backward weight mismatch. l1: {F.l1_loss(custom_conv.weight, nn_conv.weight)}"
    assert torch.allclose(
        custom_conv.bias, nn_conv.bias
    ), f"Backward bias mismatch. l1: {F.l1_loss(custom_conv.bias, nn_conv.bias)}"
@runif(min_gpus=1)
@pytest.mark.parametrize(
    *gen_params(
        ["rank", "stride", "padding"],
        itertools.product(range(1, 4), range(1, 3), range(2)),
    )
)
def test_conv_transpose_with_nn_gpu(
    rank: int, stride: int, padding: int
) -> None:
    """CUDA counterpart of test_conv_transpose_with_nn_cpu: GFixConvTransposeNd
    must match torch.nn.ConvTransposeNd for forward output, double-backward
    weight gradient, and all parameters after one Adam step.
    """
    lr = 1e-2
    in_channels = 3
    out_channels = 10
    kernel_size = 5
    embedding_size = 10
    x_custom = torch.randn(
        size=(2, in_channels, *(embedding_size,) * rank),
        requires_grad=True,
        device="cuda",
    )
    # Independent leaf with the same values so each conv gets its own grads.
    x_original = x_custom.detach().requires_grad_()
    custom_operation: Type[conv_gradfix._GFixConvNd] = getattr(
        conv_gradfix, f"GFixConvTranspose{rank}d"
    )
    nn_operation: Type[nn.modules.conv._ConvTransposeNd] = getattr(
        nn, f"ConvTranspose{rank}d"
    )
    custom_conv = custom_operation(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=True,
    ).cuda()
    nn_conv = nn_operation(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=True,
    ).cuda()
    # Share parameters so both branches start numerically identical.
    nn_conv.weight.data = custom_conv.weight.data
    nn_conv.bias.data = custom_conv.bias.data
    custom_parameters = [x_custom, *custom_conv.parameters()]
    original_parameters = [x_original, *nn_conv.parameters()]
    optimizer_custom = torch.optim.Adam(custom_parameters, lr=lr)
    optimizer_original = torch.optim.Adam(original_parameters, lr=lr)
    optimizer_custom.zero_grad()
    optimizer_original.zero_grad()
    y_custom: Tensor = custom_conv(x_custom)
    y_original: Tensor = nn_conv(x_original)
    loss_custom = y_custom.square().sum()
    loss_original = y_original.square().sum()
    # create_graph=True so these grads can feed a second (double) backward.
    weight_grad_custom = torch.autograd.grad(
        outputs=[loss_custom], inputs=[custom_conv.weight], create_graph=True
    )[0]
    weight_grad_original = torch.autograd.grad(
        outputs=[loss_original], inputs=[nn_conv.weight], create_graph=True
    )[0]
    # Tolerance grows with rank and shrinks with padding for this CUDA path.
    absolute_tolerence = 5e-5 * 10 ** (rank - padding)
    assert torch.allclose(
        y_custom, y_original, rtol=1e-4, atol=absolute_tolerence
    ), f"Forward output mismatch. l1: {F.l1_loss(y_custom, y_original)}"
    assert torch.allclose(
        weight_grad_custom,
        weight_grad_original,
        rtol=1e-4,
        atol=absolute_tolerence,
    ), f"Forward weight_grad mismatch. l1: {F.l1_loss(weight_grad_custom, weight_grad_original)}"
    # Penalizing the gradient itself forces a double-backward through conv.
    loss_custom += weight_grad_custom.square().sum()
    loss_original += weight_grad_original.square().sum()
    loss_custom.backward()
    loss_original.backward()
    optimizer_custom.step()
    optimizer_original.step()
    assert torch.allclose(
        x_custom, x_original
    ), f"Backward input mismatch. l1: {F.l1_loss(x_custom, x_original)}"
    assert torch.allclose(
        custom_conv.weight, nn_conv.weight
    ), f"Backward weight mismatch. l1: {F.l1_loss(custom_conv.weight, nn_conv.weight)}"
    assert torch.allclose(
        custom_conv.bias, nn_conv.bias
    ), f"Backward bias mismatch. l1: {F.l1_loss(custom_conv.bias, nn_conv.bias)}"
@runif(min_gpus=1)
@pytest.mark.parametrize(
    *gen_params(["stride", "padding"], itertools.product(range(1, 3), range(2)))
)
def test_conv2d_with_stylegan_gpu(stride: int, padding: int) -> None:
    """GFixConv2d must match StyleGAN3's conv2d_gradfix.conv2d on CUDA:
    forward output, double-backward weight gradient, and parameters after
    one Adam step.
    """
    # Enable the StyleGAN gradfix path; reset it in ``finally`` so a failing
    # assertion cannot leak the module-global flag into later tests (the
    # previous version only reset it on the success path).
    conv2d_gradfix.enabled = True
    try:
        lr = 1e-2
        in_channels = 3
        out_channels = 10
        kernel_size = 5
        embedding_size = 10
        x_custom = torch.randn(
            size=(2, in_channels, embedding_size, embedding_size),
            requires_grad=True,
            device="cuda",
        )
        # Independent leaf with the same values so each op gets its own grads.
        x_original = x_custom.detach().requires_grad_()
        custom_conv = conv_gradfix.GFixConv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            bias=True,
        ).cuda()
        # The StyleGAN op is functional; hand it parameters sharing the same
        # storage-initial values as the module under test.
        weight = nn.Parameter(custom_conv.weight.data)
        bias = nn.Parameter(custom_conv.bias.data)
        custom_parameters = [x_custom, *custom_conv.parameters()]
        original_parameters = [x_original, weight, bias]
        optimizer_custom = torch.optim.Adam(custom_parameters, lr=lr)
        optimizer_original = torch.optim.Adam(original_parameters, lr=lr)
        optimizer_custom.zero_grad()
        optimizer_original.zero_grad()
        y_custom: Tensor = custom_conv(x_custom)
        y_original: Tensor = conv2d_gradfix.conv2d(
            input=x_original,
            weight=weight,
            bias=bias,
            stride=stride,
            padding=padding,
        )
        loss_custom = y_custom.square().sum()
        loss_original = y_original.square().sum()
        # create_graph=True so these grads can feed a second (double) backward.
        weight_grad_custom = torch.autograd.grad(
            outputs=[loss_custom],
            inputs=[custom_conv.weight],
            create_graph=True,
        )[0]
        weight_grad_original = torch.autograd.grad(
            outputs=[loss_original], inputs=[weight], create_graph=True
        )[0]
        assert torch.allclose(
            y_custom, y_original
        ), f"Forward output mismatch. l1: {F.l1_loss(y_custom, y_original)}"
        assert torch.allclose(
            weight_grad_custom, weight_grad_original
        ), f"Forward weight_grad mismatch. l1: {F.l1_loss(weight_grad_custom, weight_grad_original)}"
        # Penalizing the gradient itself forces a double-backward.
        loss_custom += weight_grad_custom.square().sum()
        loss_original += weight_grad_original.square().sum()
        loss_custom.backward()
        loss_original.backward()
        optimizer_custom.step()
        optimizer_original.step()
        assert torch.allclose(
            x_custom, x_original
        ), f"Backward input mismatch. l1: {F.l1_loss(x_custom, x_original)}"
        assert torch.allclose(
            custom_conv.weight, weight
        ), f"Backward weight mismatch. l1: {F.l1_loss(custom_conv.weight, weight)}"
        assert torch.allclose(
            custom_conv.bias, bias
        ), f"Backward bias mismatch. l1: {F.l1_loss(custom_conv.bias, bias)}"
    finally:
        conv2d_gradfix.enabled = False
@runif(min_gpus=1)
@pytest.mark.parametrize(
    *gen_params(["stride", "padding"], itertools.product(range(1, 3), range(2)))
)
def test_conv_transpose2d_with_stylegan_gpu(stride: int, padding: int) -> None:
    """GFixConvTranspose2d must match StyleGAN3's conv2d_gradfix.conv_transpose2d
    on CUDA: forward output, double-backward weight gradient, and parameters
    after one Adam step.
    """
    # Enable the StyleGAN gradfix path; reset it in ``finally`` so a failing
    # assertion cannot leak the module-global flag into later tests (the
    # previous version only reset it on the success path).
    conv2d_gradfix.enabled = True
    try:
        lr = 1e-2
        in_channels = 3
        out_channels = 10
        kernel_size = 5
        embedding_size = 10
        x_custom = torch.randn(
            size=(2, in_channels, embedding_size, embedding_size),
            requires_grad=True,
            device="cuda",
        )
        # Independent leaf with the same values so each op gets its own grads.
        x_original = x_custom.detach().requires_grad_()
        custom_conv = conv_gradfix.GFixConvTranspose2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            bias=True,
        ).cuda()
        # The StyleGAN op is functional; hand it parameters sharing the same
        # initial values as the module under test.
        weight = nn.Parameter(custom_conv.weight.data)
        bias = nn.Parameter(custom_conv.bias.data)
        custom_parameters = [x_custom, *custom_conv.parameters()]
        original_parameters = [x_original, weight, bias]
        optimizer_custom = torch.optim.Adam(custom_parameters, lr=lr)
        optimizer_original = torch.optim.Adam(original_parameters, lr=lr)
        optimizer_custom.zero_grad()
        optimizer_original.zero_grad()
        y_custom: Tensor = custom_conv(x_custom)
        y_original: Tensor = conv2d_gradfix.conv_transpose2d(
            input=x_original,
            weight=weight,
            bias=bias,
            stride=stride,
            padding=padding,
        )
        loss_custom = y_custom.square().sum()
        loss_original = y_original.square().sum()
        # create_graph=True so these grads can feed a second (double) backward.
        weight_grad_custom = torch.autograd.grad(
            outputs=[loss_custom],
            inputs=[custom_conv.weight],
            create_graph=True,
        )[0]
        weight_grad_original = torch.autograd.grad(
            outputs=[loss_original], inputs=[weight], create_graph=True
        )[0]
        assert torch.allclose(
            y_custom, y_original, atol=1e-5
        ), f"Forward output mismatch. l1: {F.l1_loss(y_custom, y_original)}"
        assert torch.allclose(
            weight_grad_custom, weight_grad_original, atol=1e-5
        ), f"Forward weight_grad mismatch. l1: {F.l1_loss(weight_grad_custom, weight_grad_original)}"
        # Penalizing the gradient itself forces a double-backward.
        loss_custom += weight_grad_custom.square().sum()
        loss_original += weight_grad_original.square().sum()
        loss_custom.backward()
        loss_original.backward()
        optimizer_custom.step()
        optimizer_original.step()
        assert torch.allclose(
            x_custom, x_original
        ), f"Backward input mismatch. l1: {F.l1_loss(x_custom, x_original)}"
        assert torch.allclose(
            custom_conv.weight, weight
        ), f"Backward weight mismatch. l1: {F.l1_loss(custom_conv.weight, weight)}"
        assert torch.allclose(
            custom_conv.bias, bias
        ), f"Backward bias mismatch. l1: {F.l1_loss(custom_conv.bias, bias)}"
    finally:
        conv2d_gradfix.enabled = False
@runif(tensorflow_installed=True)
@pytest.mark.parametrize(
    *gen_params(
        ["rank", "transposed", "stride"],
        itertools.product([1, 2, 3], [False, True], [1, 2]),
    )
)
def test_same_padding_with_tensorflow(
    rank: int, transposed: bool, stride: int
) -> None:
    """Compare padding="same" against the equivalent Keras Conv/ConvTranspose
    layer: forward output, updated weights, and updated input after one Adam
    step with matched hyperparameters.
    """
    import tensorflow as tf
    lr = 1e-2
    padding = "same"
    in_channels = 3
    out_channels = 3
    kernel_size = 4
    embedding_size = 19
    tf_input = tf.Variable(
        tf.random.normal((2, *(embedding_size,) * rank, in_channels)),
    )
    # Convert TF's channels-last layout to torch's channels-first.
    original_input = torch.tensor(tf_input.numpy()).permute(
        0, -1, *range(1, 1 + rank)
    )
    torch_input = original_input.clone().requires_grad_()
    tf_conv = getattr(
        tf.keras.layers,
        f"Conv{rank}DTranspose" if transposed else f"Conv{rank}D",
    )(
        filters=out_channels,
        kernel_size=kernel_size,
        strides=stride,
        padding=padding,
        data_format="channels_last",
        use_bias=True,
    )
    tf_conv.build(tf_input.shape)
    # NOTE: getattr(...)() returns an *instance*, hence the non-Type hint.
    torch_conv: conv_gradfix._GFixConvNd = getattr(
        conv_gradfix,
        f"GFixConvTranspose{rank}d" if transposed else f"GFixConv{rank}d",
    )(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=True,
    )
    # Move the Keras kernel's trailing channel axes ahead of its spatial
    # axes to match torch's weight layout, and copy the parameters over.
    torch_conv.weight.data = torch.tensor(tf_conv.kernel.numpy()).permute(
        *range(rank + 1, rank - 1, -1), *range(rank)
    )
    torch_conv.bias.data = torch.tensor(tf_conv.bias.numpy())
    # Match Adam hyperparameters on both sides (epsilon 1e-7).
    tf_optimizer = tf.optimizers.Adam(
        learning_rate=lr, beta_1=0.9, beta_2=0.999, epsilon=1e-7
    )
    torch_optimizer = torch.optim.Adam(
        [torch_input, *torch_conv.parameters()],
        lr=lr,
        betas=(0.9, 0.999),
        eps=1e-7,
    )
    with tf.GradientTape() as tape:
        tf_output = tf_conv(tf_input)
        loss_tf = tf.reduce_sum(tf_output**2)
    gradient_tf = tape.gradient(
        loss_tf, [tf_input, *tf_conv.trainable_variables]
    )
    tf_optimizer.apply_gradients(
        zip(gradient_tf, [tf_input, *tf_conv.trainable_variables])
    )
    torch_optimizer.zero_grad()
    torch_output: Tensor = torch_conv(torch_input)
    loss_torch = torch_output.square().sum()
    loss_torch.backward()
    torch_optimizer.step()
    # Bring the post-step TF tensors into torch layout for comparison.
    tf_input_torch = torch.tensor(tf_input.numpy()).permute(
        0, -1, *range(1, 1 + rank)
    )
    tf_output_torch = torch.tensor(tf_output.numpy()).permute(
        0, -1, *range(1, 1 + rank)
    )
    tf_weight_torch = torch.tensor(tf_conv.kernel.numpy()).permute(
        *range(rank + 1, rank - 1, -1), *range(rank)
    )
    assert torch.allclose(
        tf_output_torch, torch_output, rtol=1e-4, atol=1e-5
    ), f"Forward output mismatch. l1: {F.l1_loss(tf_output_torch, torch_output)}"
    assert torch.allclose(
        tf_weight_torch, torch_conv.weight, rtol=1e-4, atol=1e-5
    ), f"Backward weight mismatch. l1: {F.l1_loss(tf_weight_torch, torch_conv.weight)}"
    # The updated inputs agree closely, but comparing them requires a larger
    # absolute tolerance than the other checks.
    assert torch.allclose(
        tf_input_torch, torch_input, rtol=1e-4, atol=1e-2
    ), f"Backward input mismatch. l1: {F.l1_loss(tf_input_torch, torch_input)}"
    assert not torch.allclose(
        original_input, torch_input, rtol=1e-4, atol=1e-5
    ), f"Input value should be changed after backward. l1: {F.l1_loss(original_input, torch_input)}"
| 32.239861
| 116
| 0.655393
| 3,660
| 27,823
| 4.713934
| 0.05765
| 0.039413
| 0.010723
| 0.019301
| 0.881064
| 0.86582
| 0.853533
| 0.837014
| 0.82774
| 0.823045
| 0
| 0.01677
| 0.224167
| 27,823
| 862
| 117
| 32.277262
| 0.782498
| 0.003954
| 0
| 0.757697
| 0
| 0.001339
| 0.119776
| 0.031902
| 0
| 0
| 0
| 0
| 0.053548
| 1
| 0.014726
| false
| 0
| 0.017403
| 0
| 0.032129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
756815069db38b3b38527bb14fdb9be87cd80303
| 43,829
|
py
|
Python
|
tests/test_praw_scrapers/test_live_scrapers/test_utils/test_DisplayStream.py
|
JosephLai241/Reddit-Scraper
|
9f8cf3a3adb9aa5079dfc7bfd7832b53358ee40f
|
[
"MIT"
] | 318
|
2020-06-25T00:54:23.000Z
|
2022-03-29T20:02:43.000Z
|
tests/test_praw_scrapers/test_live_scrapers/test_utils/test_DisplayStream.py
|
JosephLai241/Reddit-Scraper
|
9f8cf3a3adb9aa5079dfc7bfd7832b53358ee40f
|
[
"MIT"
] | 26
|
2020-06-21T04:45:27.000Z
|
2022-03-17T21:37:21.000Z
|
tests/test_praw_scrapers/test_live_scrapers/test_utils/test_DisplayStream.py
|
JosephLai241/Reddit-Scraper
|
9f8cf3a3adb9aa5079dfc7bfd7832b53358ee40f
|
[
"MIT"
] | 74
|
2020-06-22T21:37:54.000Z
|
2022-03-15T23:38:02.000Z
|
"""
Testing `DisplayStream.py`.
"""
from prettytable import PrettyTable
from urs.praw_scrapers.live_scrapers.utils import DisplayStream
class CreatePrettyTable():
    """
    Factory for the two-column PrettyTable used by the DisplayStream tests.
    """
    @staticmethod
    def create():
        """Return an empty PrettyTable with "Attribute"/"Data" columns."""
        table = PrettyTable()
        table.field_names = ["Attribute", "Data"]
        return table
class TestDisplayStreamPopulateTableMethod():
"""
Testing DisplayStream class _populate_table() method.
"""
def test_populate_table_method_with_comment_object(self):
test_include_fields = [
"author",
"body",
"created_utc",
"distinguished",
"edited",
"id",
"is_submitter",
"link_id",
"parent_id",
"score",
"stickied"
]
test_object = {
"author": "u/Flatworm1",
"body": "They all went to bed hours ago.",
"body_html": "<div class=\"md\"><p>They all went to bed hours ago.</p>\n</div>",
"created_utc": "05-23-2021 20:47:55",
"distinguished": None,
"edited": False,
"id": "gz811x5",
"is_submitter": False,
"link_id": "t3_njlm8y",
"parent_id": "t3_njlm8y",
"score": 1,
"stickied": False,
"submission": {
"author": "u/Lanre-Haliax",
"created_utc": "05-23-2021 20:46:39",
"distinguished": None,
"edited": False,
"id": "njlm8y",
"is_original_content": False,
"is_self": True,
"link_flair_text": None,
"locked": False,
"name": "t3_njlm8y",
"nsfw": False,
"num_comments": 2,
"permalink": "/r/AskReddit/comments/njlm8y/people_who_where_at_woodstock_in_1969_how_was_it/",
"score": 4,
"selftext": "",
"spoiler": False,
"stickied": False,
"subreddit": {
"can_assign_link_flair": False,
"can_assign_user_flair": False,
"created_utc": "01-24-2008 22:52:15",
"description": "###### [ [ SERIOUS ] ](http://www.reddit.com/r/askreddit/submit?selftext=true&title=%5BSerious%5D)\n\n\n##### [Rules](https://www.reddit.com/r/AskReddit/wiki/index#wiki_rules):\n1. You must post a clear and direct question in the title. The title may contain two, short, necessary context sentences.\nNo text is allowed in the textbox. Your thoughts/responses to the question can go in the comments section. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_1-)\n\n2. Any post asking for advice should be generic and not specific to your situation alone. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_2-)\n\n3. Askreddit is for open-ended discussion questions. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_3-)\n\n4. Posting, or seeking, any identifying personal information, real or fake, will result in a ban without a prior warning. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_4-)\n\n5. Askreddit is not your soapbox, personal army, or advertising platform. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_5-)\n\n6. [Serious] tagged posts are off-limits to jokes or irrelevant replies. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-)\n\n7. Soliciting money, goods, services, or favours is not allowed. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_7-)\n\n8. Mods reserve the right to remove content or restrict users' posting privileges as necessary if it is deemed detrimental to the subreddit or to the experience of others. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_8-)\n\n9. Comment replies consisting solely of images will be removed. 
[more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_9-)\n\n##### If you think your post has disappeared, see spam or an inappropriate post, please do not hesitate to [contact the mods](https://www.reddit.com/message/compose?to=%2Fr%2FAskReddit), we're happy to help.\n\n---\n\n#### Tags to use:\n\n> ## [[Serious]](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-)\n\n### Use a **[Serious]** post tag to designate your post as a serious, on-topic-only thread.\n\n-\n\n#### Filter posts by subject:\n\n[Mod posts](http://ud.reddit.com/r/AskReddit/#ud)\n[Serious posts](https://www.reddit.com/r/AskReddit/search/?q=flair%3Aserious&sort=new&restrict_sr=on&t=all)\n[Megathread](http://bu.reddit.com/r/AskReddit/#bu)\n[Breaking news](http://nr.reddit.com/r/AskReddit/#nr)\n[Unfilter](/r/AskReddit)\n\n\n-\n\n### Please use spoiler tags to hide spoilers. `>!insert spoiler here!<`\n\n-\n\n#### Other subreddits you might like:\nsome|header\n:---|:---\n[Ask Others](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_ask_others)|[Self & Others](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_self_.26amp.3B_others)\n[Find a subreddit](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_find_a_subreddit)|[Learn something](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_learn_something)\n[Meta Subs](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_meta)|[What is this ___](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_what_is_this______)\n[AskReddit Offshoots](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_askreddit_offshoots)|[Offers & Assistance](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_offers_.26amp.3B_assistance)\n\n\n-\n\n### Ever read the reddiquette? [Take a peek!](/wiki/reddiquette)\n\n[](#/RES_SR_Config/NightModeCompatible)",
"description_html": "<!-- SC_OFF --><div class=\"md\"><h6><a href=\"http://www.reddit.com/r/askreddit/submit?selftext=true&title=%5BSerious%5D\"> [ SERIOUS ] </a></h6>\n\n<h5><a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_rules\">Rules</a>:</h5>\n\n<ol>\n<li><p>You must post a clear and direct question in the title. The title may contain two, short, necessary context sentences.\nNo text is allowed in the textbox. Your thoughts/responses to the question can go in the comments section. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_1-\">more >></a></p></li>\n<li><p>Any post asking for advice should be generic and not specific to your situation alone. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_2-\">more >></a></p></li>\n<li><p>Askreddit is for open-ended discussion questions. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_3-\">more >></a></p></li>\n<li><p>Posting, or seeking, any identifying personal information, real or fake, will result in a ban without a prior warning. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_4-\">more >></a></p></li>\n<li><p>Askreddit is not your soapbox, personal army, or advertising platform. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_5-\">more >></a></p></li>\n<li><p>[Serious] tagged posts are off-limits to jokes or irrelevant replies. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-\">more >></a></p></li>\n<li><p>Soliciting money, goods, services, or favours is not allowed. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_7-\">more >></a></p></li>\n<li><p>Mods reserve the right to remove content or restrict users' posting privileges as necessary if it is deemed detrimental to the subreddit or to the experience of others. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_8-\">more >></a></p></li>\n<li><p>Comment replies consisting solely of images will be removed. 
<a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_9-\">more >></a></p></li>\n</ol>\n\n<h5>If you think your post has disappeared, see spam or an inappropriate post, please do not hesitate to <a href=\"https://www.reddit.com/message/compose?to=%2Fr%2FAskReddit\">contact the mods</a>, we're happy to help.</h5>\n\n<hr/>\n\n<h4>Tags to use:</h4>\n\n<blockquote>\n<h2><a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-\">[Serious]</a></h2>\n</blockquote>\n\n<h3>Use a <strong>[Serious]</strong> post tag to designate your post as a serious, on-topic-only thread.</h3>\n\n<h2></h2>\n\n<h4>Filter posts by subject:</h4>\n\n<p><a href=\"http://ud.reddit.com/r/AskReddit/#ud\">Mod posts</a>\n<a href=\"https://www.reddit.com/r/AskReddit/search/?q=flair%3Aserious&sort=new&restrict_sr=on&t=all\">Serious posts</a>\n<a href=\"http://bu.reddit.com/r/AskReddit/#bu\">Megathread</a>\n<a href=\"http://nr.reddit.com/r/AskReddit/#nr\">Breaking news</a>\n<a href=\"/r/AskReddit\">Unfilter</a></p>\n\n<h2></h2>\n\n<h3>Please use spoiler tags to hide spoilers. 
<code>>!insert spoiler here!<</code></h3>\n\n<h2></h2>\n\n<h4>Other subreddits you might like:</h4>\n\n<table><thead>\n<tr>\n<th align=\"left\">some</th>\n<th align=\"left\">header</th>\n</tr>\n</thead><tbody>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_ask_others\">Ask Others</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_self_.26amp.3B_others\">Self & Others</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_find_a_subreddit\">Find a subreddit</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_learn_something\">Learn something</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_meta\">Meta Subs</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_what_is_this______\">What is this ___</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_askreddit_offshoots\">AskReddit Offshoots</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_offers_.26amp.3B_assistance\">Offers & Assistance</a></td>\n</tr>\n</tbody></table>\n\n<h2></h2>\n\n<h3>Ever read the reddiquette? <a href=\"/wiki/reddiquette\">Take a peek!</a></h3>\n\n<p><a href=\"#/RES_SR_Config/NightModeCompatible\"></a></p>\n</div><!-- SC_ON -->",
"display_name": "AskReddit",
"id": "2qh1i",
"name": "t5_2qh1i",
"nsfw": False,
"public_description": "r/AskReddit is the place to ask and answer thought-provoking questions.",
"spoilers_enabled": True,
"subscribers": 32355402,
"user_is_banned": False,
"user_is_moderator": False,
"user_is_subscriber": False
},
"title": "People who where at Woodstock in 1969, how was it?",
"type": "submission",
"upvote_ratio": 1.0,
"url": "https://www.reddit.com/r/AskReddit/comments/njlm8y/people_who_where_at_woodstock_in_1969_how_was_it/"
},
"subreddit_id": "t5_2qh1i",
"type": "comment"
}
test_prefix = ""
test_pretty_stream = CreatePrettyTable.create()
try:
DisplayStream.DisplayStream._populate_table(test_include_fields, test_object, test_prefix, test_pretty_stream)
assert True
except Exception as e:
print("An exception has occurred when attempting to populate PrettyStream:", e)
assert False
def test_populate_table_method_with_submission_object(self):
    """
    Verify that DisplayStream._populate_table() accepts a serialized
    submission object and populates the PrettyTable without raising.
    """
    # Fields _populate_table() is expected to pull out of the submission.
    test_include_fields = [
        "author",
        "created_utc",
        "distinguished",
        "edited",
        "id",
        "is_original_content",
        "is_self",
        "link_flair_text",
        "nsfw",
        "score",
        "selftext",
        "spoiler",
        "stickied",
        "title",
        "url"
    ]
    # Realistic serialized Reddit submission, including the nested
    # subreddit metadata dict, as produced by the streaming scraper.
    test_object = {
        "author": "u/NuclearWinterGames",
        "created_utc": "05-24-2021 20:36:44",
        "distinguished": None,
        "edited": False,
        "id": "nkcrtr",
        "is_original_content": False,
        "is_self": True,
        "link_flair_text": None,
        "locked": False,
        "name": "t3_nkcrtr",
        "nsfw": False,
        "num_comments": 0,
        "permalink": "/r/AskReddit/comments/nkcrtr/which_song_defines_your_childhood/",
        "score": 1,
        "selftext": "",
        "spoiler": False,
        "stickied": False,
        "subreddit": {
            "can_assign_link_flair": False,
            "can_assign_user_flair": False,
            "created_utc": "01-24-2008 22:52:15",
            "description": "###### [ [ SERIOUS ] ](http://www.reddit.com/r/askreddit/submit?selftext=true&title=%5BSerious%5D)\n\n\n##### [Rules](https://www.reddit.com/r/AskReddit/wiki/index#wiki_rules):\n1. You must post a clear and direct question in the title. The title may contain two, short, necessary context sentences.\nNo text is allowed in the textbox. Your thoughts/responses to the question can go in the comments section. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_1-)\n\n2. Any post asking for advice should be generic and not specific to your situation alone. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_2-)\n\n3. Askreddit is for open-ended discussion questions. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_3-)\n\n4. Posting, or seeking, any identifying personal information, real or fake, will result in a ban without a prior warning. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_4-)\n\n5. Askreddit is not your soapbox, personal army, or advertising platform. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_5-)\n\n6. [Serious] tagged posts are off-limits to jokes or irrelevant replies. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-)\n\n7. Soliciting money, goods, services, or favours is not allowed. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_7-)\n\n8. Mods reserve the right to remove content or restrict users' posting privileges as necessary if it is deemed detrimental to the subreddit or to the experience of others. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_8-)\n\n9. Comment replies consisting solely of images will be removed. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_9-)\n\n##### If you think your post has disappeared, see spam or an inappropriate post, please do not hesitate to [contact the mods](https://www.reddit.com/message/compose?to=%2Fr%2FAskReddit), we're happy to help.\n\n---\n\n#### Tags to use:\n\n> ## [[Serious]](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-)\n\n### Use a **[Serious]** post tag to designate your post as a serious, on-topic-only thread.\n\n-\n\n#### Filter posts by subject:\n\n[Mod posts](http://ud.reddit.com/r/AskReddit/#ud)\n[Serious posts](https://www.reddit.com/r/AskReddit/search/?q=flair%3Aserious&sort=new&restrict_sr=on&t=all)\n[Megathread](http://bu.reddit.com/r/AskReddit/#bu)\n[Breaking news](http://nr.reddit.com/r/AskReddit/#nr)\n[Unfilter](/r/AskReddit)\n\n\n-\n\n### Please use spoiler tags to hide spoilers. `>!insert spoiler here!<`\n\n-\n\n#### Other subreddits you might like:\nsome|header\n:---|:---\n[Ask Others](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_ask_others)|[Self & Others](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_self_.26amp.3B_others)\n[Find a subreddit](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_find_a_subreddit)|[Learn something](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_learn_something)\n[Meta Subs](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_meta)|[What is this ___](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_what_is_this______)\n[AskReddit Offshoots](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_askreddit_offshoots)|[Offers & Assistance](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_offers_.26amp.3B_assistance)\n\n\n-\n\n### Ever read the reddiquette? [Take a peek!](/wiki/reddiquette)\n\n[](#/RES_SR_Config/NightModeCompatible)",
            "description_html": "<!-- SC_OFF --><div class=\"md\"><h6><a href=\"http://www.reddit.com/r/askreddit/submit?selftext=true&title=%5BSerious%5D\"> [ SERIOUS ] </a></h6>\n\n<h5><a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_rules\">Rules</a>:</h5>\n\n<ol>\n<li><p>You must post a clear and direct question in the title. The title may contain two, short, necessary context sentences.\nNo text is allowed in the textbox. Your thoughts/responses to the question can go in the comments section. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_1-\">more >></a></p></li>\n<li><p>Any post asking for advice should be generic and not specific to your situation alone. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_2-\">more >></a></p></li>\n<li><p>Askreddit is for open-ended discussion questions. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_3-\">more >></a></p></li>\n<li><p>Posting, or seeking, any identifying personal information, real or fake, will result in a ban without a prior warning. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_4-\">more >></a></p></li>\n<li><p>Askreddit is not your soapbox, personal army, or advertising platform. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_5-\">more >></a></p></li>\n<li><p>[Serious] tagged posts are off-limits to jokes or irrelevant replies. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-\">more >></a></p></li>\n<li><p>Soliciting money, goods, services, or favours is not allowed. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_7-\">more >></a></p></li>\n<li><p>Mods reserve the right to remove content or restrict users' posting privileges as necessary if it is deemed detrimental to the subreddit or to the experience of others. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_8-\">more >></a></p></li>\n<li><p>Comment replies consisting solely of images will be removed. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_9-\">more >></a></p></li>\n</ol>\n\n<h5>If you think your post has disappeared, see spam or an inappropriate post, please do not hesitate to <a href=\"https://www.reddit.com/message/compose?to=%2Fr%2FAskReddit\">contact the mods</a>, we're happy to help.</h5>\n\n<hr/>\n\n<h4>Tags to use:</h4>\n\n<blockquote>\n<h2><a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-\">[Serious]</a></h2>\n</blockquote>\n\n<h3>Use a <strong>[Serious]</strong> post tag to designate your post as a serious, on-topic-only thread.</h3>\n\n<h2></h2>\n\n<h4>Filter posts by subject:</h4>\n\n<p><a href=\"http://ud.reddit.com/r/AskReddit/#ud\">Mod posts</a>\n<a href=\"https://www.reddit.com/r/AskReddit/search/?q=flair%3Aserious&sort=new&restrict_sr=on&t=all\">Serious posts</a>\n<a href=\"http://bu.reddit.com/r/AskReddit/#bu\">Megathread</a>\n<a href=\"http://nr.reddit.com/r/AskReddit/#nr\">Breaking news</a>\n<a href=\"/r/AskReddit\">Unfilter</a></p>\n\n<h2></h2>\n\n<h3>Please use spoiler tags to hide spoilers. <code>>!insert spoiler here!<</code></h3>\n\n<h2></h2>\n\n<h4>Other subreddits you might like:</h4>\n\n<table><thead>\n<tr>\n<th align=\"left\">some</th>\n<th align=\"left\">header</th>\n</tr>\n</thead><tbody>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_ask_others\">Ask Others</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_self_.26amp.3B_others\">Self & Others</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_find_a_subreddit\">Find a subreddit</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_learn_something\">Learn something</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_meta\">Meta Subs</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_what_is_this______\">What is this ___</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_askreddit_offshoots\">AskReddit Offshoots</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_offers_.26amp.3B_assistance\">Offers & Assistance</a></td>\n</tr>\n</tbody></table>\n\n<h2></h2>\n\n<h3>Ever read the reddiquette? <a href=\"/wiki/reddiquette\">Take a peek!</a></h3>\n\n<p><a href=\"#/RES_SR_Config/NightModeCompatible\"></a></p>\n</div><!-- SC_ON -->",
            "display_name": "AskReddit",
            "id": "2qh1i",
            "name": "t5_2qh1i",
            "nsfw": False,
            "public_description": "r/AskReddit is the place to ask and answer thought-provoking questions.",
            "spoilers_enabled": True,
            "subscribers": 32366338,
            "user_is_banned": False,
            "user_is_moderator": False,
            "user_is_subscriber": False
        },
        "title": "Which song defines your childhood?",
        "type": "submission",
        "upvote_ratio": 1.0,
        "url": "https://www.reddit.com/r/AskReddit/comments/nkcrtr/which_song_defines_your_childhood/"
    }
    test_prefix = ""
    test_pretty_stream = CreatePrettyTable.create()

    # Call the method directly: if it raises, pytest fails the test with the
    # full traceback. The previous try/except ... assert False pattern
    # swallowed the traceback and reported only a bare assertion failure.
    DisplayStream.DisplayStream._populate_table(
        test_include_fields, test_object, test_prefix, test_pretty_stream
    )
class TestDisplayStreamDisplayMethod():
"""
Testing DisplayStream class display() method.
"""
def test_display_method_comment_object(self):
    """
    Verify that DisplayStream.display() accepts a serialized comment
    object (with its nested submission and subreddit data) and renders
    it without raising.
    """
    # Realistic serialized Reddit comment, including the nested submission
    # and subreddit metadata, as produced by the streaming scraper.
    test_comment_object = {
        "author": "u/Flatworm1",
        "body": "They all went to bed hours ago.",
        "body_html": "<div class=\"md\"><p>They all went to bed hours ago.</p>\n</div>",
        "created_utc": "05-23-2021 20:47:55",
        "distinguished": None,
        "edited": False,
        "id": "gz811x5",
        "is_submitter": False,
        "link_id": "t3_njlm8y",
        "parent_id": "t3_njlm8y",
        "score": 1,
        "stickied": False,
        "submission": {
            "author": "u/Lanre-Haliax",
            "created_utc": "05-23-2021 20:46:39",
            "distinguished": None,
            "edited": False,
            "id": "njlm8y",
            "is_original_content": False,
            "is_self": True,
            "link_flair_text": None,
            "locked": False,
            "name": "t3_njlm8y",
            "nsfw": False,
            "num_comments": 2,
            "permalink": "/r/AskReddit/comments/njlm8y/people_who_where_at_woodstock_in_1969_how_was_it/",
            "score": 4,
            "selftext": "",
            "spoiler": False,
            "stickied": False,
            "subreddit": {
                "can_assign_link_flair": False,
                "can_assign_user_flair": False,
                "created_utc": "01-24-2008 22:52:15",
                "description": "###### [ [ SERIOUS ] ](http://www.reddit.com/r/askreddit/submit?selftext=true&title=%5BSerious%5D)\n\n\n##### [Rules](https://www.reddit.com/r/AskReddit/wiki/index#wiki_rules):\n1. You must post a clear and direct question in the title. The title may contain two, short, necessary context sentences.\nNo text is allowed in the textbox. Your thoughts/responses to the question can go in the comments section. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_1-)\n\n2. Any post asking for advice should be generic and not specific to your situation alone. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_2-)\n\n3. Askreddit is for open-ended discussion questions. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_3-)\n\n4. Posting, or seeking, any identifying personal information, real or fake, will result in a ban without a prior warning. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_4-)\n\n5. Askreddit is not your soapbox, personal army, or advertising platform. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_5-)\n\n6. [Serious] tagged posts are off-limits to jokes or irrelevant replies. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-)\n\n7. Soliciting money, goods, services, or favours is not allowed. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_7-)\n\n8. Mods reserve the right to remove content or restrict users' posting privileges as necessary if it is deemed detrimental to the subreddit or to the experience of others. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_8-)\n\n9. Comment replies consisting solely of images will be removed. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_9-)\n\n##### If you think your post has disappeared, see spam or an inappropriate post, please do not hesitate to [contact the mods](https://www.reddit.com/message/compose?to=%2Fr%2FAskReddit), we're happy to help.\n\n---\n\n#### Tags to use:\n\n> ## [[Serious]](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-)\n\n### Use a **[Serious]** post tag to designate your post as a serious, on-topic-only thread.\n\n-\n\n#### Filter posts by subject:\n\n[Mod posts](http://ud.reddit.com/r/AskReddit/#ud)\n[Serious posts](https://www.reddit.com/r/AskReddit/search/?q=flair%3Aserious&sort=new&restrict_sr=on&t=all)\n[Megathread](http://bu.reddit.com/r/AskReddit/#bu)\n[Breaking news](http://nr.reddit.com/r/AskReddit/#nr)\n[Unfilter](/r/AskReddit)\n\n\n-\n\n### Please use spoiler tags to hide spoilers. `>!insert spoiler here!<`\n\n-\n\n#### Other subreddits you might like:\nsome|header\n:---|:---\n[Ask Others](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_ask_others)|[Self & Others](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_self_.26amp.3B_others)\n[Find a subreddit](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_find_a_subreddit)|[Learn something](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_learn_something)\n[Meta Subs](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_meta)|[What is this ___](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_what_is_this______)\n[AskReddit Offshoots](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_askreddit_offshoots)|[Offers & Assistance](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_offers_.26amp.3B_assistance)\n\n\n-\n\n### Ever read the reddiquette? [Take a peek!](/wiki/reddiquette)\n\n[](#/RES_SR_Config/NightModeCompatible)",
                "description_html": "<!-- SC_OFF --><div class=\"md\"><h6><a href=\"http://www.reddit.com/r/askreddit/submit?selftext=true&title=%5BSerious%5D\"> [ SERIOUS ] </a></h6>\n\n<h5><a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_rules\">Rules</a>:</h5>\n\n<ol>\n<li><p>You must post a clear and direct question in the title. The title may contain two, short, necessary context sentences.\nNo text is allowed in the textbox. Your thoughts/responses to the question can go in the comments section. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_1-\">more >></a></p></li>\n<li><p>Any post asking for advice should be generic and not specific to your situation alone. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_2-\">more >></a></p></li>\n<li><p>Askreddit is for open-ended discussion questions. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_3-\">more >></a></p></li>\n<li><p>Posting, or seeking, any identifying personal information, real or fake, will result in a ban without a prior warning. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_4-\">more >></a></p></li>\n<li><p>Askreddit is not your soapbox, personal army, or advertising platform. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_5-\">more >></a></p></li>\n<li><p>[Serious] tagged posts are off-limits to jokes or irrelevant replies. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-\">more >></a></p></li>\n<li><p>Soliciting money, goods, services, or favours is not allowed. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_7-\">more >></a></p></li>\n<li><p>Mods reserve the right to remove content or restrict users' posting privileges as necessary if it is deemed detrimental to the subreddit or to the experience of others. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_8-\">more >></a></p></li>\n<li><p>Comment replies consisting solely of images will be removed. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_9-\">more >></a></p></li>\n</ol>\n\n<h5>If you think your post has disappeared, see spam or an inappropriate post, please do not hesitate to <a href=\"https://www.reddit.com/message/compose?to=%2Fr%2FAskReddit\">contact the mods</a>, we're happy to help.</h5>\n\n<hr/>\n\n<h4>Tags to use:</h4>\n\n<blockquote>\n<h2><a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-\">[Serious]</a></h2>\n</blockquote>\n\n<h3>Use a <strong>[Serious]</strong> post tag to designate your post as a serious, on-topic-only thread.</h3>\n\n<h2></h2>\n\n<h4>Filter posts by subject:</h4>\n\n<p><a href=\"http://ud.reddit.com/r/AskReddit/#ud\">Mod posts</a>\n<a href=\"https://www.reddit.com/r/AskReddit/search/?q=flair%3Aserious&sort=new&restrict_sr=on&t=all\">Serious posts</a>\n<a href=\"http://bu.reddit.com/r/AskReddit/#bu\">Megathread</a>\n<a href=\"http://nr.reddit.com/r/AskReddit/#nr\">Breaking news</a>\n<a href=\"/r/AskReddit\">Unfilter</a></p>\n\n<h2></h2>\n\n<h3>Please use spoiler tags to hide spoilers. <code>>!insert spoiler here!<</code></h3>\n\n<h2></h2>\n\n<h4>Other subreddits you might like:</h4>\n\n<table><thead>\n<tr>\n<th align=\"left\">some</th>\n<th align=\"left\">header</th>\n</tr>\n</thead><tbody>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_ask_others\">Ask Others</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_self_.26amp.3B_others\">Self & Others</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_find_a_subreddit\">Find a subreddit</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_learn_something\">Learn something</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_meta\">Meta Subs</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_what_is_this______\">What is this ___</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_askreddit_offshoots\">AskReddit Offshoots</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_offers_.26amp.3B_assistance\">Offers & Assistance</a></td>\n</tr>\n</tbody></table>\n\n<h2></h2>\n\n<h3>Ever read the reddiquette? <a href=\"/wiki/reddiquette\">Take a peek!</a></h3>\n\n<p><a href=\"#/RES_SR_Config/NightModeCompatible\"></a></p>\n</div><!-- SC_ON -->",
                "display_name": "AskReddit",
                "id": "2qh1i",
                "name": "t5_2qh1i",
                "nsfw": False,
                "public_description": "r/AskReddit is the place to ask and answer thought-provoking questions.",
                "spoilers_enabled": True,
                "subscribers": 32355402,
                "user_is_banned": False,
                "user_is_moderator": False,
                "user_is_subscriber": False
            },
            "title": "People who where at Woodstock in 1969, how was it?",
            "type": "submission",
            "upvote_ratio": 1.0,
            "url": "https://www.reddit.com/r/AskReddit/comments/njlm8y/people_who_where_at_woodstock_in_1969_how_was_it/"
        },
        "subreddit_id": "t5_2qh1i",
        "type": "comment"
    }

    # Call the method directly: if it raises, pytest fails the test with the
    # full traceback. The previous try/except ... assert False pattern
    # swallowed the traceback and reported only a bare assertion failure.
    DisplayStream.DisplayStream.display(test_comment_object)
def test_display_method_submission_object(self):
    """DisplayStream.display() must not raise for a realistic submission payload.

    The fixture below is a captured Reddit submission object (AskReddit),
    including the full subreddit sidebar markdown/HTML, so the display code is
    exercised against real-world-sized string fields.
    """
    test_submission_object = {
        "author": "u/NuclearWinterGames",
        "created_utc": "05-24-2021 20:36:44",
        "distinguished": None,
        "edited": False,
        "id": "nkcrtr",
        "is_original_content": False,
        "is_self": True,
        "link_flair_text": None,
        "locked": False,
        "name": "t3_nkcrtr",
        "nsfw": False,
        "num_comments": 0,
        "permalink": "/r/AskReddit/comments/nkcrtr/which_song_defines_your_childhood/",
        "score": 1,
        "selftext": "",
        "spoiler": False,
        "stickied": False,
        "subreddit": {
            "can_assign_link_flair": False,
            "can_assign_user_flair": False,
            "created_utc": "01-24-2008 22:52:15",
            "description": "###### [ [ SERIOUS ] ](http://www.reddit.com/r/askreddit/submit?selftext=true&title=%5BSerious%5D)\n\n\n##### [Rules](https://www.reddit.com/r/AskReddit/wiki/index#wiki_rules):\n1. You must post a clear and direct question in the title. The title may contain two, short, necessary context sentences.\nNo text is allowed in the textbox. Your thoughts/responses to the question can go in the comments section. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_1-)\n\n2. Any post asking for advice should be generic and not specific to your situation alone. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_2-)\n\n3. Askreddit is for open-ended discussion questions. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_3-)\n\n4. Posting, or seeking, any identifying personal information, real or fake, will result in a ban without a prior warning. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_4-)\n\n5. Askreddit is not your soapbox, personal army, or advertising platform. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_5-)\n\n6. [Serious] tagged posts are off-limits to jokes or irrelevant replies. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-)\n\n7. Soliciting money, goods, services, or favours is not allowed. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_7-)\n\n8. Mods reserve the right to remove content or restrict users' posting privileges as necessary if it is deemed detrimental to the subreddit or to the experience of others. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_8-)\n\n9. Comment replies consisting solely of images will be removed. [more >>](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_9-)\n\n##### If you think your post has disappeared, see spam or an inappropriate post, please do not hesitate to [contact the mods](https://www.reddit.com/message/compose?to=%2Fr%2FAskReddit), we're happy to help.\n\n---\n\n#### Tags to use:\n\n> ## [[Serious]](https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-)\n\n### Use a **[Serious]** post tag to designate your post as a serious, on-topic-only thread.\n\n-\n\n#### Filter posts by subject:\n\n[Mod posts](http://ud.reddit.com/r/AskReddit/#ud)\n[Serious posts](https://www.reddit.com/r/AskReddit/search/?q=flair%3Aserious&sort=new&restrict_sr=on&t=all)\n[Megathread](http://bu.reddit.com/r/AskReddit/#bu)\n[Breaking news](http://nr.reddit.com/r/AskReddit/#nr)\n[Unfilter](/r/AskReddit)\n\n\n-\n\n### Please use spoiler tags to hide spoilers. `>!insert spoiler here!<`\n\n-\n\n#### Other subreddits you might like:\nsome|header\n:---|:---\n[Ask Others](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_ask_others)|[Self & Others](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_self_.26amp.3B_others)\n[Find a subreddit](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_find_a_subreddit)|[Learn something](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_learn_something)\n[Meta Subs](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_meta)|[What is this ___](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_what_is_this______)\n[AskReddit Offshoots](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_askreddit_offshoots)|[Offers & Assistance](https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_offers_.26amp.3B_assistance)\n\n\n-\n\n### Ever read the reddiquette? [Take a peek!](/wiki/reddiquette)\n\n[](#/RES_SR_Config/NightModeCompatible)",
            "description_html": "<!-- SC_OFF --><div class=\"md\"><h6><a href=\"http://www.reddit.com/r/askreddit/submit?selftext=true&title=%5BSerious%5D\"> [ SERIOUS ] </a></h6>\n\n<h5><a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_rules\">Rules</a>:</h5>\n\n<ol>\n<li><p>You must post a clear and direct question in the title. The title may contain two, short, necessary context sentences.\nNo text is allowed in the textbox. Your thoughts/responses to the question can go in the comments section. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_1-\">more >></a></p></li>\n<li><p>Any post asking for advice should be generic and not specific to your situation alone. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_2-\">more >></a></p></li>\n<li><p>Askreddit is for open-ended discussion questions. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_3-\">more >></a></p></li>\n<li><p>Posting, or seeking, any identifying personal information, real or fake, will result in a ban without a prior warning. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_4-\">more >></a></p></li>\n<li><p>Askreddit is not your soapbox, personal army, or advertising platform. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_5-\">more >></a></p></li>\n<li><p>[Serious] tagged posts are off-limits to jokes or irrelevant replies. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-\">more >></a></p></li>\n<li><p>Soliciting money, goods, services, or favours is not allowed. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_7-\">more >></a></p></li>\n<li><p>Mods reserve the right to remove content or restrict users' posting privileges as necessary if it is deemed detrimental to the subreddit or to the experience of others. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_8-\">more >></a></p></li>\n<li><p>Comment replies consisting solely of images will be removed. <a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_9-\">more >></a></p></li>\n</ol>\n\n<h5>If you think your post has disappeared, see spam or an inappropriate post, please do not hesitate to <a href=\"https://www.reddit.com/message/compose?to=%2Fr%2FAskReddit\">contact the mods</a>, we're happy to help.</h5>\n\n<hr/>\n\n<h4>Tags to use:</h4>\n\n<blockquote>\n<h2><a href=\"https://www.reddit.com/r/AskReddit/wiki/index#wiki_-rule_6-\">[Serious]</a></h2>\n</blockquote>\n\n<h3>Use a <strong>[Serious]</strong> post tag to designate your post as a serious, on-topic-only thread.</h3>\n\n<h2></h2>\n\n<h4>Filter posts by subject:</h4>\n\n<p><a href=\"http://ud.reddit.com/r/AskReddit/#ud\">Mod posts</a>\n<a href=\"https://www.reddit.com/r/AskReddit/search/?q=flair%3Aserious&sort=new&restrict_sr=on&t=all\">Serious posts</a>\n<a href=\"http://bu.reddit.com/r/AskReddit/#bu\">Megathread</a>\n<a href=\"http://nr.reddit.com/r/AskReddit/#nr\">Breaking news</a>\n<a href=\"/r/AskReddit\">Unfilter</a></p>\n\n<h2></h2>\n\n<h3>Please use spoiler tags to hide spoilers. <code>>!insert spoiler here!<</code></h3>\n\n<h2></h2>\n\n<h4>Other subreddits you might like:</h4>\n\n<table><thead>\n<tr>\n<th align=\"left\">some</th>\n<th align=\"left\">header</th>\n</tr>\n</thead><tbody>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_ask_others\">Ask Others</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_self_.26amp.3B_others\">Self & Others</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_find_a_subreddit\">Find a subreddit</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_learn_something\">Learn something</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_meta\">Meta Subs</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_what_is_this______\">What is this ___</a></td>\n</tr>\n<tr>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_askreddit_offshoots\">AskReddit Offshoots</a></td>\n<td align=\"left\"><a href=\"https://www.reddit.com/r/AskReddit/wiki/sidebarsubs#wiki_offers_.26amp.3B_assistance\">Offers & Assistance</a></td>\n</tr>\n</tbody></table>\n\n<h2></h2>\n\n<h3>Ever read the reddiquette? <a href=\"/wiki/reddiquette\">Take a peek!</a></h3>\n\n<p><a href=\"#/RES_SR_Config/NightModeCompatible\"></a></p>\n</div><!-- SC_ON -->",
            "display_name": "AskReddit",
            "id": "2qh1i",
            "name": "t5_2qh1i",
            "nsfw": False,
            "public_description": "r/AskReddit is the place to ask and answer thought-provoking questions.",
            "spoilers_enabled": True,
            "subscribers": 32366338,
            "user_is_banned": False,
            "user_is_moderator": False,
            "user_is_subscriber": False
        },
        "title": "Which song defines your childhood?",
        "type": "submission",
        "upvote_ratio": 1.0,
        "url": "https://www.reddit.com/r/AskReddit/comments/nkcrtr/which_song_defines_your_childhood/"
    }
    # Call display() directly instead of try/except + assert True/assert False:
    # `assert True` was a no-op, and catching Exception to `assert False`
    # discarded the traceback. An unhandled exception here fails the test the
    # same way, but pytest then reports the real stack trace.
    DisplayStream.DisplayStream.display(test_submission_object)
| 146.096667
| 4,704
| 0.667663
| 6,869
| 43,829
| 4.161741
| 0.054593
| 0.07416
| 0.068563
| 0.130269
| 0.976143
| 0.9717
| 0.964984
| 0.964984
| 0.964984
| 0.964984
| 0
| 0.016072
| 0.145406
| 43,829
| 299
| 4,705
| 146.585284
| 0.747143
| 0.003673
| 0
| 0.816479
| 0
| 0.284644
| 0.680454
| 0.111842
| 0
| 0
| 0
| 0
| 0.029963
| 1
| 0.018727
| false
| 0
| 0.007491
| 0
| 0.041199
| 0.014981
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
756c29d7ab452677c24de51a2663f4c852798a84
| 186
|
py
|
Python
|
sigiTest/docs/doc_fun1.py
|
sinagilassi/vidstream
|
1b9d5a06045728981c66c4145e3fe2d63b299569
|
[
"MIT"
] | null | null | null |
sigiTest/docs/doc_fun1.py
|
sinagilassi/vidstream
|
1b9d5a06045728981c66c4145e3fe2d63b299569
|
[
"MIT"
] | null | null | null |
sigiTest/docs/doc_fun1.py
|
sinagilassi/vidstream
|
1b9d5a06045728981c66c4145e3fe2d63b299569
|
[
"MIT"
] | null | null | null |
from sigiTest.core.core_fun1 import cFun1_1
from sigiTest.core import cFun2_2
def dFun1_1():
    """Return ``'dFun1_1'`` joined with ``cFun1_1()`` and ``cFun2_2()``, the
    latter two separated by ``' - '``."""
    core_parts = (cFun1_1(), " - ", cFun2_2())
    return "dFun1_1" + "".join(core_parts)
def dFun1_2():
    """Return this doc function's constant identifier string."""
    label = "dFun1_2"
    return label
| 16.909091
| 52
| 0.682796
| 30
| 186
| 3.933333
| 0.4
| 0.20339
| 0.271186
| 0.237288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113333
| 0.193548
| 186
| 10
| 53
| 18.6
| 0.673333
| 0
| 0
| 0
| 0
| 0
| 0.091398
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
7579f8685cc58dce55773c81a56e2fe0aea35e05
| 4,420
|
py
|
Python
|
tests/model/test_retinanet.py
|
mhd53/retinanet-from-torch
|
656afde34c5a281a10160151c9e036718eb4a716
|
[
"MIT"
] | null | null | null |
tests/model/test_retinanet.py
|
mhd53/retinanet-from-torch
|
656afde34c5a281a10160151c9e036718eb4a716
|
[
"MIT"
] | null | null | null |
tests/model/test_retinanet.py
|
mhd53/retinanet-from-torch
|
656afde34c5a281a10160151c9e036718eb4a716
|
[
"MIT"
] | null | null | null |
import pytest
import torch
from model.model import retina_resnet50, retina_resnet101, retina_resnet152
# Number of images in each dummy batch fed to the models under test.
BATCH_SIZE = 1


@pytest.fixture(scope="module")
def init_512x512_dummy_data():
    """Provide one random ``(BATCH_SIZE, 3, 512, 512)`` float tensor, shared
    across every test in this module (``scope="module"``)."""
    return torch.randn(BATCH_SIZE, 3, 512, 512)
def test_retina_resnet50_out_shapes(init_512x512_dummy_data):
num_anchors = 9
num_classes = 20
data = init_512x512_dummy_data
model = retina_resnet50(num_classes)
outputs = model(data)
pred_logits = outputs["pred_logits"]
pred_bboxes = outputs["pred_bboxes"]
anchors = outputs["anchors"]
assert pred_logits[0].shape == (BATCH_SIZE, 64 * 64 * num_anchors, num_classes)
assert pred_logits[1].shape == (BATCH_SIZE, 32 * 32 * num_anchors, num_classes)
assert pred_logits[2].shape == (BATCH_SIZE, 16 * 16 * num_anchors, num_classes)
assert pred_logits[3].shape == (BATCH_SIZE, 8 * 8 * num_anchors, num_classes)
assert pred_logits[4].shape == (BATCH_SIZE, 4 * 4 * num_anchors, num_classes)
assert pred_bboxes[0].shape == (BATCH_SIZE, 64 * 64 * num_anchors, 4)
assert pred_bboxes[1].shape == (BATCH_SIZE, 32 * 32 * num_anchors, 4)
assert pred_bboxes[2].shape == (BATCH_SIZE, 16 * 16 * num_anchors, 4)
assert pred_bboxes[3].shape == (BATCH_SIZE, 8 * 8 * num_anchors, 4)
assert pred_bboxes[4].shape == (BATCH_SIZE, 4 * 4 * num_anchors, 4)
assert anchors[0].shape == (64 * 64 * num_anchors, 4)
assert anchors[1].shape == (32 * 32 * num_anchors, 4)
assert anchors[2].shape == (16 * 16 * num_anchors, 4)
assert anchors[3].shape == (8 * 8 * num_anchors, 4)
assert anchors[4].shape == (4 * 4 * num_anchors, 4)
def test_retina_resnet101_out_shapes(init_512x512_dummy_data):
num_anchors = 9
num_classes = 20
data = init_512x512_dummy_data
model = retina_resnet101(num_classes)
outputs = model(data)
pred_logits = outputs["pred_logits"]
pred_bboxes = outputs["pred_bboxes"]
anchors = outputs["anchors"]
assert pred_logits[0].shape == (BATCH_SIZE, 64 * 64 * num_anchors, num_classes)
assert pred_logits[1].shape == (BATCH_SIZE, 32 * 32 * num_anchors, num_classes)
assert pred_logits[2].shape == (BATCH_SIZE, 16 * 16 * num_anchors, num_classes)
assert pred_logits[3].shape == (BATCH_SIZE, 8 * 8 * num_anchors, num_classes)
assert pred_logits[4].shape == (BATCH_SIZE, 4 * 4 * num_anchors, num_classes)
assert pred_bboxes[0].shape == (BATCH_SIZE, 64 * 64 * num_anchors, 4)
assert pred_bboxes[1].shape == (BATCH_SIZE, 32 * 32 * num_anchors, 4)
assert pred_bboxes[2].shape == (BATCH_SIZE, 16 * 16 * num_anchors, 4)
assert pred_bboxes[3].shape == (BATCH_SIZE, 8 * 8 * num_anchors, 4)
assert pred_bboxes[4].shape == (BATCH_SIZE, 4 * 4 * num_anchors, 4)
assert anchors[0].shape == (64 * 64 * num_anchors, 4)
assert anchors[1].shape == (32 * 32 * num_anchors, 4)
assert anchors[2].shape == (16 * 16 * num_anchors, 4)
assert anchors[3].shape == (8 * 8 * num_anchors, 4)
assert anchors[4].shape == (4 * 4 * num_anchors, 4)
def test_retina_resnet152_out_shapes(init_512x512_dummy_data):
num_anchors = 9
num_classes = 20
data = init_512x512_dummy_data
model = retina_resnet152(num_classes)
outputs = model(data)
pred_logits = outputs["pred_logits"]
pred_bboxes = outputs["pred_bboxes"]
anchors = outputs["anchors"]
assert pred_logits[0].shape == (BATCH_SIZE, 64 * 64 * num_anchors, num_classes)
assert pred_logits[1].shape == (BATCH_SIZE, 32 * 32 * num_anchors, num_classes)
assert pred_logits[2].shape == (BATCH_SIZE, 16 * 16 * num_anchors, num_classes)
assert pred_logits[3].shape == (BATCH_SIZE, 8 * 8 * num_anchors, num_classes)
assert pred_logits[4].shape == (BATCH_SIZE, 4 * 4 * num_anchors, num_classes)
assert pred_bboxes[0].shape == (BATCH_SIZE, 64 * 64 * num_anchors, 4)
assert pred_bboxes[1].shape == (BATCH_SIZE, 32 * 32 * num_anchors, 4)
assert pred_bboxes[2].shape == (BATCH_SIZE, 16 * 16 * num_anchors, 4)
assert pred_bboxes[3].shape == (BATCH_SIZE, 8 * 8 * num_anchors, 4)
assert pred_bboxes[4].shape == (BATCH_SIZE, 4 * 4 * num_anchors, 4)
assert anchors[0].shape == (64 * 64 * num_anchors, 4)
assert anchors[1].shape == (32 * 32 * num_anchors, 4)
assert anchors[2].shape == (16 * 16 * num_anchors, 4)
assert anchors[3].shape == (8 * 8 * num_anchors, 4)
assert anchors[4].shape == (4 * 4 * num_anchors, 4)
| 43.762376
| 83
| 0.679186
| 670
| 4,420
| 4.208955
| 0.067164
| 0.170213
| 0.148936
| 0.162766
| 0.914539
| 0.914539
| 0.914539
| 0.914539
| 0.914539
| 0.914539
| 0
| 0.08424
| 0.188914
| 4,420
| 101
| 84
| 43.762376
| 0.702371
| 0
| 0
| 0.835443
| 0
| 0
| 0.021036
| 0
| 0
| 0
| 0
| 0
| 0.56962
| 1
| 0.050633
| false
| 0
| 0.037975
| 0.012658
| 0.101266
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
75ae9052615e9da935ab8a9eb60f0400b8a9a6dd
| 37,473
|
py
|
Python
|
sdk/python/pulumi_oci/dns/resolver_endpoint.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/dns/resolver_endpoint.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/dns/resolver_endpoint.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['ResolverEndpointInitArgs', 'ResolverEndpoint']
@pulumi.input_type
class ResolverEndpointInitArgs:
    """Constructor arguments for the ResolverEndpoint resource.

    NOTE(review): tfgen-generated code. ``@pulumi.input_type`` introspects the
    ``__init__`` signature and the property/setter pairs below, so their names
    and structure must not be edited by hand.
    """

    def __init__(__self__, *,
                 is_forwarding: pulumi.Input[bool],
                 is_listening: pulumi.Input[bool],
                 resolver_id: pulumi.Input[str],
                 scope: pulumi.Input[str],
                 subnet_id: pulumi.Input[str],
                 endpoint_type: Optional[pulumi.Input[str]] = None,
                 forwarding_address: Optional[pulumi.Input[str]] = None,
                 listening_address: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a ResolverEndpoint resource.
        :param pulumi.Input[bool] is_forwarding: A Boolean flag indicating whether or not the resolver endpoint is for forwarding.
        :param pulumi.Input[bool] is_listening: A Boolean flag indicating whether or not the resolver endpoint is for listening.
        :param pulumi.Input[str] resolver_id: The OCID of the target resolver.
        :param pulumi.Input[str] scope: Value must be `PRIVATE` when creating private name resolver endpoints.
        :param pulumi.Input[str] subnet_id: The OCID of a subnet. Must be part of the VCN that the resolver is attached to.
        :param pulumi.Input[str] endpoint_type: (Updatable) The type of resolver endpoint. VNIC is currently the only supported type.
        :param pulumi.Input[str] forwarding_address: An IP address from which forwarded queries may be sent. For VNIC endpoints, this IP address must be part of the subnet and will be assigned by the system if unspecified when isForwarding is true.
        :param pulumi.Input[str] listening_address: An IP address to listen to queries on. For VNIC endpoints this IP address must be part of the subnet and will be assigned by the system if unspecified when isListening is true.
        :param pulumi.Input[str] name: The name of the resolver endpoint. Must be unique, case-insensitive, within the resolver.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: An array of network security group OCIDs for the resolver endpoint. These must be part of the VCN that the resolver endpoint is a part of.
        """
        # Required inputs are always stored.
        pulumi.set(__self__, "is_forwarding", is_forwarding)
        pulumi.set(__self__, "is_listening", is_listening)
        pulumi.set(__self__, "resolver_id", resolver_id)
        pulumi.set(__self__, "scope", scope)
        pulumi.set(__self__, "subnet_id", subnet_id)
        # Optional inputs are stored only when supplied, so Pulumi can tell
        # "unset" apart from an explicit value.
        if endpoint_type is not None:
            pulumi.set(__self__, "endpoint_type", endpoint_type)
        if forwarding_address is not None:
            pulumi.set(__self__, "forwarding_address", forwarding_address)
        if listening_address is not None:
            pulumi.set(__self__, "listening_address", listening_address)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if nsg_ids is not None:
            pulumi.set(__self__, "nsg_ids", nsg_ids)

    # Each property below pairs a getter with a setter backed by
    # pulumi.get/pulumi.set; the getter name= argument is the wire
    # (camelCase) name of the field.

    @property
    @pulumi.getter(name="isForwarding")
    def is_forwarding(self) -> pulumi.Input[bool]:
        """
        A Boolean flag indicating whether or not the resolver endpoint is for forwarding.
        """
        return pulumi.get(self, "is_forwarding")

    @is_forwarding.setter
    def is_forwarding(self, value: pulumi.Input[bool]):
        pulumi.set(self, "is_forwarding", value)

    @property
    @pulumi.getter(name="isListening")
    def is_listening(self) -> pulumi.Input[bool]:
        """
        A Boolean flag indicating whether or not the resolver endpoint is for listening.
        """
        return pulumi.get(self, "is_listening")

    @is_listening.setter
    def is_listening(self, value: pulumi.Input[bool]):
        pulumi.set(self, "is_listening", value)

    @property
    @pulumi.getter(name="resolverId")
    def resolver_id(self) -> pulumi.Input[str]:
        """
        The OCID of the target resolver.
        """
        return pulumi.get(self, "resolver_id")

    @resolver_id.setter
    def resolver_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "resolver_id", value)

    @property
    @pulumi.getter
    def scope(self) -> pulumi.Input[str]:
        """
        Value must be `PRIVATE` when creating private name resolver endpoints.
        """
        return pulumi.get(self, "scope")

    @scope.setter
    def scope(self, value: pulumi.Input[str]):
        pulumi.set(self, "scope", value)

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> pulumi.Input[str]:
        """
        The OCID of a subnet. Must be part of the VCN that the resolver is attached to.
        """
        return pulumi.get(self, "subnet_id")

    @subnet_id.setter
    def subnet_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "subnet_id", value)

    @property
    @pulumi.getter(name="endpointType")
    def endpoint_type(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The type of resolver endpoint. VNIC is currently the only supported type.
        """
        return pulumi.get(self, "endpoint_type")

    @endpoint_type.setter
    def endpoint_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_type", value)

    @property
    @pulumi.getter(name="forwardingAddress")
    def forwarding_address(self) -> Optional[pulumi.Input[str]]:
        """
        An IP address from which forwarded queries may be sent. For VNIC endpoints, this IP address must be part of the subnet and will be assigned by the system if unspecified when isForwarding is true.
        """
        return pulumi.get(self, "forwarding_address")

    @forwarding_address.setter
    def forwarding_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "forwarding_address", value)

    @property
    @pulumi.getter(name="listeningAddress")
    def listening_address(self) -> Optional[pulumi.Input[str]]:
        """
        An IP address to listen to queries on. For VNIC endpoints this IP address must be part of the subnet and will be assigned by the system if unspecified when isListening is true.
        """
        return pulumi.get(self, "listening_address")

    @listening_address.setter
    def listening_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "listening_address", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resolver endpoint. Must be unique, case-insensitive, within the resolver.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="nsgIds")
    def nsg_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        An array of network security group OCIDs for the resolver endpoint. These must be part of the VCN that the resolver endpoint is a part of.
        """
        return pulumi.get(self, "nsg_ids")

    @nsg_ids.setter
    def nsg_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "nsg_ids", value)
@pulumi.input_type
class _ResolverEndpointState:
    """State-lookup arguments for ResolverEndpoint (used by ``get``/import).

    Unlike ResolverEndpointInitArgs, every field here is optional, and
    provider-computed outputs (compartment_id, self, state, time_created,
    time_updated) are included.

    NOTE(review): tfgen-generated code. ``@pulumi.input_type`` introspects the
    ``__init__`` signature and the property/setter pairs below, so their names
    and structure must not be edited by hand.
    """

    def __init__(__self__, *,
                 compartment_id: Optional[pulumi.Input[str]] = None,
                 endpoint_type: Optional[pulumi.Input[str]] = None,
                 forwarding_address: Optional[pulumi.Input[str]] = None,
                 is_forwarding: Optional[pulumi.Input[bool]] = None,
                 is_listening: Optional[pulumi.Input[bool]] = None,
                 listening_address: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 resolver_id: Optional[pulumi.Input[str]] = None,
                 scope: Optional[pulumi.Input[str]] = None,
                 self: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[str]] = None,
                 subnet_id: Optional[pulumi.Input[str]] = None,
                 time_created: Optional[pulumi.Input[str]] = None,
                 time_updated: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering ResolverEndpoint resources.
        :param pulumi.Input[str] compartment_id: The OCID of the owning compartment. This will match the resolver that the resolver endpoint is under and will be updated if the resolver's compartment is changed.
        :param pulumi.Input[str] endpoint_type: (Updatable) The type of resolver endpoint. VNIC is currently the only supported type.
        :param pulumi.Input[str] forwarding_address: An IP address from which forwarded queries may be sent. For VNIC endpoints, this IP address must be part of the subnet and will be assigned by the system if unspecified when isForwarding is true.
        :param pulumi.Input[bool] is_forwarding: A Boolean flag indicating whether or not the resolver endpoint is for forwarding.
        :param pulumi.Input[bool] is_listening: A Boolean flag indicating whether or not the resolver endpoint is for listening.
        :param pulumi.Input[str] listening_address: An IP address to listen to queries on. For VNIC endpoints this IP address must be part of the subnet and will be assigned by the system if unspecified when isListening is true.
        :param pulumi.Input[str] name: The name of the resolver endpoint. Must be unique, case-insensitive, within the resolver.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: An array of network security group OCIDs for the resolver endpoint. These must be part of the VCN that the resolver endpoint is a part of.
        :param pulumi.Input[str] resolver_id: The OCID of the target resolver.
        :param pulumi.Input[str] scope: Value must be `PRIVATE` when creating private name resolver endpoints.
        :param pulumi.Input[str] self: The canonical absolute URL of the resource.
        :param pulumi.Input[str] state: The current state of the resource.
        :param pulumi.Input[str] subnet_id: The OCID of a subnet. Must be part of the VCN that the resolver is attached to.
        :param pulumi.Input[str] time_created: The date and time the resource was created in "YYYY-MM-ddThh:mm:ssZ" format with a Z offset, as defined by RFC 3339.
        :param pulumi.Input[str] time_updated: The date and time the resource was last updated in "YYYY-MM-ddThh:mm:ssZ" format with a Z offset, as defined by RFC 3339.
        """
        # Every field is optional; only supplied values are stored so Pulumi
        # can distinguish "unset" from an explicit value. The instance
        # parameter is __self__ because `self` is itself a resource field here.
        if compartment_id is not None:
            pulumi.set(__self__, "compartment_id", compartment_id)
        if endpoint_type is not None:
            pulumi.set(__self__, "endpoint_type", endpoint_type)
        if forwarding_address is not None:
            pulumi.set(__self__, "forwarding_address", forwarding_address)
        if is_forwarding is not None:
            pulumi.set(__self__, "is_forwarding", is_forwarding)
        if is_listening is not None:
            pulumi.set(__self__, "is_listening", is_listening)
        if listening_address is not None:
            pulumi.set(__self__, "listening_address", listening_address)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if nsg_ids is not None:
            pulumi.set(__self__, "nsg_ids", nsg_ids)
        if resolver_id is not None:
            pulumi.set(__self__, "resolver_id", resolver_id)
        if scope is not None:
            pulumi.set(__self__, "scope", scope)
        if self is not None:
            pulumi.set(__self__, "self", self)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if subnet_id is not None:
            pulumi.set(__self__, "subnet_id", subnet_id)
        if time_created is not None:
            pulumi.set(__self__, "time_created", time_created)
        if time_updated is not None:
            pulumi.set(__self__, "time_updated", time_updated)

    # Getter/setter pairs backed by pulumi.get/pulumi.set; the getter name=
    # argument is the wire (camelCase) name of the field.

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> Optional[pulumi.Input[str]]:
        """
        The OCID of the owning compartment. This will match the resolver that the resolver endpoint is under and will be updated if the resolver's compartment is changed.
        """
        return pulumi.get(self, "compartment_id")

    @compartment_id.setter
    def compartment_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compartment_id", value)

    @property
    @pulumi.getter(name="endpointType")
    def endpoint_type(self) -> Optional[pulumi.Input[str]]:
        """
        (Updatable) The type of resolver endpoint. VNIC is currently the only supported type.
        """
        return pulumi.get(self, "endpoint_type")

    @endpoint_type.setter
    def endpoint_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "endpoint_type", value)

    @property
    @pulumi.getter(name="forwardingAddress")
    def forwarding_address(self) -> Optional[pulumi.Input[str]]:
        """
        An IP address from which forwarded queries may be sent. For VNIC endpoints, this IP address must be part of the subnet and will be assigned by the system if unspecified when isForwarding is true.
        """
        return pulumi.get(self, "forwarding_address")

    @forwarding_address.setter
    def forwarding_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "forwarding_address", value)

    @property
    @pulumi.getter(name="isForwarding")
    def is_forwarding(self) -> Optional[pulumi.Input[bool]]:
        """
        A Boolean flag indicating whether or not the resolver endpoint is for forwarding.
        """
        return pulumi.get(self, "is_forwarding")

    @is_forwarding.setter
    def is_forwarding(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_forwarding", value)

    @property
    @pulumi.getter(name="isListening")
    def is_listening(self) -> Optional[pulumi.Input[bool]]:
        """
        A Boolean flag indicating whether or not the resolver endpoint is for listening.
        """
        return pulumi.get(self, "is_listening")

    @is_listening.setter
    def is_listening(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_listening", value)

    @property
    @pulumi.getter(name="listeningAddress")
    def listening_address(self) -> Optional[pulumi.Input[str]]:
        """
        An IP address to listen to queries on. For VNIC endpoints this IP address must be part of the subnet and will be assigned by the system if unspecified when isListening is true.
        """
        return pulumi.get(self, "listening_address")

    @listening_address.setter
    def listening_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "listening_address", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resolver endpoint. Must be unique, case-insensitive, within the resolver.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="nsgIds")
    def nsg_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        An array of network security group OCIDs for the resolver endpoint. These must be part of the VCN that the resolver endpoint is a part of.
        """
        return pulumi.get(self, "nsg_ids")

    @nsg_ids.setter
    def nsg_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "nsg_ids", value)

    @property
    @pulumi.getter(name="resolverId")
    def resolver_id(self) -> Optional[pulumi.Input[str]]:
        """
        The OCID of the target resolver.
        """
        return pulumi.get(self, "resolver_id")

    @resolver_id.setter
    def resolver_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resolver_id", value)

    @property
    @pulumi.getter
    def scope(self) -> Optional[pulumi.Input[str]]:
        """
        Value must be `PRIVATE` when creating private name resolver endpoints.
        """
        return pulumi.get(self, "scope")

    @scope.setter
    def scope(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "scope", value)

    @property
    @pulumi.getter
    def self(self) -> Optional[pulumi.Input[str]]:
        """
        The canonical absolute URL of the resource.
        """
        return pulumi.get(self, "self")

    @self.setter
    def self(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "self", value)

    @property
    @pulumi.getter
    def state(self) -> Optional[pulumi.Input[str]]:
        """
        The current state of the resource.
        """
        return pulumi.get(self, "state")

    @state.setter
    def state(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "state", value)

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> Optional[pulumi.Input[str]]:
        """
        The OCID of a subnet. Must be part of the VCN that the resolver is attached to.
        """
        return pulumi.get(self, "subnet_id")

    @subnet_id.setter
    def subnet_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet_id", value)

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> Optional[pulumi.Input[str]]:
        """
        The date and time the resource was created in "YYYY-MM-ddThh:mm:ssZ" format with a Z offset, as defined by RFC 3339.
        """
        return pulumi.get(self, "time_created")

    @time_created.setter
    def time_created(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_created", value)

    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> Optional[pulumi.Input[str]]:
        """
        The date and time the resource was last updated in "YYYY-MM-ddThh:mm:ssZ" format with a Z offset, as defined by RFC 3339.
        """
        return pulumi.get(self, "time_updated")

    @time_updated.setter
    def time_updated(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "time_updated", value)
class ResolverEndpoint(pulumi.CustomResource):
    """Pulumi custom resource for an OCI DNS resolver endpoint
    (type token ``oci:dns/resolverEndpoint:ResolverEndpoint``).

    Auto-generated wrapper: construction is dispatched through two typed
    ``@overload`` stubs into ``_internal_init``, and existing resources can be
    looked up with the static :meth:`get`.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 endpoint_type: Optional[pulumi.Input[str]] = None,
                 forwarding_address: Optional[pulumi.Input[str]] = None,
                 is_forwarding: Optional[pulumi.Input[bool]] = None,
                 is_listening: Optional[pulumi.Input[bool]] = None,
                 listening_address: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 resolver_id: Optional[pulumi.Input[str]] = None,
                 scope: Optional[pulumi.Input[str]] = None,
                 subnet_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        This resource provides the Resolver Endpoint resource in Oracle Cloud Infrastructure DNS service.

        Creates a new resolver endpoint. Requires a `PRIVATE` scope query parameter.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_oci as oci

        test_resolver_endpoint = oci.dns.ResolverEndpoint("testResolverEndpoint",
            is_forwarding=var["resolver_endpoint_is_forwarding"],
            is_listening=var["resolver_endpoint_is_listening"],
            resolver_id=oci_dns_resolver["test_resolver"]["id"],
            subnet_id=oci_core_subnet["test_subnet"]["id"],
            scope="PRIVATE",
            endpoint_type=var["resolver_endpoint_endpoint_type"],
            forwarding_address=var["resolver_endpoint_forwarding_address"],
            listening_address=var["resolver_endpoint_listening_address"],
            nsg_ids=var["resolver_endpoint_nsg_ids"])
        ```

        ## Import

        For legacy ResolverEndpoints created without `scope`, these ResolverEndpoints can be imported using the `id`, e.g.

        ```sh
        $ pulumi import oci:dns/resolverEndpoint:ResolverEndpoint test_resolver_endpoint "resolverId/{resolverId}/name/{resolverEndpointName}"
        ```

        For ResolverEndpoints created using `scope`, these ResolverEndpoints can be imported using the `id`, e.g.

        ```sh
        $ pulumi import oci:dns/resolverEndpoint:ResolverEndpoint test_resolver_endpoint "resolverId/{resolverId}/name/{name}/scope/{scope}"
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] endpoint_type: (Updatable) The type of resolver endpoint. VNIC is currently the only supported type.
        :param pulumi.Input[str] forwarding_address: An IP address from which forwarded queries may be sent. For VNIC endpoints, this IP address must be part of the subnet and will be assigned by the system if unspecified when isForwarding is true.
        :param pulumi.Input[bool] is_forwarding: A Boolean flag indicating whether or not the resolver endpoint is for forwarding.
        :param pulumi.Input[bool] is_listening: A Boolean flag indicating whether or not the resolver endpoint is for listening.
        :param pulumi.Input[str] listening_address: An IP address to listen to queries on. For VNIC endpoints this IP address must be part of the subnet and will be assigned by the system if unspecified when isListening is true.
        :param pulumi.Input[str] name: The name of the resolver endpoint. Must be unique, case-insensitive, within the resolver.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: An array of network security group OCIDs for the resolver endpoint. These must be part of the VCN that the resolver endpoint is a part of.
        :param pulumi.Input[str] resolver_id: The OCID of the target resolver.
        :param pulumi.Input[str] scope: Value must be `PRIVATE` when creating private name resolver endpoints.
        :param pulumi.Input[str] subnet_id: The OCID of a subnet. Must be part of the VCN that the resolver is attached to.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ResolverEndpointInitArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        This resource provides the Resolver Endpoint resource in Oracle Cloud Infrastructure DNS service.

        Creates a new resolver endpoint. Requires a `PRIVATE` scope query parameter.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_oci as oci

        test_resolver_endpoint = oci.dns.ResolverEndpoint("testResolverEndpoint",
            is_forwarding=var["resolver_endpoint_is_forwarding"],
            is_listening=var["resolver_endpoint_is_listening"],
            resolver_id=oci_dns_resolver["test_resolver"]["id"],
            subnet_id=oci_core_subnet["test_subnet"]["id"],
            scope="PRIVATE",
            endpoint_type=var["resolver_endpoint_endpoint_type"],
            forwarding_address=var["resolver_endpoint_forwarding_address"],
            listening_address=var["resolver_endpoint_listening_address"],
            nsg_ids=var["resolver_endpoint_nsg_ids"])
        ```

        ## Import

        For legacy ResolverEndpoints created without `scope`, these ResolverEndpoints can be imported using the `id`, e.g.

        ```sh
        $ pulumi import oci:dns/resolverEndpoint:ResolverEndpoint test_resolver_endpoint "resolverId/{resolverId}/name/{resolverEndpointName}"
        ```

        For ResolverEndpoints created using `scope`, these ResolverEndpoints can be imported using the `id`, e.g.

        ```sh
        $ pulumi import oci:dns/resolverEndpoint:ResolverEndpoint test_resolver_endpoint "resolverId/{resolverId}/name/{name}/scope/{scope}"
        ```

        :param str resource_name: The name of the resource.
        :param ResolverEndpointInitArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: either a single
        # ResolverEndpointInitArgs bundle or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(ResolverEndpointInitArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Args-object form: expand the bundle into keyword arguments.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            # Keyword form: forward everything unchanged.
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       endpoint_type: Optional[pulumi.Input[str]] = None,
                       forwarding_address: Optional[pulumi.Input[str]] = None,
                       is_forwarding: Optional[pulumi.Input[bool]] = None,
                       is_listening: Optional[pulumi.Input[bool]] = None,
                       listening_address: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       resolver_id: Optional[pulumi.Input[str]] = None,
                       scope: Optional[pulumi.Input[str]] = None,
                       subnet_id: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared constructor body: validates options, builds the property bag
        # and registers the resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to adopting an existing one):
            # callers must not supply a pre-built __props__ bag.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ResolverEndpointInitArgs.__new__(ResolverEndpointInitArgs)

            __props__.__dict__["endpoint_type"] = endpoint_type
            __props__.__dict__["forwarding_address"] = forwarding_address
            # Required inputs: enforced only when not rehydrating via opts.urn.
            if is_forwarding is None and not opts.urn:
                raise TypeError("Missing required property 'is_forwarding'")
            __props__.__dict__["is_forwarding"] = is_forwarding
            if is_listening is None and not opts.urn:
                raise TypeError("Missing required property 'is_listening'")
            __props__.__dict__["is_listening"] = is_listening
            __props__.__dict__["listening_address"] = listening_address
            __props__.__dict__["name"] = name
            __props__.__dict__["nsg_ids"] = nsg_ids
            if resolver_id is None and not opts.urn:
                raise TypeError("Missing required property 'resolver_id'")
            __props__.__dict__["resolver_id"] = resolver_id
            if scope is None and not opts.urn:
                raise TypeError("Missing required property 'scope'")
            __props__.__dict__["scope"] = scope
            if subnet_id is None and not opts.urn:
                raise TypeError("Missing required property 'subnet_id'")
            __props__.__dict__["subnet_id"] = subnet_id
            # Output-only properties start as None and are filled in by the
            # provider after creation.
            __props__.__dict__["compartment_id"] = None
            __props__.__dict__["self"] = None
            __props__.__dict__["state"] = None
            __props__.__dict__["time_created"] = None
            __props__.__dict__["time_updated"] = None
        super(ResolverEndpoint, __self__).__init__(
            'oci:dns/resolverEndpoint:ResolverEndpoint',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            compartment_id: Optional[pulumi.Input[str]] = None,
            endpoint_type: Optional[pulumi.Input[str]] = None,
            forwarding_address: Optional[pulumi.Input[str]] = None,
            is_forwarding: Optional[pulumi.Input[bool]] = None,
            is_listening: Optional[pulumi.Input[bool]] = None,
            listening_address: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            nsg_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            resolver_id: Optional[pulumi.Input[str]] = None,
            scope: Optional[pulumi.Input[str]] = None,
            self: Optional[pulumi.Input[str]] = None,
            state: Optional[pulumi.Input[str]] = None,
            subnet_id: Optional[pulumi.Input[str]] = None,
            time_created: Optional[pulumi.Input[str]] = None,
            time_updated: Optional[pulumi.Input[str]] = None) -> 'ResolverEndpoint':
        """
        Get an existing ResolverEndpoint resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] compartment_id: The OCID of the owning compartment. This will match the resolver that the resolver endpoint is under and will be updated if the resolver's compartment is changed.
        :param pulumi.Input[str] endpoint_type: (Updatable) The type of resolver endpoint. VNIC is currently the only supported type.
        :param pulumi.Input[str] forwarding_address: An IP address from which forwarded queries may be sent. For VNIC endpoints, this IP address must be part of the subnet and will be assigned by the system if unspecified when isForwarding is true.
        :param pulumi.Input[bool] is_forwarding: A Boolean flag indicating whether or not the resolver endpoint is for forwarding.
        :param pulumi.Input[bool] is_listening: A Boolean flag indicating whether or not the resolver endpoint is for listening.
        :param pulumi.Input[str] listening_address: An IP address to listen to queries on. For VNIC endpoints this IP address must be part of the subnet and will be assigned by the system if unspecified when isListening is true.
        :param pulumi.Input[str] name: The name of the resolver endpoint. Must be unique, case-insensitive, within the resolver.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] nsg_ids: An array of network security group OCIDs for the resolver endpoint. These must be part of the VCN that the resolver endpoint is a part of.
        :param pulumi.Input[str] resolver_id: The OCID of the target resolver.
        :param pulumi.Input[str] scope: Value must be `PRIVATE` when creating private name resolver endpoints.
        :param pulumi.Input[str] self: The canonical absolute URL of the resource.
        :param pulumi.Input[str] state: The current state of the resource.
        :param pulumi.Input[str] subnet_id: The OCID of a subnet. Must be part of the VCN that the resolver is attached to.
        :param pulumi.Input[str] time_created: The date and time the resource was created in "YYYY-MM-ddThh:mm:ssZ" format with a Z offset, as defined by RFC 3339.
        :param pulumi.Input[str] time_updated: The date and time the resource was last updated in "YYYY-MM-ddThh:mm:ssZ" format with a Z offset, as defined by RFC 3339.
        """
        # Merge the lookup id into the options, build a state bag from the
        # supplied values, and rehydrate the resource from the engine.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _ResolverEndpointState.__new__(_ResolverEndpointState)

        __props__.__dict__["compartment_id"] = compartment_id
        __props__.__dict__["endpoint_type"] = endpoint_type
        __props__.__dict__["forwarding_address"] = forwarding_address
        __props__.__dict__["is_forwarding"] = is_forwarding
        __props__.__dict__["is_listening"] = is_listening
        __props__.__dict__["listening_address"] = listening_address
        __props__.__dict__["name"] = name
        __props__.__dict__["nsg_ids"] = nsg_ids
        __props__.__dict__["resolver_id"] = resolver_id
        __props__.__dict__["scope"] = scope
        __props__.__dict__["self"] = self
        __props__.__dict__["state"] = state
        __props__.__dict__["subnet_id"] = subnet_id
        __props__.__dict__["time_created"] = time_created
        __props__.__dict__["time_updated"] = time_updated
        return ResolverEndpoint(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> pulumi.Output[str]:
        """
        The OCID of the owning compartment. This will match the resolver that the resolver endpoint is under and will be updated if the resolver's compartment is changed.
        """
        return pulumi.get(self, "compartment_id")

    @property
    @pulumi.getter(name="endpointType")
    def endpoint_type(self) -> pulumi.Output[str]:
        """
        (Updatable) The type of resolver endpoint. VNIC is currently the only supported type.
        """
        return pulumi.get(self, "endpoint_type")

    @property
    @pulumi.getter(name="forwardingAddress")
    def forwarding_address(self) -> pulumi.Output[str]:
        """
        An IP address from which forwarded queries may be sent. For VNIC endpoints, this IP address must be part of the subnet and will be assigned by the system if unspecified when isForwarding is true.
        """
        return pulumi.get(self, "forwarding_address")

    @property
    @pulumi.getter(name="isForwarding")
    def is_forwarding(self) -> pulumi.Output[bool]:
        """
        A Boolean flag indicating whether or not the resolver endpoint is for forwarding.
        """
        return pulumi.get(self, "is_forwarding")

    @property
    @pulumi.getter(name="isListening")
    def is_listening(self) -> pulumi.Output[bool]:
        """
        A Boolean flag indicating whether or not the resolver endpoint is for listening.
        """
        return pulumi.get(self, "is_listening")

    @property
    @pulumi.getter(name="listeningAddress")
    def listening_address(self) -> pulumi.Output[str]:
        """
        An IP address to listen to queries on. For VNIC endpoints this IP address must be part of the subnet and will be assigned by the system if unspecified when isListening is true.
        """
        return pulumi.get(self, "listening_address")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the resolver endpoint. Must be unique, case-insensitive, within the resolver.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="nsgIds")
    def nsg_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        An array of network security group OCIDs for the resolver endpoint. These must be part of the VCN that the resolver endpoint is a part of.
        """
        return pulumi.get(self, "nsg_ids")

    @property
    @pulumi.getter(name="resolverId")
    def resolver_id(self) -> pulumi.Output[str]:
        """
        The OCID of the target resolver.
        """
        return pulumi.get(self, "resolver_id")

    @property
    @pulumi.getter
    def scope(self) -> pulumi.Output[str]:
        """
        Value must be `PRIVATE` when creating private name resolver endpoints.
        """
        return pulumi.get(self, "scope")

    @property
    @pulumi.getter
    def self(self) -> pulumi.Output[str]:
        """
        The canonical absolute URL of the resource.
        """
        # Named "self" to mirror the OCI API field; shadows the instance name
        # only within this method body.
        return pulumi.get(self, "self")

    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[str]:
        """
        The current state of the resource.
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> pulumi.Output[str]:
        """
        The OCID of a subnet. Must be part of the VCN that the resolver is attached to.
        """
        return pulumi.get(self, "subnet_id")

    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> pulumi.Output[str]:
        """
        The date and time the resource was created in "YYYY-MM-ddThh:mm:ssZ" format with a Z offset, as defined by RFC 3339.
        """
        return pulumi.get(self, "time_created")

    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> pulumi.Output[str]:
        """
        The date and time the resource was last updated in "YYYY-MM-ddThh:mm:ssZ" format with a Z offset, as defined by RFC 3339.
        """
        return pulumi.get(self, "time_updated")
| 47.797194
| 248
| 0.66181
| 4,709
| 37,473
| 5.082608
| 0.050329
| 0.081349
| 0.079552
| 0.06802
| 0.904654
| 0.88719
| 0.87219
| 0.853514
| 0.839517
| 0.81215
| 0
| 0.001162
| 0.241961
| 37,473
| 783
| 249
| 47.858238
| 0.841407
| 0.395325
| 0
| 0.686499
| 1
| 0
| 0.098949
| 0.003133
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16476
| false
| 0.002288
| 0.011442
| 0
| 0.276888
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f94dcb94c58941f05446da27e86ab1091d2477cc
| 206
|
py
|
Python
|
AbstractClasses.py
|
OluSure/Hacktoberfest2021-1
|
ad1bafb0db2f0cdeaae8f87abbaa716638c5d2ea
|
[
"MIT"
] | 215
|
2021-10-01T08:18:16.000Z
|
2022-03-29T04:12:03.000Z
|
AbstractClasses.py
|
OluSure/Hacktoberfest2021-1
|
ad1bafb0db2f0cdeaae8f87abbaa716638c5d2ea
|
[
"MIT"
] | 51
|
2021-10-01T08:16:42.000Z
|
2021-10-31T13:51:51.000Z
|
AbstractClasses.py
|
OluSure/Hacktoberfest2021-1
|
ad1bafb0db2f0cdeaae8f87abbaa716638c5d2ea
|
[
"MIT"
] | 807
|
2021-10-01T08:11:45.000Z
|
2021-11-21T18:57:09.000Z
|
import abc
class parent:
    """Base class exposing a no-op ``geeks()`` hook for subclasses to override."""

    def geeks(self):
        """Do nothing; concrete subclasses supply the real behaviour."""
        return None
class child(parent):
    """Concrete subclass that overrides ``geeks()`` to announce itself."""

    def geeks(self):
        """Print the class identification message."""
        # Runtime string kept byte-identical to the original implementation.
        print("child class")
# Demonstrate the inheritance relationships set up above; both lines print True.
print( issubclass(child, parent))
print( isinstance(child(), parent))
| 20.6
| 35
| 0.635922
| 25
| 206
| 5.24
| 0.48
| 0.251908
| 0.21374
| 0.274809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.237864
| 206
| 9
| 36
| 22.888889
| 0.834395
| 0
| 0
| 0.222222
| 0
| 0
| 0.053398
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.111111
| 0.111111
| 0
| 0.555556
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
f9aed3f6865825d40367dcabf63f3a7d3062faca
| 139
|
py
|
Python
|
xdays_journey/__init__.py
|
pyconjp/x-days-journey
|
e59f5a00750b93763beeab07dda3c893c28f27fd
|
[
"MIT"
] | null | null | null |
xdays_journey/__init__.py
|
pyconjp/x-days-journey
|
e59f5a00750b93763beeab07dda3c893c28f27fd
|
[
"MIT"
] | null | null | null |
xdays_journey/__init__.py
|
pyconjp/x-days-journey
|
e59f5a00750b93763beeab07dda3c893c28f27fd
|
[
"MIT"
] | null | null | null |
from datetime import date
# Day 1 of PyCon JP 2021 — the default countdown target below.
PYCONJP_2021_DAY1 = date(2021, 10, 15)


def あと何日(the_day=PYCONJP_2021_DAY1):
    """Return the ``timedelta`` remaining from today until *the_day*.

    The name means "how many days left" in Japanese. The result is negative
    once *the_day* has passed.
    """
    today = date.today()
    return the_day - today
| 17.375
| 38
| 0.748201
| 23
| 139
| 4.26087
| 0.652174
| 0.22449
| 0.306122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 0.158273
| 139
| 7
| 39
| 19.857143
| 0.683761
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
f9dc63e5e6a91e5e02c739267ba39388f5aa3b86
| 9,600
|
py
|
Python
|
tests/v3_validation/cattlevalidationtest/core/test_ebs_volume.py
|
bmdepesa/validation-tests
|
23e7ab95ce76744483a0657f790b42a88a93436d
|
[
"Apache-2.0"
] | 7
|
2015-11-18T17:43:08.000Z
|
2021-07-14T09:48:18.000Z
|
tests/v3_validation/cattlevalidationtest/core/test_ebs_volume.py
|
bmdepesa/validation-tests
|
23e7ab95ce76744483a0657f790b42a88a93436d
|
[
"Apache-2.0"
] | 175
|
2015-07-09T18:41:24.000Z
|
2021-06-10T21:23:27.000Z
|
tests/v3_validation/cattlevalidationtest/core/test_ebs_volume.py
|
bmdepesa/validation-tests
|
23e7ab95ce76744483a0657f790b42a88a93436d
|
[
"Apache-2.0"
] | 25
|
2015-08-08T04:54:24.000Z
|
2021-05-25T21:10:37.000Z
|
from common_fixtures import * # NOQA
# Gate marker: the EBS tests below run only when the environment opts in via
# RANCHER_EBS == "true" (RANCHER_EBS comes from the common_fixtures star import).
if_test_ebs = pytest.mark.skipif(
    RANCHER_EBS != "true",
    reason="rancher ebs test environment is not enabled"
)
@if_test_ebs
def test_environment_ebs_volume_on_same_host(client, super_client):
    # Launching a service with scale 2 using the same volume
    # All the container should land on the same host
    assert check_for_ebs_driver(client)
    # Create a 1 GB rancher-ebs volume with a unique name for this run.
    volume_name = "ebs_" + random_str()
    volume = \
        client.create_volume({"type": "volume", "driver": "rancher-ebs",
                              "name": volume_name,
                              "driverOpts": {"size": "1"}})
    launch_config = {"volumeDriver": "rancher-ebs",
                     "dataVolumes": [volume_name + ":/test"],
                     "networkMode": "managed",
                     "image": "ubuntu:14.04.3",
                     "stdinOpen": True
                     }
    scale = 2
    service, stack = create_env_and_svc(client, launch_config,
                                        scale)
    stack = stack.activateservices()
    service = client.wait_success(service, 300)
    assert service.state == "active"
    # Both containers must share the host, since a single EBS volume can only
    # be attached to one instance at a time.
    container_list = get_service_container_list(client, service)
    assert container_list[0].hostId == container_list[1].hostId
    # Cleanup: remove the stack, then retry volume deletion until it succeeds
    # (deletion fails while the volume is still attached/detaching).
    # NOTE(review): this loop has no upper bound — it will spin forever if the
    # volume never becomes deletable.
    client.wait_success(client.delete(stack))
    while True:
        try:
            client.delete(volume)
            break
        except Exception:
            time.sleep(1)
            pass
@if_test_ebs
def test_environment_ebs_volume_read_write_data(client, super_client):
    # Launching two service with scale 1 using the same volume
    # Volume should be able to read and write from all the containers
    assert check_for_ebs_driver(client)
    volume_name = "ebs_" + random_str()
    hosts = client.list_host(kind='docker', removed_null=True, state="active")
    # Create a 1 GB rancher-ebs volume shared by both services below.
    volume = client.create_volume({"type": "volume", "driver": "rancher-ebs",
                                   "name": volume_name,
                                   "driverOpts": {"size": "1"}})
    path = "/test"
    port = "1000"
    # First service: pinned to hosts[0], SSH reachable on port 1000.
    launch_config = {"volumeDriver": "rancher-ebs",
                     "dataVolumes": [volume_name + ":" + path],
                     "ports": [port + ":22/tcp"],
                     "networkMode": "managed",
                     "image": SSH_IMAGE_UUID,
                     "stdinOpen": True,
                     "requestedHostId": hosts[0].id
                     }
    scale = 1
    service1, stack1 = create_env_and_svc(client, launch_config,
                                          scale)
    stack1 = stack1.activateservices()
    service1 = client.wait_success(service1, 300)
    assert service1.state == "active"
    # Write a random marker file through the first service's container.
    container_list = get_service_container_list(super_client, service1)
    filename = "test"
    content = random_str()
    write_data(container_list[0], int(port), path, filename, content)
    path = "/test"
    port = "1001"
    # Second service: same volume and host (EBS allows one attachment), but a
    # different published SSH port.
    launch_config = {"volumeDriver": "rancher-ebs",
                     "dataVolumes": [volume_name + ":" + path],
                     "ports": [port + ":22/tcp"],
                     "networkMode": "managed",
                     "image": SSH_IMAGE_UUID,
                     "stdinOpen": True,
                     "requestedHostId": hosts[0].id
                     }
    scale = 1
    service2, stack2 = create_env_and_svc(client, launch_config,
                                          scale)
    stack2 = stack2.activateservices()
    service2 = client.wait_success(service2, 300)
    assert service2.state == "active"
    # Data written via service1 must be readable via service2.
    container_list = get_service_container_list(super_client, service2)
    file_content = \
        read_data(container_list[0], int(port), path, filename)
    assert file_content == content
    # Cleanup: delete both stacks, then retry volume deletion until the volume
    # detaches. NOTE(review): unbounded retry loop — hangs if deletion never
    # succeeds.
    client.wait_success(client.delete(stack1))
    client.wait_success(client.delete(stack2))
    while True:
        try:
            client.delete(volume)
            break
        except Exception:
            time.sleep(1)
            pass
@if_test_ebs
def test_ebs_volume_move_same_host(client, super_client):
    # Launch a service with scale 1 using the volume1.
    # Write data to the volume.
    # Delete the service and re-launch a new one on the same host
    # using the same volume. Data should be persisted.
    assert check_for_ebs_driver(client)
    volume_name = "ebs_" + random_str()
    path = "/test"
    port = "1000"
    hosts = client.list_host(kind='docker', removed_null=True, state="active")
    volume = client.create_volume({"type": "volume", "driver": "rancher-ebs",
                                   "name": volume_name,
                                   "driverOpts": {"size": "1"}})
    # First service: pinned to hosts[0] with the EBS volume mounted at /test.
    launch_config = {"volumeDriver": "rancher-ebs",
                     "dataVolumes": [volume_name + ":" + path],
                     "ports": [port + ":22/tcp"],
                     "networkMode": "managed",
                     "image": SSH_IMAGE_UUID,
                     "stdinOpen": True,
                     "requestedHostId": hosts[0].id
                     }
    scale = 1
    service, stack = create_env_and_svc(client, launch_config,
                                        scale)
    stack = stack.activateservices()
    service = client.wait_success(service, 300)
    assert service.state == "active"
    # Write a random marker file, then tear the service down and wait for the
    # volume to fully detach before re-using it.
    filename = "test"
    content = random_str()
    container_list = get_service_container_list(super_client, service)
    write_data(container_list[0], int(port), path, filename, content)
    delete_all(client, [stack])
    wait_for_condition(client, volume, lambda x: x.state == "detached")
    # Second service: same host, same volume — the data must survive the move.
    launch_config = {"volumeDriver": "rancher-ebs",
                     "dataVolumes": [volume_name + ":/test"],
                     "networkMode": "managed",
                     "ports": [port + ":22/tcp"],
                     "image": SSH_IMAGE_UUID,
                     "stdinOpen": True,
                     "requestedHostId": hosts[0].id
                     }
    service, stack = create_env_and_svc(client, launch_config,
                                        scale)
    stack = stack.activateservices()
    service = client.wait_success(service, 300)
    assert service.state == "active"
    container_list = get_service_container_list(super_client, service)
    file_content = \
        read_data(container_list[0], int(port), path, filename)
    assert file_content == content
    # Cleanup: retry volume deletion until it detaches. NOTE(review):
    # unbounded retry loop — hangs if deletion never succeeds.
    client.wait_success(client.delete(stack))
    while True:
        try:
            client.delete(volume)
            break
        except Exception:
            time.sleep(1)
            pass
@if_test_ebs
def test_ebs_volume_move_diff_hosts(client, super_client):
    # Launch a service with scale 1 using the volume1.
    # Write data to the volume.
    # Delete the service and re-launch a new one on the a different host
    # using the same volume. Data should be persisted.
    # this test requires host a and host b are in the same AZ
    assert check_for_ebs_driver(client)
    volume_name = "ebs_" + random_str()
    hosts = client.list_host(kind='docker', removed_null=True, state="active")
    path = "/test"
    port = "1001"
    volume = client.create_volume({"type": "volume", "driver": "rancher-ebs",
                                   "name": volume_name,
                                   "driverOpts": {"size": "1"}})
    # First service: pinned to hosts[0] with the EBS volume mounted at /test.
    launch_config = {"volumeDriver": "rancher-ebs",
                     "dataVolumes": [volume_name + ":" + path],
                     "ports": [port + ":22/tcp"],
                     "networkMode": "managed",
                     "image": SSH_IMAGE_UUID,
                     "stdinOpen": True,
                     "requestedHostId": hosts[0].id
                     }
    scale = 1
    service, stack = create_env_and_svc(client, launch_config,
                                        scale)
    stack = stack.activateservices()
    service = client.wait_success(service, 300)
    assert service.state == "active"
    # Write a random marker file, then tear the service down and wait for the
    # volume to fully detach before re-attaching it elsewhere.
    filename = "test"
    content = random_str()
    container_list = get_service_container_list(super_client, service)
    write_data(container_list[0], int(port), path, filename, content)
    delete_all(client, [stack])
    wait_for_condition(client, volume, lambda x: x.state == "detached")
    # Second service: identical config but pinned to hosts[1] — the volume
    # must move across hosts with its data intact (hosts must share an AZ).
    launch_config = {"volumeDriver": "rancher-ebs",
                     "dataVolumes": [volume_name + ":" + path],
                     "ports": [port + ":22/tcp"],
                     "networkMode": "managed",
                     "image": SSH_IMAGE_UUID,
                     "stdinOpen": True,
                     "requestedHostId": hosts[1].id
                     }
    scale = 1
    service, stack = create_env_and_svc(client, launch_config,
                                        scale)
    stack = stack.activateservices()
    service = client.wait_success(service, 300)
    assert service.state == "active"
    container_list = get_service_container_list(super_client, service)
    file_content = \
        read_data(container_list[0], int(port), path, filename)
    assert file_content == content
    # Cleanup: retry volume deletion until it detaches. NOTE(review):
    # unbounded retry loop — hangs if deletion never succeeds.
    client.wait_success(client.delete(stack))
    while True:
        try:
            client.delete(volume)
            break
        except Exception:
            time.sleep(1)
            pass
def check_for_ebs_driver(client):
    """Return True when the "ebs" stack exists and its ebs-driver service is active."""
    stacks = client.list_stack(name="ebs")
    # Exactly one "ebs" stack is expected; anything else means the driver is
    # not deployed.
    if len(stacks) != 1:
        return False
    driver_service = get_service_by_name(client, stacks[0],
                                         "ebs-driver")
    return driver_service.state == "active"
| 37.065637
| 78
| 0.562604
| 1,005
| 9,600
| 5.160199
| 0.137313
| 0.055148
| 0.039337
| 0.041843
| 0.828191
| 0.806402
| 0.801581
| 0.801581
| 0.776128
| 0.759545
| 0
| 0.01677
| 0.329167
| 9,600
| 258
| 79
| 37.209302
| 0.788509
| 0.068542
| 0
| 0.787736
| 0
| 0
| 0.119077
| 0
| 0
| 0
| 0
| 0
| 0.070755
| 1
| 0.023585
| false
| 0.018868
| 0.004717
| 0
| 0.033019
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ddae69f16402a28e25687cfb7cdb84da63eac338
| 61,576
|
py
|
Python
|
graphsense/api/addresses_api.py
|
graphsense/graphsense-python
|
c0dafc97a04bc3dbf0caf08a981bb591bd1e430a
|
[
"MIT"
] | 9
|
2020-11-26T12:26:36.000Z
|
2022-02-07T22:08:16.000Z
|
graphsense/api/addresses_api.py
|
graphsense/graphsense-python
|
c0dafc97a04bc3dbf0caf08a981bb591bd1e430a
|
[
"MIT"
] | 14
|
2020-11-17T13:28:08.000Z
|
2022-01-24T09:21:43.000Z
|
graphsense/api/addresses_api.py
|
graphsense/graphsense-python
|
c0dafc97a04bc3dbf0caf08a981bb591bd1e430a
|
[
"MIT"
] | 3
|
2022-02-03T09:24:27.000Z
|
2022-02-16T10:13:55.000Z
|
"""
GraphSense API
GraphSense API # noqa: E501
The version of the OpenAPI document: 0.5
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from graphsense.api_client import ApiClient, Endpoint as _Endpoint
from graphsense.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from graphsense.model.address import Address
from graphsense.model.address_tag import AddressTag
from graphsense.model.address_txs import AddressTxs
from graphsense.model.addresses import Addresses
from graphsense.model.entity import Entity
from graphsense.model.link import Link
from graphsense.model.neighbors import Neighbors
class AddressesApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __get_address(
self,
currency,
address,
**kwargs
):
"""Get an address, optionally with tags # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_address(currency, address, async_req=True)
>>> result = thread.get()
Args:
currency (str): The cryptocurrency (e.g., btc)
address (str): The cryptocurrency address
Keyword Args:
include_tags (bool): Whether tags should be included. [optional] if omitted the server will use the default value of False
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Address
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['currency'] = \
currency
kwargs['address'] = \
address
return self.call_with_http_info(**kwargs)
self.get_address = _Endpoint(
settings={
'response_type': (Address,),
'auth': [
'api_key'
],
'endpoint_path': '/{currency}/addresses/{address}',
'operation_id': 'get_address',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'currency',
'address',
'include_tags',
],
'required': [
'currency',
'address',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'currency':
(str,),
'address':
(str,),
'include_tags':
(bool,),
},
'attribute_map': {
'currency': 'currency',
'address': 'address',
'include_tags': 'include_tags',
},
'location_map': {
'currency': 'path',
'address': 'path',
'include_tags': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_address
)
def __get_address_entity(
    self,
    currency,
    address,
    **kwargs
):
    """Get the entity of an address  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive a thread whose .get() yields the result.

    >>> thread = api.get_address_entity(currency, address, async_req=True)
    >>> result = thread.get()

    Args:
        currency (str): The cryptocurrency (e.g., btc)
        address (str): The cryptocurrency address

    Keyword Args:
        include_tags (bool): Whether tags should be included. [optional]
            if omitted the server will use the default value of False
        tag_coherence (bool): Whether to calculate coherence of address
            tags. [optional] if omitted the server will use the default
            value of False
        _return_http_data_only (bool): return only the response payload,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously

    Returns:
        Entity, or the request thread when called asynchronously.
    """
    # Fill in framework options the caller did not set explicitly;
    # explicit caller values always win over these defaults.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional parameters travel through kwargs as well.
    kwargs['currency'] = currency
    kwargs['address'] = address
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /{currency}/addresses/{address}/entity.
self.get_address_entity = _Endpoint(
    settings={
        'response_type': (Entity,),
        'auth': ['api_key'],
        'endpoint_path': '/{currency}/addresses/{address}/entity',
        'operation_id': 'get_address_entity',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['currency', 'address', 'include_tags', 'tag_coherence'],
        'required': ['currency', 'address'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'currency': (str,),
            'address': (str,),
            'include_tags': (bool,),
            'tag_coherence': (bool,),
        },
        'attribute_map': {
            'currency': 'currency',
            'address': 'address',
            'include_tags': 'include_tags',
            'tag_coherence': 'tag_coherence',
        },
        'location_map': {
            'currency': 'path',
            'address': 'path',
            'include_tags': 'query',
            'tag_coherence': 'query',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get_address_entity,
)
def __list_address_links(
    self,
    currency,
    address,
    neighbor,
    **kwargs
):
    """Get transactions between two addresses  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive a thread whose .get() yields the result.

    >>> thread = api.list_address_links(currency, address, neighbor, async_req=True)
    >>> result = thread.get()

    Args:
        currency (str): The cryptocurrency (e.g., btc)
        address (str): The cryptocurrency address
        neighbor (str): Neighbor address

    Keyword Args:
        _return_http_data_only (bool): return only the response payload,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously

    Returns:
        [Link], or the request thread when called asynchronously.
    """
    # Fill in framework options the caller did not set explicitly;
    # explicit caller values always win over these defaults.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional parameters travel through kwargs as well.
    kwargs['currency'] = currency
    kwargs['address'] = address
    kwargs['neighbor'] = neighbor
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /{currency}/addresses/{address}/links.
self.list_address_links = _Endpoint(
    settings={
        'response_type': ([Link],),
        'auth': ['api_key'],
        'endpoint_path': '/{currency}/addresses/{address}/links',
        'operation_id': 'list_address_links',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['currency', 'address', 'neighbor'],
        'required': ['currency', 'address', 'neighbor'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'currency': (str,),
            'address': (str,),
            'neighbor': (str,),
        },
        'attribute_map': {
            'currency': 'currency',
            'address': 'address',
            'neighbor': 'neighbor',
        },
        'location_map': {
            'currency': 'path',
            'address': 'path',
            'neighbor': 'query',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_address_links,
)
def __list_address_links_csv(
    self,
    currency,
    address,
    neighbor,
    **kwargs
):
    """Get transactions between two addresses as CSV  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive a thread whose .get() yields the result.

    >>> thread = api.list_address_links_csv(currency, address, neighbor, async_req=True)
    >>> result = thread.get()

    Args:
        currency (str): The cryptocurrency (e.g., btc)
        address (str): The cryptocurrency address
        neighbor (str): Neighbor address

    Keyword Args:
        _return_http_data_only (bool): return only the response payload,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously

    Returns:
        str, or the request thread when called asynchronously.
    """
    # Fill in framework options the caller did not set explicitly;
    # explicit caller values always win over these defaults.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional parameters travel through kwargs as well.
    kwargs['currency'] = currency
    kwargs['address'] = address
    kwargs['neighbor'] = neighbor
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /{currency}/addresses/{address}/links.csv.
self.list_address_links_csv = _Endpoint(
    settings={
        'response_type': (str,),
        'auth': ['api_key'],
        'endpoint_path': '/{currency}/addresses/{address}/links.csv',
        'operation_id': 'list_address_links_csv',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['currency', 'address', 'neighbor'],
        'required': ['currency', 'address', 'neighbor'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'currency': (str,),
            'address': (str,),
            'neighbor': (str,),
        },
        'attribute_map': {
            'currency': 'currency',
            'address': 'address',
            'neighbor': 'neighbor',
        },
        'location_map': {
            'currency': 'path',
            'address': 'path',
            'neighbor': 'query',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['text/csv'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_address_links_csv,
)
def __list_address_neighbors(
    self,
    currency,
    address,
    direction,
    **kwargs
):
    """Get an addresses' neighbors in the address graph  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive a thread whose .get() yields the result.

    >>> thread = api.list_address_neighbors(currency, address, direction, async_req=True)
    >>> result = thread.get()

    Args:
        currency (str): The cryptocurrency (e.g., btc)
        address (str): The cryptocurrency address
        direction (str): Incoming or outgoing neighbors

    Keyword Args:
        include_labels (bool): Whether labels of tags should be included.
            [optional] if omitted the server will use the default value
            of False
        page (str): Resumption token for retrieving the next page.
            [optional]
        pagesize (int): Number of items returned in a single page.
            [optional]
        _return_http_data_only (bool): return only the response payload,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously

    Returns:
        Neighbors, or the request thread when called asynchronously.
    """
    # Fill in framework options the caller did not set explicitly;
    # explicit caller values always win over these defaults.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional parameters travel through kwargs as well.
    kwargs['currency'] = currency
    kwargs['address'] = address
    kwargs['direction'] = direction
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /{currency}/addresses/{address}/neighbors.
self.list_address_neighbors = _Endpoint(
    settings={
        'response_type': (Neighbors,),
        'auth': ['api_key'],
        'endpoint_path': '/{currency}/addresses/{address}/neighbors',
        'operation_id': 'list_address_neighbors',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'currency',
            'address',
            'direction',
            'include_labels',
            'page',
            'pagesize',
        ],
        'required': ['currency', 'address', 'direction'],
        'nullable': [],
        'enum': ['direction'],
        'validation': ['pagesize'],
    },
    root_map={
        'validations': {
            ('pagesize',): {
                'inclusive_minimum': 1,
            },
        },
        'allowed_values': {
            ('direction',): {
                "IN": "in",
                "OUT": "out"
            },
        },
        'openapi_types': {
            'currency': (str,),
            'address': (str,),
            'direction': (str,),
            'include_labels': (bool,),
            'page': (str,),
            'pagesize': (int,),
        },
        'attribute_map': {
            'currency': 'currency',
            'address': 'address',
            'direction': 'direction',
            'include_labels': 'include_labels',
            'page': 'page',
            'pagesize': 'pagesize',
        },
        'location_map': {
            'currency': 'path',
            'address': 'path',
            'direction': 'query',
            'include_labels': 'query',
            'page': 'query',
            'pagesize': 'query',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_address_neighbors,
)
def __list_address_neighbors_csv(
    self,
    currency,
    address,
    direction,
    **kwargs
):
    """Get an addresses' neighbors in the address graph as CSV  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive a thread whose .get() yields the result.

    >>> thread = api.list_address_neighbors_csv(currency, address, direction, async_req=True)
    >>> result = thread.get()

    Args:
        currency (str): The cryptocurrency (e.g., btc)
        address (str): The cryptocurrency address
        direction (str): Incoming or outgoing neighbors

    Keyword Args:
        _return_http_data_only (bool): return only the response payload,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously

    Returns:
        str, or the request thread when called asynchronously.
    """
    # Fill in framework options the caller did not set explicitly;
    # explicit caller values always win over these defaults.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional parameters travel through kwargs as well.
    kwargs['currency'] = currency
    kwargs['address'] = address
    kwargs['direction'] = direction
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /{currency}/addresses/{address}/neighbors.csv.
self.list_address_neighbors_csv = _Endpoint(
    settings={
        'response_type': (str,),
        'auth': ['api_key'],
        'endpoint_path': '/{currency}/addresses/{address}/neighbors.csv',
        'operation_id': 'list_address_neighbors_csv',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['currency', 'address', 'direction'],
        'required': ['currency', 'address', 'direction'],
        'nullable': [],
        'enum': ['direction'],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {
            ('direction',): {
                "IN": "in",
                "OUT": "out"
            },
        },
        'openapi_types': {
            'currency': (str,),
            'address': (str,),
            'direction': (str,),
        },
        'attribute_map': {
            'currency': 'currency',
            'address': 'address',
            'direction': 'direction',
        },
        'location_map': {
            'currency': 'path',
            'address': 'path',
            'direction': 'query',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['text/csv'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_address_neighbors_csv,
)
def __list_address_txs(
    self,
    currency,
    address,
    **kwargs
):
    """Get all transactions an address has been involved in  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive a thread whose .get() yields the result.

    >>> thread = api.list_address_txs(currency, address, async_req=True)
    >>> result = thread.get()

    Args:
        currency (str): The cryptocurrency (e.g., btc)
        address (str): The cryptocurrency address

    Keyword Args:
        page (str): Resumption token for retrieving the next page.
            [optional]
        pagesize (int): Number of items returned in a single page.
            [optional]
        _return_http_data_only (bool): return only the response payload,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously

    Returns:
        AddressTxs, or the request thread when called asynchronously.
    """
    # Fill in framework options the caller did not set explicitly;
    # explicit caller values always win over these defaults.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional parameters travel through kwargs as well.
    kwargs['currency'] = currency
    kwargs['address'] = address
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /{currency}/addresses/{address}/txs.
self.list_address_txs = _Endpoint(
    settings={
        'response_type': (AddressTxs,),
        'auth': ['api_key'],
        'endpoint_path': '/{currency}/addresses/{address}/txs',
        'operation_id': 'list_address_txs',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['currency', 'address', 'page', 'pagesize'],
        'required': ['currency', 'address'],
        'nullable': [],
        'enum': [],
        'validation': ['pagesize'],
    },
    root_map={
        'validations': {
            ('pagesize',): {
                'inclusive_minimum': 1,
            },
        },
        'allowed_values': {},
        'openapi_types': {
            'currency': (str,),
            'address': (str,),
            'page': (str,),
            'pagesize': (int,),
        },
        'attribute_map': {
            'currency': 'currency',
            'address': 'address',
            'page': 'page',
            'pagesize': 'pagesize',
        },
        'location_map': {
            'currency': 'path',
            'address': 'path',
            'page': 'query',
            'pagesize': 'query',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_address_txs,
)
def __list_address_txs_csv(
    self,
    currency,
    address,
    **kwargs
):
    """Get all transactions an address has been involved in as CSV  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive a thread whose .get() yields the result.

    >>> thread = api.list_address_txs_csv(currency, address, async_req=True)
    >>> result = thread.get()

    Args:
        currency (str): The cryptocurrency (e.g., btc)
        address (str): The cryptocurrency address

    Keyword Args:
        _return_http_data_only (bool): return only the response payload,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously

    Returns:
        str, or the request thread when called asynchronously.
    """
    # Fill in framework options the caller did not set explicitly;
    # explicit caller values always win over these defaults.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional parameters travel through kwargs as well.
    kwargs['currency'] = currency
    kwargs['address'] = address
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /{currency}/addresses/{address}/txs.csv.
self.list_address_txs_csv = _Endpoint(
    settings={
        'response_type': (str,),
        'auth': ['api_key'],
        'endpoint_path': '/{currency}/addresses/{address}/txs.csv',
        'operation_id': 'list_address_txs_csv',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['currency', 'address'],
        'required': ['currency', 'address'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'currency': (str,),
            'address': (str,),
        },
        'attribute_map': {
            'currency': 'currency',
            'address': 'address',
        },
        'location_map': {
            'currency': 'path',
            'address': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['text/csv'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_address_txs_csv,
)
def __list_addresses(
    self,
    currency,
    **kwargs
):
    """Get addresses  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive a thread whose .get() yields the result.

    >>> thread = api.list_addresses(currency, async_req=True)
    >>> result = thread.get()

    Args:
        currency (str): The cryptocurrency (e.g., btc)

    Keyword Args:
        ids ([str]): Restrict result to given set of comma separated
            IDs. [optional]
        page (str): Resumption token for retrieving the next page.
            [optional]
        pagesize (int): Number of items returned in a single page.
            [optional]
        _return_http_data_only (bool): return only the response payload,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously

    Returns:
        Addresses, or the request thread when called asynchronously.
    """
    # Fill in framework options the caller did not set explicitly;
    # explicit caller values always win over these defaults.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional parameters travel through kwargs as well.
    kwargs['currency'] = currency
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /{currency}/addresses.
self.list_addresses = _Endpoint(
    settings={
        'response_type': (Addresses,),
        'auth': ['api_key'],
        'endpoint_path': '/{currency}/addresses',
        'operation_id': 'list_addresses',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['currency', 'ids', 'page', 'pagesize'],
        'required': ['currency'],
        'nullable': [],
        'enum': [],
        'validation': ['pagesize'],
    },
    root_map={
        'validations': {
            ('pagesize',): {
                'inclusive_minimum': 1,
            },
        },
        'allowed_values': {},
        'openapi_types': {
            'currency': (str,),
            'ids': ([str],),
            'page': (str,),
            'pagesize': (int,),
        },
        'attribute_map': {
            'currency': 'currency',
            'ids': 'ids',
            'page': 'page',
            'pagesize': 'pagesize',
        },
        'location_map': {
            'currency': 'path',
            'ids': 'query',
            'page': 'query',
            'pagesize': 'query',
        },
        'collection_format_map': {
            'ids': 'multi',
        },
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_addresses,
)
def __list_addresses_csv(
    self,
    currency,
    ids,
    **kwargs
):
    """Get addresses as CSV  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive a thread whose .get() yields the result.

    >>> thread = api.list_addresses_csv(currency, ids, async_req=True)
    >>> result = thread.get()

    Args:
        currency (str): The cryptocurrency (e.g., btc)
        ids ([str]): Set of comma separated IDs

    Keyword Args:
        _return_http_data_only (bool): return only the response payload,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously

    Returns:
        str, or the request thread when called asynchronously.
    """
    # Fill in framework options the caller did not set explicitly;
    # explicit caller values always win over these defaults.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional parameters travel through kwargs as well.
    kwargs['currency'] = currency
    kwargs['ids'] = ids
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /{currency}/addresses.csv.
self.list_addresses_csv = _Endpoint(
    settings={
        'response_type': (str,),
        'auth': ['api_key'],
        'endpoint_path': '/{currency}/addresses.csv',
        'operation_id': 'list_addresses_csv',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['currency', 'ids'],
        'required': ['currency', 'ids'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'currency': (str,),
            'ids': ([str],),
        },
        'attribute_map': {
            'currency': 'currency',
            'ids': 'ids',
        },
        'location_map': {
            'currency': 'path',
            'ids': 'query',
        },
        'collection_format_map': {
            'ids': 'multi',
        },
    },
    headers_map={
        # NOTE(review): the sibling CSV endpoints (links.csv,
        # neighbors.csv, txs.csv) accept 'text/csv' (the standard CSV
        # media type, RFC 4180); 'application/csv' here looks like a
        # spec typo — confirm against the server's OpenAPI definition
        # before changing it.
        'accept': ['application/csv'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_addresses_csv,
)
def __list_tags_by_address(
    self,
    currency,
    address,
    **kwargs
):
    """Get attribution tags for a given address  # noqa: E501

    Synchronous by default; pass async_req=True to perform the request
    asynchronously and receive a thread whose .get() yields the result.

    >>> thread = api.list_tags_by_address(currency, address, async_req=True)
    >>> result = thread.get()

    Args:
        currency (str): The cryptocurrency (e.g., btc)
        address (str): The cryptocurrency address

    Keyword Args:
        _return_http_data_only (bool): return only the response payload,
            without status code and headers. Default is True.
        _preload_content (bool): if False, return the raw
            urllib3.HTTPResponse object without reading/decoding the
            response data. Default is True.
        _request_timeout (float/tuple): total request timeout, or a
            (connection, read) timeout pair. Default is None.
        _check_input_type (bool): type-check the data sent to the server.
            Default is True.
        _check_return_type (bool): type-check the data received from the
            server. Default is True.
        _host_index (int/None): index of the server to use. Default is
            read from the configuration.
        async_req (bool): execute the request asynchronously

    Returns:
        [AddressTag], or the request thread when called asynchronously.
    """
    # Fill in framework options the caller did not set explicitly;
    # explicit caller values always win over these defaults.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs.setdefault(option, default)
    # Positional parameters travel through kwargs as well.
    kwargs['currency'] = currency
    kwargs['address'] = address
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for GET /{currency}/addresses/{address}/tags.
self.list_tags_by_address = _Endpoint(
    settings={
        'response_type': ([AddressTag],),
        'auth': ['api_key'],
        'endpoint_path': '/{currency}/addresses/{address}/tags',
        'operation_id': 'list_tags_by_address',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['currency', 'address'],
        'required': ['currency', 'address'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'currency': (str,),
            'address': (str,),
        },
        'attribute_map': {
            'currency': 'currency',
            'address': 'address',
        },
        'location_map': {
            'currency': 'path',
            'address': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__list_tags_by_address,
)
def __list_tags_by_address_csv(
self,
currency,
address,
**kwargs
):
"""Get attribution tags for a given address # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_tags_by_address_csv(currency, address, async_req=True)
>>> result = thread.get()
Args:
currency (str): The cryptocurrency (e.g., btc)
address (str): The cryptocurrency address
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
str
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['currency'] = \
currency
kwargs['address'] = \
address
return self.call_with_http_info(**kwargs)
self.list_tags_by_address_csv = _Endpoint(
settings={
'response_type': (str,),
'auth': [
'api_key'
],
'endpoint_path': '/{currency}/addresses/{address}/tags.csv',
'operation_id': 'list_tags_by_address_csv',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'currency',
'address',
],
'required': [
'currency',
'address',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'currency':
(str,),
'address':
(str,),
},
'attribute_map': {
'currency': 'currency',
'address': 'address',
},
'location_map': {
'currency': 'path',
'address': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/csv'
],
'content_type': [],
},
api_client=api_client,
callable=__list_tags_by_address_csv
)
| 36.114956
| 154
| 0.437752
| 4,958
| 61,576
| 5.209762
| 0.04518
| 0.033449
| 0.024158
| 0.025087
| 0.930236
| 0.916183
| 0.908169
| 0.906659
| 0.889895
| 0.874758
| 0
| 0.002017
| 0.476695
| 61,576
| 1,704
| 155
| 36.13615
| 0.799584
| 0.312898
| 0
| 0.727656
| 1
| 0
| 0.216746
| 0.034824
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010961
| false
| 0
| 0.009275
| 0
| 0.031197
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ddcaf2ed3ea0a4c2f736faef8c6ee83f6b04d8f1
| 239
|
py
|
Python
|
site-packages/serpent/game_launchers/__init__.py
|
nanpuhaha/SerpentAI
|
6af1105fc0a970227a0d7c11e6a0da1bd0bacec6
|
[
"MIT"
] | 6,762
|
2017-09-17T20:28:40.000Z
|
2022-03-31T12:35:47.000Z
|
site-packages/serpent/game_launchers/__init__.py
|
nanpuhaha/SerpentAI
|
6af1105fc0a970227a0d7c11e6a0da1bd0bacec6
|
[
"MIT"
] | 159
|
2017-09-19T21:54:58.000Z
|
2021-03-26T18:15:58.000Z
|
serpent/game_launchers/__init__.py
|
PiterPentester/SerpentAI
|
614bafd3c2df3ee6736309d46a7b92325f9a2d15
|
[
"MIT"
] | 880
|
2017-09-23T01:16:50.000Z
|
2022-03-27T18:58:30.000Z
|
from serpent.game_launchers.steam_game_launcher import SteamGameLauncher
from serpent.game_launchers.executable_game_launcher import ExecutableGameLauncher
from serpent.game_launchers.web_browser_game_launcher import WebBrowserGameLauncher
| 79.666667
| 83
| 0.92887
| 28
| 239
| 7.571429
| 0.464286
| 0.15566
| 0.212264
| 0.339623
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046025
| 239
| 3
| 83
| 79.666667
| 0.929825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fb01d2ac180de3dc4e102cf03f16ee1e1a5202c7
| 15,344
|
py
|
Python
|
main/dstport.py
|
naitwo2/NEEDLEWORK-ScenarioWriter
|
6e0d985d8ffd673d07d0f0b77b199746f2873d40
|
[
"MIT"
] | null | null | null |
main/dstport.py
|
naitwo2/NEEDLEWORK-ScenarioWriter
|
6e0d985d8ffd673d07d0f0b77b199746f2873d40
|
[
"MIT"
] | null | null | null |
main/dstport.py
|
naitwo2/NEEDLEWORK-ScenarioWriter
|
6e0d985d8ffd673d07d0f0b77b199746f2873d40
|
[
"MIT"
] | null | null | null |
from main import absorbdict
from main import multiple

# One destination-port entry per (expanded) policy, one list per transport.
# Filled at import time by the calls at the bottom of this module, exactly
# like the original (icmp first, then tcp, then udp).
dst_port_icmp = []
dst_port_tcp = []
dst_port_udp = []

# Quoted protocol keywords that may appear in a policy's 'protocol' field.
_PROTOCOL_KEYWORDS = (
    '"SMTP"', '"SNMP"', '"NBDS"', '"FTP"', '"HTTP"', '"HTTPS"',
    '"NTP"', '"SYSLOG"', '"DNS"', '"MAIL"', '"PING"',
)

# Well-known destination port per keyword and transport.  A keyword missing
# from a transport's table does not use that transport: the policy is then
# reported as skipped and "NaN" is emitted so the lists stay aligned.
_KNOWN_PORTS = {
    'icmp': {'"PING"': ''},          # icmp has no destination port
    'tcp': {
        '"SMTP"': '25',
        '"SNMP"': '161',             # 162
        '"FTP"': '21',
        '"HTTP"': '80',
        '"HTTPS"': '443',
        '"NTP"': '123',
        '"DNS"': '53',
        '"MAIL"': '25',
    },
    'udp': {
        '"SNMP"': '161',             # 162
        '"NBDS"': '138',
        '"FTP"': '21',
        '"NTP"': '123',
        '"SYSLOG"': '514',
        '"DNS"': '53',
    },
}

# Default port used for '"ANY"' policies (when not Any -> Any) and as the
# service-group fallback when a group member does not pin a port down.
_DEFAULT_PORT = {'icmp': '', 'tcp': '80', 'udp': '53'}


def _skip(policy, append_list, message):
    """Report a policy that yields no port for this transport and emit
    "NaN" so downstream rows stay aligned with the other columns."""
    data = str("NaN")
    print(message)
    print('policy_id = %sの出力をスキップしました' % policy['policy_id'])
    multiple.handle_multiple_ip(policy, append_list, data)


def _lookup_service_port(policy, append_list, proto_name):
    """Resolve the port from the user-defined service / service-group tables.

    NOTE(review): the three original per-transport copies of this logic
    disagreed in details; the icmp copy accumulated group member names into a
    *list* and compared that list against a string service name, so its group
    lookup could never match.  This follows the tcp/udp shape, which matches
    the evident intent -- confirm against real NEEDLEWORK scenario data.
    """
    # Direct hit in the flat service table first.
    for service_c in absorbdict.service_dict:
        if (service_c['service_name'] == policy['protocol']
                and service_c['protocol_name'] == proto_name):
            # Upper bound of the 'low-high' destination port range.
            data = str(service_c['dst_port_num'].split('-')[1])
            multiple.handle_multiple_ip(policy, append_list, data)
            return
    # Then the service groups.
    if len(absorbdict.group_service_dict) >= 2:
        for group_service_c in absorbdict.group_service_dict:
            if group_service_c['group_service_name'] != policy['protocol']:
                continue
            service_name = group_service_c['service_name']
            for service_c in absorbdict.service_dict:
                if (service_c['service_name'] == service_name
                        and service_c['protocol_name'] == proto_name):
                    data = str(service_c['dst_port_num'].split('-')[1])
                    multiple.handle_multiple_ip(policy, append_list, data)
                    break
            else:
                # TODO: if the group's service does not use this transport the
                # transport default leaks in, which diverges from entries that
                # do use it (pre-existing TODO in the original).
                data = str(_DEFAULT_PORT[proto_name])
                multiple.handle_multiple_ip(policy, append_list, data)
            return
    _skip(policy, append_list,
          'service_nameで%sが使用されていないため出力しませんでした' % proto_name)


def _handle_dst_port(proto_name, append_list):
    """Append one destination-port entry per policy for *proto_name*."""
    known = _KNOWN_PORTS[proto_name]
    for policy in absorbdict.policy_dict:
        protocol = policy['protocol']
        if policy.get('dst_nat_port') is not None:
            # An explicit destination-NAT port overrides everything else.
            data = str(policy['dst_nat_port'])
            multiple.handle_multiple_ip(policy, append_list, data)
        elif protocol in known:
            data = str(known[protocol])
            multiple.handle_multiple_ip(policy, append_list, data)
        elif protocol in _PROTOCOL_KEYWORDS:
            # Known protocol, but it does not run over this transport.
            _skip(policy, append_list,
                  '%sは%sを使用しません' % (protocol, proto_name))
        elif protocol == '"ANY"':
            if proto_name == 'icmp':
                data = str("")  # icmpはdstportが存在しない
            elif policy['src_ip'] == '"Any"' and policy['dst_ip'] == '"Any"':
                # TODO: rework the Any -> Any x ANY case (要修正).
                data = str("65535")
            else:
                data = str(_DEFAULT_PORT[proto_name])
            multiple.handle_multiple_ip(policy, append_list, data)
        else:
            # A user-defined service (or service group) name.
            _lookup_service_port(policy, append_list, proto_name)


def handle_dst_port_icmp():
    """Populate dst_port_icmp from the parsed policies."""
    _handle_dst_port('icmp', dst_port_icmp)


handle_dst_port_icmp()


def handle_dst_port_tcp():
    """Populate dst_port_tcp from the parsed policies."""
    _handle_dst_port('tcp', dst_port_tcp)


handle_dst_port_tcp()


def handle_dst_port_udp():
    """Populate dst_port_udp from the parsed policies."""
    _handle_dst_port('udp', dst_port_udp)


handle_dst_port_udp()
| 48.403785
| 118
| 0.517271
| 1,449
| 15,344
| 5.213251
| 0.059351
| 0.078104
| 0.163092
| 0.177919
| 0.922425
| 0.915939
| 0.915939
| 0.915939
| 0.915939
| 0.915939
| 0
| 0.007032
| 0.369786
| 15,344
| 316
| 119
| 48.556962
| 0.774147
| 0.020269
| 0
| 0.836177
| 0
| 0
| 0.166423
| 0.014512
| 0
| 0
| 0
| 0.003165
| 0
| 1
| 0.010239
| false
| 0
| 0.006826
| 0
| 0.017065
| 0.163823
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fb173eb6e45a9b09b6e0443a6c4fefcae8499aef
| 49
|
py
|
Python
|
backend/app/views/select_primers/__init__.py
|
Edinburgh-Genome-Foundry/CUBA
|
d57565951ead619ef9263e8b356b451001fb910f
|
[
"MIT"
] | 15
|
2018-02-12T13:12:13.000Z
|
2021-08-15T11:37:59.000Z
|
backend/app/views/select_primers/__init__.py
|
Edinburgh-Genome-Foundry/CUBA
|
d57565951ead619ef9263e8b356b451001fb910f
|
[
"MIT"
] | 9
|
2020-06-05T17:54:54.000Z
|
2022-02-12T12:03:19.000Z
|
backend/app/views/select_primers/__init__.py
|
Edinburgh-Genome-Foundry/CUBA
|
d57565951ead619ef9263e8b356b451001fb910f
|
[
"MIT"
] | 3
|
2018-10-18T13:08:50.000Z
|
2020-08-17T14:09:46.000Z
|
from .SelectPrimersView import SelectPrimersView
| 24.5
| 48
| 0.897959
| 4
| 49
| 11
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 49
| 1
| 49
| 49
| 0.977778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fb32c04fc928d9c0abfa81fba228068f0055ada3
| 98
|
py
|
Python
|
test/integration/src/py/wait_for_sif_account.py
|
salahgapr5/snif
|
fde05f68d8c6898f1826f6de6b24dcec71ea4e18
|
[
"Apache-2.0"
] | 85
|
2020-10-24T07:35:55.000Z
|
2022-03-27T10:27:55.000Z
|
test/integration/src/py/wait_for_sif_account.py
|
salahgapr5/snif
|
fde05f68d8c6898f1826f6de6b24dcec71ea4e18
|
[
"Apache-2.0"
] | 1,760
|
2020-10-24T07:37:17.000Z
|
2022-03-31T17:59:44.000Z
|
test/integration/src/py/wait_for_sif_account.py
|
salahgapr5/snif
|
fde05f68d8c6898f1826f6de6b24dcec71ea4e18
|
[
"Apache-2.0"
] | 115
|
2020-10-31T07:32:38.000Z
|
2022-03-29T21:12:25.000Z
|
# Integration-test helper script: block until the given sif account exists.
import sys
from test_utilities import wait_for_sif_account
# NOTE(review): the account address is read from sys.argv[2], i.e. the SECOND
# command-line argument after the script name; argv[1] is ignored.  Confirm
# the harness really passes the address in that slot.  The second argument
# ("") is presumably a default/blank option -- verify against
# test_utilities.wait_for_sif_account's signature.
wait_for_sif_account(sys.argv[2], "")
| 19.6
| 47
| 0.826531
| 17
| 98
| 4.352941
| 0.647059
| 0.189189
| 0.27027
| 0.459459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0.091837
| 98
| 5
| 48
| 19.6
| 0.820225
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
348d17b14bd0d88acef9790726431f654add3cb5
| 174
|
py
|
Python
|
ding/envs/env_manager/__init__.py
|
sailxjx/DI-engine
|
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
|
[
"Apache-2.0"
] | 464
|
2021-07-08T07:26:33.000Z
|
2022-03-31T12:35:16.000Z
|
ding/envs/env_manager/__init__.py
|
sailxjx/DI-engine
|
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
|
[
"Apache-2.0"
] | 177
|
2021-07-09T08:22:55.000Z
|
2022-03-31T07:35:22.000Z
|
ding/envs/env_manager/__init__.py
|
sailxjx/DI-engine
|
c6763f8e2ba885a2a02f611195a1b5f8b50bff00
|
[
"Apache-2.0"
] | 92
|
2021-07-08T12:16:37.000Z
|
2022-03-31T09:24:41.000Z
|
from .base_env_manager import BaseEnvManager, create_env_manager, get_env_manager_cls
from .subprocess_env_manager import AsyncSubprocessEnvManager, SyncSubprocessEnvManager
| 58
| 87
| 0.908046
| 20
| 174
| 7.45
| 0.6
| 0.268456
| 0.214765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063218
| 174
| 2
| 88
| 87
| 0.91411
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
34ed479112ef7ada0f18328391d6c4587f083456
| 4,783
|
py
|
Python
|
tests/parser/others.esra.suitcase.c.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/others.esra.suitcase.c.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/others.esra.suitcase.c.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# Parser regression fixture (Lin's suitcase planning domain, ASP syntax).
# The test harness presumably reads the module-level `input`/`output` pair and
# checks that parsing-and-reprinting `input` reproduces `output` -- the two
# strings are byte-identical here, i.e. a round-trip test.  NOTE: `input`
# shadows the builtin of the same name, but the harness keys on these exact
# variable names, so they must not be renamed.
input = """
% The rules for up/2 have been rewritten to a disjunctive rule
% and constraints, yielding a guess-and-check-like program.
% Lin's suitcase domain:
% fluents: up(L), open
% actions: toggle(L)
% initially: the latches (i.e., l1 and l2) are down and the suitcase is
% closed
% goal: the latches are down and the suitcase is open
% classical negation of open, up, and toggle are represented by nopen,
% nup, and ntoggle respectively.
% action description (in C):
% inertial up(L), nup(L), open, nopen
% caused up(L) after toggle(L) /\ nup(L)
% caused nup(L) after toggle(L) /\ up(L)
% caused open if up(l1) /\ up(l2)
% transform into a logic program
% the new guess-and-check stuff
up(L,T) | -up(L,T) :- latch(L), time(T).
:- up(L,T), next(T,T1), up(L,T1), toggle(L,T).
:- -up(L,T), next(T,T1), -up(L,T1), toggle(L,T).
:- up(L,T), next(T,T1), -up(L,T1), -toggle(L,T).
:- -up(L,T), next(T,T1), up(L,T1), -toggle(L,T).
% the old rules
%up(L,T1) :- latch(L), next(T,T1), up(L,T), not -up(L,T1).
%-up(L,T1) :- latch(L), next(T,T1), -up(L,T), not up(L,T1).
%up(L,T1) :- latch(L), next(T,T1), toggle(L,T), -up(L,T).
%-up(L,T1) :- latch(L), next(T,T1), toggle(L,T), up(L,T).
open(T1) :- next(T,T1), open(T), not -open(T1).
-open(T1) :- next(T,T1), -open(T), not open(T1).
open(T) :- up(l1,T), up(l2,T).
% The following two rules are subsumed by the new disjunctive rule.
%up(L,0) :- latch(L), not -up(L,0).
%-up(L,0) :- latch(L), not up(L,0).
%up(L,0) | -up(L,0) :- latch(L).
% open(0) :- not -open(0).
% -open(0) :- not open(0).
open(0) | -open(0).
%toggle(L,T) :- latch(L), time(T), not last(T), not -toggle(L,T).
%-toggle(L,T) :- latch(L), time(T), not last(T), not toggle(L,T).
toggle(L,T) | -toggle(L,T) :- latch(L), time(T), not last(T).
latch(l1).
latch(l2).
time(0).
time(1).
time(2).
last(2).
next(0,1).
next(1,2).
:- up(l1,0).
:- up(l2,0).
:- open(0).
:- not open(2).
:- up(l1,2).
:- up(l2,2).
% OR in a more intuitive way, the above 6 ICs can be written as:
% goal :- open(2), not up(l1,0), not up(l2,0), not open(0), not up(l1,2),
% not up(l2,2).
% :- not goal.
% OR they can be written as a query (used with brave reasoning):
% open(2), not up(l1,0), not up(l2,0), not open(0), not up(l1,2), not
% up(l2,2)?
"""
# Expected pretty-printed parser output -- byte-identical to `input` above.
output = """
% The rules for up/2 have been rewritten to a disjunctive rule
% and constraints, yielding a guess-and-check-like program.
% Lin's suitcase domain:
% fluents: up(L), open
% actions: toggle(L)
% initially: the latches (i.e., l1 and l2) are down and the suitcase is
% closed
% goal: the latches are down and the suitcase is open
% classical negation of open, up, and toggle are represented by nopen,
% nup, and ntoggle respectively.
% action description (in C):
% inertial up(L), nup(L), open, nopen
% caused up(L) after toggle(L) /\ nup(L)
% caused nup(L) after toggle(L) /\ up(L)
% caused open if up(l1) /\ up(l2)
% transform into a logic program
% the new guess-and-check stuff
up(L,T) | -up(L,T) :- latch(L), time(T).
:- up(L,T), next(T,T1), up(L,T1), toggle(L,T).
:- -up(L,T), next(T,T1), -up(L,T1), toggle(L,T).
:- up(L,T), next(T,T1), -up(L,T1), -toggle(L,T).
:- -up(L,T), next(T,T1), up(L,T1), -toggle(L,T).
% the old rules
%up(L,T1) :- latch(L), next(T,T1), up(L,T), not -up(L,T1).
%-up(L,T1) :- latch(L), next(T,T1), -up(L,T), not up(L,T1).
%up(L,T1) :- latch(L), next(T,T1), toggle(L,T), -up(L,T).
%-up(L,T1) :- latch(L), next(T,T1), toggle(L,T), up(L,T).
open(T1) :- next(T,T1), open(T), not -open(T1).
-open(T1) :- next(T,T1), -open(T), not open(T1).
open(T) :- up(l1,T), up(l2,T).
% The following two rules are subsumed by the new disjunctive rule.
%up(L,0) :- latch(L), not -up(L,0).
%-up(L,0) :- latch(L), not up(L,0).
%up(L,0) | -up(L,0) :- latch(L).
% open(0) :- not -open(0).
% -open(0) :- not open(0).
open(0) | -open(0).
%toggle(L,T) :- latch(L), time(T), not last(T), not -toggle(L,T).
%-toggle(L,T) :- latch(L), time(T), not last(T), not toggle(L,T).
toggle(L,T) | -toggle(L,T) :- latch(L), time(T), not last(T).
latch(l1).
latch(l2).
time(0).
time(1).
time(2).
last(2).
next(0,1).
next(1,2).
:- up(l1,0).
:- up(l2,0).
:- open(0).
:- not open(2).
:- up(l1,2).
:- up(l2,2).
% OR in a more intuitive way, the above 6 ICs can be written as:
% goal :- open(2), not up(l1,0), not up(l2,0), not open(0), not up(l1,2),
% not up(l2,2).
% :- not goal.
% OR they can be written as a query (used with brave reasoning):
% open(2), not up(l1,0), not up(l2,0), not open(0), not up(l1,2), not
% up(l2,2)?
"""
| 26.72067
| 75
| 0.555509
| 926
| 4,783
| 2.86933
| 0.100432
| 0.067746
| 0.072262
| 0.026346
| 0.99586
| 0.99586
| 0.99586
| 0.99586
| 0.99586
| 0.99586
| 0
| 0.044456
| 0.20991
| 4,783
| 178
| 76
| 26.870787
| 0.65864
| 0
| 0
| 0.984127
| 0
| 0.253968
| 0.993274
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9b5f505e008a88dae11f7b9d40e564eda9df5522
| 5,113
|
py
|
Python
|
sqlpie/controllers/caching_controller.py
|
lessaworld/sqlpie
|
22cac1fc7f9cb939e823058f84a68988e03ab239
|
[
"MIT"
] | 3
|
2016-01-27T19:49:23.000Z
|
2020-08-18T13:59:02.000Z
|
sqlpie/controllers/caching_controller.py
|
lessaworld/sqlpie
|
22cac1fc7f9cb939e823058f84a68988e03ab239
|
[
"MIT"
] | null | null | null |
sqlpie/controllers/caching_controller.py
|
lessaworld/sqlpie
|
22cac1fc7f9cb939e823058f84a68988e03ab239
|
[
"MIT"
] | 1
|
2016-02-01T01:57:54.000Z
|
2016-02-01T01:57:54.000Z
|
# -*- coding: utf-8 -*-
"""
SQLpie License (MIT License)
Copyright (c) 2011-2016 André Lessa, http://sqlpie.com
See LICENSE file.
"""
from flask import Response
import json
import sqlpie
class CachingController(sqlpie.BaseController):
    """HTTP request handlers for sqlpie's named cache buckets.

    Every handler receives a Flask ``request`` whose JSON payload selects a
    bucket (and, where relevant, a key/value pair).  Missing required payload
    fields raise ``CustomException(INVALID_ARGUMENTS)``; addressing a bucket
    that was never initialized raises ``CustomException(CACHE_IS_EMPTY)``.
    Handlers return a small ``{'success': True, ...}`` dict that the
    ``controller_wrapper`` decorator turns into the HTTP response.
    """

    @staticmethod
    def _payload(request, *required):
        """Return the request's JSON body, enforcing the *required* fields."""
        json_data = request.get_json()
        for field in required:
            if field not in json_data:
                raise sqlpie.CustomException(
                    sqlpie.CustomException.INVALID_ARGUMENTS)
        return json_data

    @staticmethod
    def _cache(bucket):
        """Return the live cache for *bucket*, or raise CACHE_IS_EMPTY."""
        if bucket not in sqlpie.global_cache:
            raise sqlpie.CustomException(
                sqlpie.CustomException.CACHE_IS_EMPTY)
        return sqlpie.global_cache[bucket]

    @staticmethod
    @sqlpie.BaseController.controller_wrapper
    def caching_initialize(request):
        """Create (or replace) a cache bucket; capacity defaults to 500."""
        json_data = CachingController._payload(request, "bucket")
        bucket = json_data["bucket"]
        capacity = json_data.get("capacity", 500)
        # bool() keeps the original truthiness semantics (absent -> False).
        auto_flush = bool(json_data.get("auto_flush"))
        sqlpie.global_cache[bucket] = sqlpie.Caching(bucket, capacity, auto_flush)
        return {'success': True}

    @staticmethod
    @sqlpie.BaseController.controller_wrapper
    def caching_add(request):
        """Register *key* in *bucket*, with an optional expiration."""
        json_data = CachingController._payload(request, "bucket", "key")
        cache = CachingController._cache(json_data["bucket"])
        expires_at = json_data.get("expires_at", False)
        cache.add(json_data["key"], expires_at)
        return {'success': True}

    @staticmethod
    @sqlpie.BaseController.controller_wrapper
    def caching_put(request):
        """Store *value* under *key* in *bucket*, with optional expiration."""
        json_data = CachingController._payload(request, "bucket", "key", "value")
        cache = CachingController._cache(json_data["bucket"])
        expires_at = json_data.get("expires_at", False)
        cache.put(json_data["key"], json_data["value"], expires_at)
        return {'success': True}

    @staticmethod
    @sqlpie.BaseController.controller_wrapper
    def caching_get(request):
        """Fetch the value stored under *key* in *bucket*."""
        json_data = CachingController._payload(request, "bucket", "key")
        bucket = json_data["bucket"]
        key = json_data["key"]
        cache = CachingController._cache(bucket)
        value = cache.get(key)
        return {'success': True, 'bucket': bucket, 'key': key, 'value': value}

    @staticmethod
    @sqlpie.BaseController.controller_wrapper
    def caching_remove(request):
        """Remove one key from *bucket*, or all entries when no key is given."""
        json_data = CachingController._payload(request, "bucket")
        cache = CachingController._cache(json_data["bucket"])
        if "key" in json_data:
            cache.remove(json_data["key"])
        else:
            cache.remove()
        return {'success': True}

    @staticmethod
    @sqlpie.BaseController.controller_wrapper
    def caching_flush(request):
        """Flush *bucket*'s contents."""
        json_data = CachingController._payload(request, "bucket")
        CachingController._cache(json_data["bucket"]).flush()
        return {'success': True}

    @staticmethod
    @sqlpie.BaseController.controller_wrapper
    def caching_reset(request):
        """Reset the whole cache subsystem.

        NOTE(review): this goes through ``sqlpie.Cache().reset()`` while every
        other handler works on ``sqlpie.global_cache`` -- confirm the two
        refer to the same store.
        """
        sqlpie.Cache().reset()
        return {'success': True}

    @staticmethod
    @sqlpie.BaseController.controller_wrapper
    def caching_destroy(request):
        """Empty *bucket* and drop it from the global cache registry."""
        json_data = CachingController._payload(request, "bucket")
        bucket = json_data["bucket"]
        cache = CachingController._cache(bucket)
        cache.remove()
        del sqlpie.global_cache[bucket]
        return {'success': True}
| 39.330769
| 93
| 0.67612
| 582
| 5,113
| 5.749141
| 0.111684
| 0.093246
| 0.132098
| 0.162582
| 0.8159
| 0.8159
| 0.8159
| 0.780634
| 0.780634
| 0.780634
| 0
| 0.003067
| 0.234891
| 5,113
| 129
| 94
| 39.635659
| 0.852249
| 0.024252
| 0
| 0.756757
| 0
| 0
| 0.053023
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072072
| false
| 0
| 0.027027
| 0
| 0.18018
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9b6591b6a0dc1022c24265f112f4eb35ebfb2be2
| 6,047
|
py
|
Python
|
opac/tests/test_interface_footer.py
|
gitnnolabs/opac
|
c6f7bf90ab409118918b896971b4e77dd28753b5
|
[
"BSD-2-Clause"
] | 12
|
2016-11-23T17:49:07.000Z
|
2019-10-07T00:21:03.000Z
|
opac/tests/test_interface_footer.py
|
gitnnolabs/opac
|
c6f7bf90ab409118918b896971b4e77dd28753b5
|
[
"BSD-2-Clause"
] | 1,589
|
2015-12-03T17:47:10.000Z
|
2022-03-31T23:43:36.000Z
|
opac/tests/test_interface_footer.py
|
gitnnolabs/opac
|
c6f7bf90ab409118918b896971b4e77dd28753b5
|
[
"BSD-2-Clause"
] | 21
|
2015-11-13T18:59:33.000Z
|
2021-10-03T22:34:29.000Z
|
# coding: utf-8
from flask import url_for, current_app
from .base import BaseTestCase
from . import utils
class FooterTestCase(BaseTestCase):
    """Checks that the footer renders the Open Access license on every page type."""

    def _assert_open_access_footer(self, response):
        """Assert the footer advertises the Open Access license.

        Every page under test must render the OA logo, a link to the
        collection "about" page, and the literal "Open Access" label.
        """
        self.assertIn(b'/static/img/oa_logo_32.png', response.data)
        self.assertIn('href="%s"' % url_for('main.about_collection'),
                      response.data.decode('utf-8'))
        self.assertIn(b'Open Access', response.data)

    def test_collection_open_access_home(self):
        """The collection home page shows the license registered for the collection."""
        with current_app.app_context():
            utils.makeOneCollection()
            with self.client as c:
                response = c.get(url_for('main.index'))
                self.assertStatus(response, 200)
                self.assertTemplateUsed('collection/index.html')
                self._assert_open_access_footer(response)

    def test_collection_open_access_list_theme(self):
        """The journal list page shows the license registered for the collection."""
        with current_app.app_context():
            utils.makeOneCollection()
            with self.client as c:
                response = c.get(url_for('main.collection_list'))
                self.assertStatus(response, 200)
                self.assertTemplateUsed('collection/list_journal.html')
                self._assert_open_access_footer(response)

    def test_collection_open_access_journal_home(self):
        """The journal home page shows the license registered for the collection."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal()
            with self.client as c:
                response = c.get(url_for('main.journal_detail',
                                         url_seg=journal.url_segment))
                self.assertStatus(response, 200)
                self.assertTemplateUsed('journal/detail.html')
                self._assert_open_access_footer(response)

    def test_journal_open_access_journal_home(self):
        """The journal home page shows the license registered for the journal."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal()
            with self.client as c:
                response = c.get(url_for('main.journal_detail',
                                         url_seg=journal.url_segment))
                self.assertStatus(response, 200)
                self.assertTemplateUsed('journal/detail.html')
                self._assert_open_access_footer(response)

    def test_journal_open_access_in_issue_grid(self):
        """The issue grid page shows the license registered for the journal."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal()
            utils.makeOneIssue({'journal': journal})
            with self.client as c:
                response = c.get(url_for('main.issue_grid',
                                         url_seg=journal.url_segment))
                self.assertStatus(response, 200)
                self.assertTemplateUsed('issue/grid.html')
                self._assert_open_access_footer(response)

    def test_journal_open_access_in_issue_toc(self):
        """The issue table-of-contents page shows the license registered for the journal."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal()
            issue = utils.makeOneIssue({'journal': journal})
            with self.client as c:
                response = c.get(url_for('main.issue_toc',
                                         url_seg=issue.journal.url_segment,
                                         url_seg_issue=issue.url_segment))
                self.assertStatus(response, 200)
                self.assertTemplateUsed('issue/toc.html')
                self._assert_open_access_footer(response)

    def test_journal_open_access(self):
        """The journal detail page shows the collection license in the footer."""
        with current_app.app_context():
            utils.makeOneCollection()
            journal = utils.makeOneJournal()
            with self.client as c:
                response = c.get(url_for('main.journal_detail',
                                         url_seg=journal.url_segment))
                self.assertStatus(response, 200)
                self.assertTemplateUsed('journal/detail.html')
                # Collection license assertions.
                self._assert_open_access_footer(response)
| 34.357955
| 108
| 0.580949
| 686
| 6,047
| 4.96793
| 0.120991
| 0.073944
| 0.04108
| 0.026702
| 0.928404
| 0.920481
| 0.920481
| 0.885857
| 0.885857
| 0.86561
| 0
| 0.010389
| 0.315528
| 6,047
| 175
| 109
| 34.554286
| 0.812998
| 0.115595
| 0
| 0.735632
| 0
| 0
| 0.149932
| 0.073699
| 0
| 0
| 0
| 0
| 0.402299
| 1
| 0.08046
| false
| 0
| 0.034483
| 0
| 0.126437
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9b7e2a7eea02c9b8b54bac704cfd550258fd7e28
| 12,810
|
py
|
Python
|
tests/api/viewsets/test_chat_viewsets.py
|
oil-rope/oil-and-rope
|
6d59c87d4809f120417a90c1624952085486bb06
|
[
"MIT"
] | 8
|
2019-08-27T20:08:22.000Z
|
2021-07-23T22:49:47.000Z
|
tests/api/viewsets/test_chat_viewsets.py
|
oil-rope/oil-and-rope
|
6d59c87d4809f120417a90c1624952085486bb06
|
[
"MIT"
] | 73
|
2020-03-11T18:07:29.000Z
|
2022-03-28T18:07:47.000Z
|
tests/api/viewsets/test_chat_viewsets.py
|
oil-rope/oil-and-rope
|
6d59c87d4809f120417a90c1624952085486bb06
|
[
"MIT"
] | 4
|
2020-02-22T19:44:17.000Z
|
2022-03-08T09:42:45.000Z
|
from django.apps import apps
from django.shortcuts import reverse
from faker import Faker
from model_bakery import baker
from rest_framework import status
from rest_framework.test import APITestCase
from common.constants import models
# Shared Faker instance used to generate random payloads for the tests below.
fake = Faker()
# Resolve concrete model classes from their dotted-path constants so the
# tests stay decoupled from the apps that declare them.
Chat = apps.get_model(models.CHAT_MODEL)
ChatMessage = apps.get_model(models.CHAT_MESSAGE_MODEL)
User = apps.get_model(models.USER_MODEL)
# URL namespace prefix shared by every chat API route.
base_resolver = 'api:chat'
class TestChatAPIRootViewSet(APITestCase):
    """The chat API root is publicly browsable."""

    def test_anonymous_list_urls_ok(self):
        """An unauthenticated client may fetch the API root index."""
        root_url = reverse(f'{base_resolver}:api-root')
        resp = self.client.get(root_url)
        self.assertEqual(status.HTTP_200_OK, resp.status_code)
class TestChatViewSet(APITestCase):
    """Permission and visibility rules for the chat list/detail endpoints."""

    @classmethod
    def setUpTestData(cls):
        # One plain user and one staff user to exercise both permission paths.
        cls.model = Chat
        cls.user = baker.make(User)
        cls.admin_user = baker.make(User, is_staff=True)

    @classmethod
    def _list_url(cls):
        """URL of the chat list endpoint (previously rebuilt in every test)."""
        return reverse(f'{base_resolver}:chat-list')

    @classmethod
    def _detail_url(cls, chat):
        """URL of the given chat's detail endpoint."""
        return reverse(f'{base_resolver}:chat-detail', kwargs={'pk': chat.pk})

    def test_anonymous_list_ko(self):
        """Anonymous users may not list chats."""
        response = self.client.get(self._list_url())
        self.assertEqual(status.HTTP_403_FORBIDDEN, response.status_code)

    def test_authenticated_chat_list_ok(self):
        """Any authenticated user may call the list endpoint."""
        self.client.force_login(self.user)
        response = self.client.get(self._list_url())
        self.assertEqual(status.HTTP_200_OK, response.status_code)

    def test_authenticated_chat_list_only_user_ok(self):
        """
        Checks if user can only see its own chat.
        """
        self.client.force_login(self.user)
        # Creating data: some chats without the user, some with it.
        baker.make(_model=self.model, _quantity=fake.pyint(min_value=1, max_value=10))
        baker.make(_model=self.model, _quantity=fake.pyint(min_value=1, max_value=10), users=[self.user])
        response = self.client.get(self._list_url())
        self.assertEqual(status.HTTP_200_OK, response.status_code)
        data = response.json()['results']
        expected_data = self.model.objects.filter(
            users__in=[self.user],
        ).count()
        self.assertEqual(expected_data, len(data))

    def test_authenticated_admin_chat_list_ok(self):
        """
        Checks if admin can list every chat.
        """
        self.client.force_login(self.admin_user)
        # Creating data
        baker.make(_model=self.model, _quantity=fake.pyint(min_value=1, max_value=10))
        response = self.client.get(self._list_url())
        self.assertEqual(status.HTTP_200_OK, response.status_code)
        data = response.json()['results']
        expected_data = self.model.objects.count()
        self.assertEqual(expected_data, len(data))

    def test_anonymous_detail_ko(self):
        """Anonymous users may not fetch a chat detail."""
        chat = baker.make(self.model)
        response = self.client.get(self._detail_url(chat))
        self.assertEqual(status.HTTP_403_FORBIDDEN, response.status_code)

    def test_authenticated_user_not_in_chat_detail_ko(self):
        """A user outside the chat gets 404 (existence is hidden, not 403)."""
        chat = baker.make(self.model)
        self.client.force_login(self.user)
        response = self.client.get(self._detail_url(chat))
        self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)

    def test_authenticated_user_in_chat_detail_ok(self):
        """A chat member may fetch the chat detail."""
        chat = baker.make(self.model, users=[self.user])
        baker.make(_model=self.model, _quantity=fake.pyint(min_value=1, max_value=10))
        self.client.force_login(self.user)
        response = self.client.get(self._detail_url(chat))
        self.assertEqual(status.HTTP_200_OK, response.status_code)

    def test_authenticated_admin_chat_detail_ok(self):
        """Admins may fetch any chat detail."""
        chat = baker.make(self.model)
        self.client.force_login(self.admin_user)
        response = self.client.get(self._detail_url(chat))
        self.assertEqual(status.HTTP_200_OK, response.status_code)
class TestChatMessageViewSet(APITestCase):
    """Permission rules for listing, creating, updating and deleting chat messages."""

    @classmethod
    def setUpTestData(cls):
        cls.model = ChatMessage
        cls.user = baker.make(User)
        cls.admin_user = baker.make(User, is_staff=True)
        # One chat the regular user belongs to and one they do not,
        # to exercise the membership checks on message creation.
        cls.chat_with_user_in_it = baker.make(Chat, users=[cls.user])
        cls.chat_without_user_in_it = baker.make(Chat)

    @classmethod
    def _list_url(cls):
        """URL of the message list/create endpoint (previously rebuilt per test)."""
        return reverse(f'{base_resolver}:message-list')

    @classmethod
    def _detail_url(cls, message):
        """URL of the given message's detail endpoint."""
        return reverse(f'{base_resolver}:message-detail', kwargs={'pk': message.pk})

    def test_anonymous_message_list_ko(self):
        """Anonymous users may not list messages."""
        response = self.client.get(self._list_url())
        self.assertEqual(status.HTTP_403_FORBIDDEN, response.status_code)

    def test_authenticated_not_admin_message_list_ok(self):
        """A regular user only lists the messages they authored."""
        baker.make(
            _model=self.model, _quantity=fake.pyint(min_value=1, max_value=10),
            message=fake.word(), author_id=self.user.pk,
        )
        baker.make(_model=self.model, _quantity=fake.pyint(min_value=1, max_value=10), message=fake.word())
        self.client.force_login(self.user)
        response = self.client.get(self._list_url())
        self.assertEqual(status.HTTP_200_OK, response.status_code)
        expected_data = self.model.objects.filter(
            author_id=self.user.id
        ).count()
        data = response.json()['results']
        self.assertEqual(expected_data, len(data))

    def test_authenticated_admin_message_list_ok(self):
        """An admin lists every message."""
        baker.make(_model=self.model, _quantity=fake.pyint(min_value=1, max_value=10), message=fake.word())
        self.client.force_login(self.admin_user)
        response = self.client.get(self._list_url())
        self.assertEqual(status.HTTP_200_OK, response.status_code)
        expected_data = self.model.objects.count()
        data = response.json()['results']
        self.assertEqual(expected_data, len(data))

    def test_authenticated_not_admin_user_in_chat_message_create_ok(self):
        """A member may post a message to their chat."""
        self.client.force_login(self.user)
        data = {
            'chat': self.chat_with_user_in_it.pk,
            'message': fake.word(),
        }
        response = self.client.post(path=self._list_url(), data=data)
        self.assertEqual(status.HTTP_201_CREATED, response.status_code)

    def test_authenticated_not_admin_user_not_in_chat_message_create_ko(self):
        """A non-member posting to a chat gets a validation error."""
        self.client.force_login(self.user)
        data = {
            'chat': self.chat_without_user_in_it.pk,
            'message': fake.word(),
        }
        response = self.client.post(path=self._list_url(), data=data)
        self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)

    def test_authenticated_admin_user_in_chat_message_create_ok(self):
        """An admin may create a message on behalf of a chat member."""
        self.client.force_login(self.admin_user)
        data = {
            'chat': self.chat_with_user_in_it.pk,
            'message': fake.word(),
            'author': self.user.pk,
        }
        response = self.client.post(path=self._list_url(), data=data)
        self.assertEqual(status.HTTP_201_CREATED, response.status_code)

    def test_authenticated_admin_user_not_in_chat_message_create_ko(self):
        """Even an admin cannot attach a message author who is not in the chat."""
        self.client.force_login(self.admin_user)
        data = {
            'chat': self.chat_without_user_in_it.pk,
            'message': fake.word(),
            'author': self.user.pk,
        }
        response = self.client.post(path=self._list_url(), data=data)
        self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)

    def test_anonymous_message_detail_ko(self):
        """Anonymous users may not fetch a message detail."""
        message = baker.make(self.model)
        response = self.client.get(self._detail_url(message))
        self.assertEqual(status.HTTP_403_FORBIDDEN, response.status_code)

    def test_authenticated_not_admin_author_message_detail_ok(self):
        """The author may fetch their own message."""
        message = baker.make(self.model, author=self.user)
        self.client.force_login(self.user)
        response = self.client.get(self._detail_url(message))
        self.assertEqual(status.HTTP_200_OK, response.status_code)

    def test_authenticated_not_admin_not_author_message_detail_ko(self):
        """Non-authors get 404 (existence hidden) on someone else's message."""
        message = baker.make(self.model)
        self.client.force_login(self.user)
        response = self.client.get(self._detail_url(message))
        self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)

    def test_authenticated_admin_author_message_detail_ok(self):
        """An admin may fetch a message they authored."""
        message = baker.make(self.model, author=self.admin_user)
        self.client.force_login(self.admin_user)
        response = self.client.get(self._detail_url(message))
        self.assertEqual(status.HTTP_200_OK, response.status_code)

    def test_authenticated_admin_not_author_message_detail_ok(self):
        """An admin may fetch any user's message."""
        message = baker.make(self.model, author=self.user)
        self.client.force_login(self.admin_user)
        response = self.client.get(self._detail_url(message))
        self.assertEqual(status.HTTP_200_OK, response.status_code)

    def test_authenticated_not_admin_author_message_partial_update_ok(self):
        """The author may PATCH their own message."""
        message = baker.make(self.model, author=self.user)
        self.client.force_login(self.user)
        data = {
            'message': fake.word(),
        }
        response = self.client.patch(path=self._detail_url(message), data=data)
        self.assertEqual(status.HTTP_200_OK, response.status_code)

    def test_authenticated_not_admin_not_author_message_partial_update_ko(self):
        """Non-authors may not PATCH someone else's message."""
        message = baker.make(self.model)
        self.client.force_login(self.user)
        data = {
            'message': fake.word(),
        }
        response = self.client.patch(path=self._detail_url(message), data=data)
        self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)

    def test_authenticated_admin_author_message_update_ok(self):
        """An admin may PUT a full update on their own message."""
        message = baker.make(self.model, author=self.admin_user)
        self.client.force_login(self.admin_user)
        data = {
            'message': fake.word(),
            'chat': message.chat.pk,
            'author': self.admin_user.pk,
        }
        response = self.client.put(path=self._detail_url(message), data=data)
        self.assertEqual(status.HTTP_200_OK, response.status_code)

    def test_authenticated_admin_not_author_message_update_ok(self):
        """An admin may PUT a full update on any user's message."""
        message = baker.make(self.model, author=self.user)
        self.client.force_login(self.admin_user)
        data = {
            'message': fake.word(),
            'chat': message.chat.pk,
            'author': self.user.pk,
        }
        response = self.client.put(path=self._detail_url(message), data=data)
        self.assertEqual(status.HTTP_200_OK, response.status_code)

    def test_authenticated_not_admin_author_message_delete_ok(self):
        """The author may delete their own message."""
        message = baker.make(self.model, author=self.user)
        self.client.force_login(self.user)
        response = self.client.delete(self._detail_url(message))
        self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code)

    def test_authenticated_not_admin_not_author_message_delete_ko(self):
        """Non-authors may not delete someone else's message."""
        message = baker.make(self.model)
        self.client.force_login(self.user)
        response = self.client.delete(self._detail_url(message))
        self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)

    def test_authenticated_admin_author_message_delete_ok(self):
        """An admin may delete their own message."""
        message = baker.make(self.model, author=self.admin_user)
        self.client.force_login(self.admin_user)
        response = self.client.delete(self._detail_url(message))
        self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code)

    def test_authenticated_admin_not_author_message_delete_ok(self):
        """An admin may delete any user's message."""
        message = baker.make(self.model)
        self.client.force_login(self.admin_user)
        response = self.client.delete(self._detail_url(message))
        self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code)
| 38.238806
| 107
| 0.679703
| 1,685
| 12,810
| 4.913947
| 0.067656
| 0.06401
| 0.038527
| 0.052536
| 0.919203
| 0.910386
| 0.903502
| 0.886111
| 0.872101
| 0.852899
| 0
| 0.010568
| 0.202186
| 12,810
| 334
| 108
| 38.353293
| 0.799609
| 0.008353
| 0
| 0.729508
| 0
| 0
| 0.078388
| 0.064638
| 0
| 0
| 0
| 0
| 0.135246
| 1
| 0.127049
| false
| 0
| 0.028689
| 0
| 0.168033
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9b7efac0bf07c1b29fdb28ebcd6199f5aee27ffa
| 5,111
|
py
|
Python
|
21-fs-ias-lec/groups/08-P2PGeocaching/src/P2PGeocaching_pkg/RSA/TestTransformation.py
|
LarsWaldvogel/BACnet
|
556431ce8306f7b556d6b6e98dc3f798f994e73d
|
[
"MIT"
] | null | null | null |
21-fs-ias-lec/groups/08-P2PGeocaching/src/P2PGeocaching_pkg/RSA/TestTransformation.py
|
LarsWaldvogel/BACnet
|
556431ce8306f7b556d6b6e98dc3f798f994e73d
|
[
"MIT"
] | null | null | null |
21-fs-ias-lec/groups/08-P2PGeocaching/src/P2PGeocaching_pkg/RSA/TestTransformation.py
|
LarsWaldvogel/BACnet
|
556431ce8306f7b556d6b6e98dc3f798f994e73d
|
[
"MIT"
] | 1
|
2021-04-15T10:35:37.000Z
|
2021-04-15T10:35:37.000Z
|
import unittest
from src.P2PGeocaching_pkg.RSA.RSATransformation import \
RSATransformation
class TestTransformation(unittest.TestCase):
    """Round-trip and input-validation tests for RSATransformation.

    The original file repeated near-identical assertEqual lines ~30 times;
    the same input cases are now driven by data tables and ``subTest`` so a
    single failure pinpoints the offending input without stopping the rest.
    """

    # Key arguments that both encoding() and decoding() must reject
    # (the methods signal rejection by returning -1, not by raising).
    INVALID_KEYS = [
        1,                  # int instead of key pair
        2.3,                # double
        "Hello",            # string
        [],                 # empty list
        [1],                # list with one element
        float(126),         # float
        [10, "Hi"],         # list with a string element
        [10, 2.3],          # list with a double element
        [10, float(126)],   # list with a float element
        [10, -1],           # list with a negative element
        ["Hi", 10],
        [2.3, 10],
        [float(126), 10],
        [-1, 10],
    ]

    def testEncodingAndDecoding(self):
        """Messages survive an encode/decode round-trip with a matching key pair."""
        trans = RSATransformation()
        private_key = [4607, 20467]
        public_key = [16703, 20467]
        for message in ("Lars Waldvogel", "Kapilas Gnanarajah", "Emmanuel Arackal"):
            with self.subTest(message=message):
                blocks = trans.encoding(message, private_key)
                self.assertEqual(trans.decoding(blocks, public_key), message)

    def testExceptionsOfEncoding(self):
        """encoding() returns -1 for every malformed key or message argument."""
        trans = RSATransformation()
        for key in self.INVALID_KEYS:
            with self.subTest(key=key):
                self.assertEqual(trans.encoding("Test", key), -1,
                                 "No Exception was detected for key %r" % (key,))
        # With a structurally valid key, non-string messages must be rejected.
        valid_key = [10, 10]
        for message in (1, 2.3, valid_key, float(126)):
            with self.subTest(message=message):
                self.assertEqual(trans.encoding(message, valid_key), -1,
                                 "No Exception was detected for message %r" % (message,))

    def testExceptionsOfDecoding(self):
        """decoding() returns -1 for every malformed key or message argument."""
        trans = RSATransformation()
        for key in self.INVALID_KEYS:
            with self.subTest(key=key):
                self.assertEqual(trans.decoding("Test", key), -1,
                                 "No Exception was detected for key %r" % (key,))
        # With a structurally valid key, non-list messages must be rejected.
        valid_key = [10, 10]
        for message in (1, 2.3, "Hallo", float(126)):
            with self.subTest(message=message):
                self.assertEqual(trans.decoding(message, valid_key), -1,
                                 "No Exception was detected for message %r" % (message,))
# Allow running this test module directly with ``python TestTransformation.py``.
if __name__ == '__main__':
    unittest.main()
| 58.079545
| 110
| 0.644492
| 656
| 5,111
| 5.007622
| 0.099085
| 0.178082
| 0.219178
| 0.164384
| 0.796956
| 0.782344
| 0.782344
| 0.753425
| 0.743988
| 0.743988
| 0
| 0.039461
| 0.216592
| 5,111
| 87
| 111
| 58.747126
| 0.780969
| 0
| 0
| 0.506173
| 0
| 0
| 0.329354
| 0
| 0
| 0
| 0
| 0
| 0.481481
| 1
| 0.037037
| false
| 0
| 0.024691
| 0
| 0.074074
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9b8b6cae82cb25cb70639755ce6f786daf403393
| 259
|
py
|
Python
|
onshape_to_robot/__init__.py
|
MisoRobotics/onshape-to-robot
|
1abb2397f27c332b20af83a3c8b9d9e143c2ba66
|
[
"MIT"
] | null | null | null |
onshape_to_robot/__init__.py
|
MisoRobotics/onshape-to-robot
|
1abb2397f27c332b20af83a3c8b9d9e143c2ba66
|
[
"MIT"
] | null | null | null |
onshape_to_robot/__init__.py
|
MisoRobotics/onshape-to-robot
|
1abb2397f27c332b20af83a3c8b9d9e143c2ba66
|
[
"MIT"
] | null | null | null |
"""Export models from OnShape to other robotics formats."""
from . import bullet # noqa: F401
from . import clear_cache # noqa: F401
from . import edit_shape # noqa: F401
from . import onshape_to_robot # noqa: F401
from . import pure_sketch # noqa: F401
| 37
| 59
| 0.72973
| 38
| 259
| 4.842105
| 0.5
| 0.271739
| 0.26087
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07109
| 0.185328
| 259
| 6
| 60
| 43.166667
| 0.800948
| 0.420849
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
fd62c9cb3a5f2cdc239935c63e85f2a1a3b640e5
| 4,244
|
py
|
Python
|
tests/importers/test_get_importers.py
|
andreroggeri/br-to-ynab
|
c5d0ef3804bb575badc05ac6dc771f6a9281f955
|
[
"MIT"
] | 5
|
2021-09-20T13:15:37.000Z
|
2022-03-01T01:03:27.000Z
|
tests/importers/test_get_importers.py
|
andreroggeri/br-to-ynab
|
c5d0ef3804bb575badc05ac6dc771f6a9281f955
|
[
"MIT"
] | 4
|
2021-04-28T14:11:42.000Z
|
2021-10-09T16:18:15.000Z
|
tests/importers/test_get_importers.py
|
andreroggeri/br-to-ynab
|
c5d0ef3804bb575badc05ac6dc771f6a9281f955
|
[
"MIT"
] | 1
|
2021-09-27T15:13:30.000Z
|
2021-09-27T15:13:30.000Z
|
from unittest.mock import MagicMock, patch
import pynubank
from brbanks2ynab.config.config import ImporterConfig
from brbanks2ynab.importers import get_importers_for_bank, NubankCreditCardData, NubankCheckingAccountData, \
BradescoCreditCard, BradescoCheckingAccount, AleloAlimentacaoImporter, AleloRefeicaoImporter
from brbanks2ynab.importers.alelo.alelo_flex_card import AleloFlexImporter
def test_should_get_only_nubank_card_importer(monkeypatch, config_for_nubank, ynab_account):
    """Only the Nubank credit-card importer is built when the checking account is disabled."""
    monkeypatch.setattr('pynubank.nubank.HttpClient', pynubank.MockHttpClient)
    importer_config = ImporterConfig.from_dict(config_for_nubank)
    importer_config.nubank.checking_account = None
    ynab_account.name = importer_config.nubank.credit_card_account
    importers = get_importers_for_bank('Nubank', importer_config, [ynab_account])
    assert len(importers) == 1
    assert isinstance(importers[0], NubankCreditCardData)
def test_should_get_only_nubank_checking_account_importer(monkeypatch, config_for_nubank, ynab_account):
    """Only the Nubank checking-account importer is built when the card account is disabled."""
    monkeypatch.setattr('pynubank.nubank.HttpClient', pynubank.MockHttpClient)
    importer_config = ImporterConfig.from_dict(config_for_nubank)
    importer_config.nubank.credit_card_account = None
    ynab_account.name = importer_config.nubank.checking_account
    importers = get_importers_for_bank('Nubank', importer_config, [ynab_account])
    assert len(importers) == 1
    assert isinstance(importers[0], NubankCheckingAccountData)
@patch('builtins.input', lambda *args: '123456')
def test_should_get_only_bradesco_card_importer(monkeypatch, config_for_bradesco, ynab_account):
    """Only the Bradesco credit-card importer is built when the checking account is disabled."""
    monkeypatch.setattr('brbanks2ynab.importers.Bradesco', MagicMock())
    importer_config = ImporterConfig.from_dict(config_for_bradesco)
    importer_config.bradesco.checking_account = None
    ynab_account.name = importer_config.bradesco.credit_card_account
    importers = get_importers_for_bank('Bradesco', importer_config, [ynab_account])
    assert len(importers) == 1
    assert isinstance(importers[0], BradescoCreditCard)
@patch('builtins.input', lambda *args: '123456')
def test_should_get_only_bradesco_checking_account_importer(monkeypatch, config_for_bradesco, ynab_account):
    """Only the Bradesco checking-account importer is built when the card account is disabled."""
    monkeypatch.setattr('brbanks2ynab.importers.Bradesco', MagicMock())
    importer_config = ImporterConfig.from_dict(config_for_bradesco)
    importer_config.bradesco.credit_card_account = None
    ynab_account.name = importer_config.bradesco.checking_account
    importers = get_importers_for_bank('Bradesco', importer_config, [ynab_account])
    assert len(importers) == 1
    assert isinstance(importers[0], BradescoCheckingAccount)
def test_should_get_only_alelo_flex_importer(monkeypatch, config_for_alelo, ynab_account):
    """Only the Alelo flex importer is built when refeicao and alimentacao are disabled."""
    monkeypatch.setattr('brbanks2ynab.importers.Alelo', MagicMock())
    importer_config = ImporterConfig.from_dict(config_for_alelo)
    importer_config.alelo.refeicao_account = None
    importer_config.alelo.alimentacao_account = None
    ynab_account.name = importer_config.alelo.flex_account
    importers = get_importers_for_bank('Alelo', importer_config, [ynab_account])
    assert len(importers) == 1
    assert isinstance(importers[0], AleloFlexImporter)
def test_should_get_only_alelo_alimentacao_importer(monkeypatch, config_for_alelo, ynab_account):
    """Only the Alelo alimentacao importer is built when refeicao and flex are disabled."""
    monkeypatch.setattr('brbanks2ynab.importers.Alelo', MagicMock())
    importer_config = ImporterConfig.from_dict(config_for_alelo)
    importer_config.alelo.refeicao_account = None
    importer_config.alelo.flex_account = None
    ynab_account.name = importer_config.alelo.alimentacao_account
    importers = get_importers_for_bank('Alelo', importer_config, [ynab_account])
    assert len(importers) == 1
    assert isinstance(importers[0], AleloAlimentacaoImporter)
def test_should_get_only_alelo_refeicao_importer(monkeypatch, config_for_alelo, ynab_account):
    """Only the Alelo refeicao importer is built when flex and alimentacao are disabled."""
    monkeypatch.setattr('brbanks2ynab.importers.Alelo', MagicMock())
    importer_config = ImporterConfig.from_dict(config_for_alelo)
    importer_config.alelo.flex_account = None
    importer_config.alelo.alimentacao_account = None
    ynab_account.name = importer_config.alelo.refeicao_account
    importers = get_importers_for_bank('Alelo', importer_config, [ynab_account])
    assert len(importers) == 1
    assert isinstance(importers[0], AleloRefeicaoImporter)
| 40.419048
| 109
| 0.792177
| 485
| 4,244
| 6.628866
| 0.113402
| 0.071851
| 0.037325
| 0.047278
| 0.842924
| 0.837947
| 0.778538
| 0.733437
| 0.707309
| 0.707309
| 0
| 0.009137
| 0.123233
| 4,244
| 104
| 110
| 40.807692
| 0.854878
| 0
| 0
| 0.581081
| 0
| 0
| 0.066211
| 0.046654
| 0
| 0
| 0
| 0
| 0.189189
| 1
| 0.094595
| false
| 0
| 0.621622
| 0
| 0.716216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
bd06b86d62863eed7dbf49abfb192e494342bad7
| 10,759
|
py
|
Python
|
app/frontend/api/controller.py
|
ok11/caleb
|
c7fff0a7a89c9e6bb94f2d04505f1de71d5a9fc2
|
[
"MIT"
] | null | null | null |
app/frontend/api/controller.py
|
ok11/caleb
|
c7fff0a7a89c9e6bb94f2d04505f1de71d5a9fc2
|
[
"MIT"
] | null | null | null |
app/frontend/api/controller.py
|
ok11/caleb
|
c7fff0a7a89c9e6bb94f2d04505f1de71d5a9fc2
|
[
"MIT"
] | null | null | null |
import http
import logging
import traceback
from flask import Blueprint, abort, request, jsonify
from app.frontend.api.model import *
from app.core.model import *
# Blueprint grouping every JSON API endpoint under the /api prefix.
api = Blueprint('api', __name__, url_prefix='/api')
# Marshmallow schemas used to (de)serialize model instances.
# strict=True makes schema.load raise ValidationError on bad input
# instead of returning an errors dict (marshmallow 2.x behaviour —
# TODO confirm marshmallow version against requirements).
book_schema = BookSchema(strict=True)
books_schema = BookSchema(many=True, strict=True)
author_schema = AuthorSchema(strict=True)
authors_schema = AuthorSchema(many=True, strict=True)
series_schema = SeriesSchema(strict=True)
@api.route("/books/", methods=["GET"])
def get_books():
    """Return every book as a JSON array (200), or 500 on an internal error."""
    try:
        all_books = Book.query.all()
        resp = books_schema.jsonify(all_books)
        resp.status_code = http.HTTPStatus.OK
        resp.headers['content-type'] = 'application/json'
        return resp
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/books/", methods=["POST"])
def create_book():
    """Create a book from a JSON payload matching BookSchema.

    Nested relationship collections (authors, languages, tags, identifiers,
    series) are stripped from the scalar constructor kwargs and attached to
    the new Book one by one.  Returns 201 with a Location header on success,
    400 on validation errors, 500 otherwise.
    """
    try:
        j = request.get_json()
        data, err = book_schema.load(j)
        # Scalar columns only; relationship keys are handled separately below.
        # (The original comprehension unpacked each pair into ``(k, data[k])``,
        # mutating ``data`` while iterating it — rewritten as a plain filter.)
        relationship_keys = ('authors', 'languages', 'tags', 'identifiers', 'series')
        book_dict = {k: v for k, v in data.items() if k not in relationship_keys}
        book = Book(**book_dict)
        for author in data.get('authors', []):
            book.authors.append(Author(**author))
        for language in data.get('languages', []):
            book.languages.append(Language(**language))
        for tag in data.get('tags', []):
            book.tags.append(Tag(**tag))
        for ident in data.get('identifiers', []):
            book.identifiers.append(Identifier(**ident))
        for series in data.get('series', []):
            book.series.append(Series(**series))
        book.save()
        response = book_schema.jsonify(book)
        response.status_code = http.HTTPStatus.CREATED
        response.headers['location'] = '/api/books/{}'.format(book.id)
        response.headers['content-type'] = 'application/json'
        return response
    except ValidationError as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.BAD_REQUEST, e)
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/books/<string:id>", methods=["GET"])
def get_book(id):
    """Return the book with the given id (200), 404 if absent, 500 on error."""
    try:
        book = Book.query.get(id)
        if book:
            response = book_schema.jsonify(book)
            response.status_code = http.HTTPStatus.OK
            response.headers['content-type'] = 'application/json'
            return response
        else:
            # Log the miss, consistent with every other handler in this module.
            logging.debug("Book not found: {}".format(id))
            return ('', http.HTTPStatus.NOT_FOUND)
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/books/<string:id>", methods=["PUT"])
def update_book(id):
    """Replace the book's fields from a BookSchema payload; 200/404/500."""
    try:
        book = Book.query.get(id)
        if not book:
            logging.debug("Book not found: {}".format(id))
            return ('', http.HTTPStatus.NOT_FOUND)
        payload = request.get_json()
        data, err = book_schema.load(payload)
        book.update(**data)
        book.save()
        resp = book_schema.jsonify(book)
        resp.status_code = http.HTTPStatus.OK
        resp.headers['content-type'] = 'application/json'
        return resp
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/books/<string:id>/authors/", methods=["GET"])
def get_book_authors(id):
    """Return the authors of the given book as a JSON array; 200/404/500."""
    try:
        book = Book.query.get(id)
        if not book:
            logging.debug("Book not found: {}".format(id))
            return ('', http.HTTPStatus.NOT_FOUND)
        resp = authors_schema.jsonify(book.authors)
        resp.status_code = http.HTTPStatus.OK
        resp.headers['content-type'] = 'application/json'
        return resp
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/books/<string:id>/authors/", methods=["POST"])
def create_book_author(id):
    """Create an author and attach it to the given book; 201/404/500.

    The payload must match AuthorSchema.  On success the Location header
    points at the nested author resource.
    """
    try:
        book = Book.query.get(id)
        if book:
            j = request.get_json()
            data, err = author_schema.load(j)
            author = Author(**data)
            # The original called ``author.append(book)`` — an Author model
            # has no append(); attach via the relationship collection instead.
            # NOTE(review): assumes Author.books is the Book<->Author
            # relationship (backref of Book.authors) — confirm against model.
            author.books.append(book)
            author.save()
            response = author_schema.jsonify(author)
            response.status_code = http.HTTPStatus.CREATED
            response.headers['location'] = '/api/books/{}/authors/{}'.format(book.id, author.id)
            response.headers['content-type'] = 'application/json'
            return response
        else:
            logging.debug("Book not found: {}".format(id))
            return ('', http.HTTPStatus.NOT_FOUND)
    except Exception as e:
        # Log before aborting, consistent with the other handlers
        # (the original silently dropped the traceback here).
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/authors/", methods=["GET"])
def get_authors():
    """Return all authors as a JSON array (200), or abort 500 on error."""
    try:
        all_authors = Author.query.all()
        resp = authors_schema.jsonify(all_authors)
        resp.status_code = http.HTTPStatus.OK
        resp.headers['content-type'] = 'application/json'
        return resp
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/authors/", methods=["POST"])
def create_author():
    """Create a new author from the JSON request body.

    Returns the created author as JSON with 201 and a Location header,
    400 on schema validation errors, 500 on unexpected errors.
    """
    try:
        j = request.get_json()
        data, err = author_schema.load(j)
        author = Author(**data)
        author.save()
        # Fix: serialize with author_schema — the original used book_schema,
        # which produced a book-shaped payload for an Author.
        response = author_schema.jsonify(author)
        response.status_code = http.HTTPStatus.CREATED
        # Fix: Location must point at the authors resource, not /api/books/.
        response.headers['location'] = '/api/authors/{}'.format(author.id)
        response.headers['content-type'] = 'application/json'
        return response
    except ValidationError as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.BAD_REQUEST, e)
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/authors/<string:id>", methods=["GET"])
def get_author(id):
    """Return one author by id as JSON (200), or an empty 404 if unknown."""
    try:
        author = Author.query.get(id)
        if not author:
            logging.debug("Author not found: {}".format(id))
            return ('', http.HTTPStatus.NOT_FOUND)
        resp = author_schema.jsonify(author)
        resp.status_code = http.HTTPStatus.OK
        resp.headers['content-type'] = 'application/json'
        return resp
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/authors/<string:id>", methods=["PUT"])
def update_author(id):
    """Update an existing author from the JSON request body.

    Returns the updated author as JSON (200), an empty 404 if the id
    is unknown, or aborts with 500 on any unexpected error.
    """
    try:
        author = Author.query.get(id)
        if author:
            j = request.get_json()
            data, err = author_schema.load(j)
            # update(**data) applies all loaded fields; the old per-field
            # commented-out assignments were dead code and are removed.
            author.update(**data)
            author.save()
            response = author_schema.jsonify(author)
            response.status_code = http.HTTPStatus.OK
            response.headers['content-type'] = 'application/json'
            return response
        else:
            logging.debug("Author not found: {}".format(id))
            return ('', http.HTTPStatus.NOT_FOUND)
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/series/", methods=["GET"])
def get_all_series():
    """Return every series as a JSON array (200), or abort 500 on error."""
    try:
        every_series = Series.query.all()
        resp = series_schema.jsonify(every_series)
        resp.status_code = http.HTTPStatus.OK
        resp.headers['content-type'] = 'application/json'
        return resp
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/series/", methods=["POST"])
def create_series():
    """Create a new series from the JSON request body.

    Returns the created series as JSON with 201 and a Location header,
    400 on schema validation errors, 500 on unexpected errors.
    """
    try:
        j = request.get_json()
        data, err = series_schema.load(j)
        series = Series(**data)
        series.save()
        # Fix: serialize with series_schema — the original used book_schema,
        # which produced a book-shaped payload for a Series.
        response = series_schema.jsonify(series)
        response.status_code = http.HTTPStatus.CREATED
        # Fix: Location must point at the series resource, not /api/books/.
        response.headers['location'] = '/api/series/{}'.format(series.id)
        response.headers['content-type'] = 'application/json'
        return response
    except ValidationError as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.BAD_REQUEST, e)
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/series/<string:id>", methods=["GET"])
def get_series(id):
    """Return one series by id as JSON (200), or an empty 404 if unknown."""
    try:
        series = Series.query.get(id)
        if not series:
            logging.debug("Series not found: {}".format(id))
            return ('', http.HTTPStatus.NOT_FOUND)
        resp = series_schema.jsonify(series)
        resp.status_code = http.HTTPStatus.OK
        resp.headers['content-type'] = 'application/json'
        return resp
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/series/<string:id>", methods=["PUT"])
def update_series(id):
    """Update an existing series from the JSON request body.

    Returns the updated series as JSON (200), an empty 404 if the id
    is unknown, or aborts with 500 on any unexpected error.
    """
    try:
        series = Series.query.get(id)
        if series:
            j = request.get_json()
            data, err = series_schema.load(j)
            # update(**data) applies all loaded fields; the old per-field
            # commented-out assignments were dead code and are removed.
            series.update(**data)
            series.save()
            response = series_schema.jsonify(series)
            response.status_code = http.HTTPStatus.OK
            response.headers['content-type'] = 'application/json'
            return response
        else:
            logging.debug("Series not found: {}".format(id))
            return ('', http.HTTPStatus.NOT_FOUND)
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
@api.route("/series/<string:id>/books", methods=["GET"])
def get_series_books(id):
    """Return the books belonging to the given series as a JSON array.

    Responds 200 with the serialized books, an empty 404 if the series
    id is unknown, or aborts with 500 on any unexpected error.
    """
    try:
        series = Series.query.get(id)
        if series:
            # NOTE(review): books is invoked as a method here, while the
            # analogous book.authors above is accessed as an attribute —
            # confirm the Series model really exposes books() as a callable.
            books = series.books()
            response = books_schema.jsonify(books)
            response.status_code = http.HTTPStatus.OK
            response.headers['content-type'] = 'application/json'
            return response
        else:
            logging.debug("Series not found: {}".format(id))
            return ('', http.HTTPStatus.NOT_FOUND)
    except Exception as e:
        logging.exception(e, exc_info=True)
        abort(http.HTTPStatus.INTERNAL_SERVER_ERROR, e)
| 34.264331
| 96
| 0.610001
| 1,251
| 10,759
| 5.138289
| 0.078337
| 0.091475
| 0.053205
| 0.052894
| 0.818606
| 0.794337
| 0.784536
| 0.783136
| 0.78018
| 0.774736
| 0
| 0
| 0.260154
| 10,759
| 313
| 97
| 34.373802
| 0.807538
| 0.019983
| 0
| 0.720755
| 0
| 0
| 0.098719
| 0.009777
| 0
| 0
| 0
| 0.003195
| 0
| 1
| 0.056604
| false
| 0
| 0.022642
| 0
| 0.169811
| 0.007547
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bd3f46517dda0beae2e46dc91c325ed577603880
| 252
|
py
|
Python
|
gd/utils/__init__.py
|
scottwedge/gd.py
|
328c9833abc949b1c9ac0eabe276bd66fead4c2c
|
[
"MIT"
] | null | null | null |
gd/utils/__init__.py
|
scottwedge/gd.py
|
328c9833abc949b1c9ac0eabe276bd66fead4c2c
|
[
"MIT"
] | null | null | null |
gd/utils/__init__.py
|
scottwedge/gd.py
|
328c9833abc949b1c9ac0eabe276bd66fead4c2c
|
[
"MIT"
] | null | null | null |
from gd.utils.async_utils import *
from gd.utils.decorators import *
from gd.utils.search_utils import *
from gd.utils.text_tools import *
from gd.utils.xml_parser import *
from gd.utils.enums import value_to_enum
from gd.utils import tasks
| 25.2
| 41
| 0.77381
| 42
| 252
| 4.5
| 0.380952
| 0.222222
| 0.407407
| 0.449735
| 0.232804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154762
| 252
| 9
| 42
| 28
| 0.887324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bd5033531d5fa9220ab5f7beb2c91968b1e8bcb2
| 6,313
|
py
|
Python
|
_jupyter/descriptors/.ipynb_checkpoints/validator-checkpoint.py
|
gdenn/my-blog
|
8bdf6c04c99c0edfcb34512b60053707604f2480
|
[
"MIT"
] | null | null | null |
_jupyter/descriptors/.ipynb_checkpoints/validator-checkpoint.py
|
gdenn/my-blog
|
8bdf6c04c99c0edfcb34512b60053707604f2480
|
[
"MIT"
] | null | null | null |
_jupyter/descriptors/.ipynb_checkpoints/validator-checkpoint.py
|
gdenn/my-blog
|
8bdf6c04c99c0edfcb34512b60053707604f2480
|
[
"MIT"
] | null | null | null |
{
"cells": [
{
"cell_type": "code",
"execution_count": 20,
"id": "eced0ee2",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:root:__set__: '2.2.2.2'\n"
]
},
{
"ename": "ValidationException",
"evalue": "5.5.5. is not a valid ipv4 address",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mValidationException\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-20-4a9ac08ac09c>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 50\u001b[0m \u001b[0mflow\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mFlow\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 51\u001b[0m \u001b[0mflow\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msrc_ipv4\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m\"2.2.2.2\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 52\u001b[0;31m \u001b[0mflow\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdst_ipv4\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m\"5.5.5.\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m<ipython-input-20-4a9ac08ac09c>\u001b[0m in \u001b[0;36m__set__\u001b[0;34m(self, obj, value)\u001b[0m\n\u001b[1;32m 20\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 21\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__set__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 22\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalidate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 23\u001b[0m \u001b[0mlogging\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minfo\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"__set__: %r\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 24\u001b[0m \u001b[0msetattr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mproperty_name\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m<ipython-input-20-4a9ac08ac09c>\u001b[0m in \u001b[0;36mvalidate\u001b[0;34m(self, value)\u001b[0m\n\u001b[1;32m 38\u001b[0m \u001b[0mipv4_pattern\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mre\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcompile\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mr'^(?:[0-9]{1,3}\\.){3}[0-9]{1,3}$'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 39\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mipv4_pattern\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmatch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 40\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mValidationException\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"{} is not a valid ipv4 address\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 41\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 42\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mValidationException\u001b[0m: 5.5.5. is not a valid ipv4 address"
]
}
],
"source": [
"import logging\n",
"import re\n",
"from abc import ABC, abstractmethod\n",
"\n",
"logging.basicConfig(level=logging.INFO)\n",
"\n",
"\n",
"class ValidationException(Exception):\n",
" pass\n",
"\n",
"class PropertyWithValidator(ABC):\n",
"\n",
" def __set_name__(self, owner, name):\n",
" self.property_name = \"_\" + name\n",
" \n",
" def __get__(self, obj, obj_type=None):\n",
" value = getattr(obj, self.property_name)\n",
" logging.info(\"__get__: %r\", value)\n",
" return value\n",
" \n",
" def __set__(self, obj, value):\n",
" self.validate(value)\n",
" logging.info(\"__set__: %r\", value)\n",
" setattr(obj, self.property_name, value)\n",
"\n",
" @abstractmethod\n",
" def validate(self, value):\n",
" pass\n",
" \n",
" \n",
"class IPv4(PropertyWithValidator):\n",
" \n",
" def validate(self, value):\n",
" \n",
" if type(value) is not str:\n",
" raise ValidationException(\"{} must be a string to be a valid ipv4 address\".format(value))\n",
" \n",
" ipv4_pattern = re.compile(r'^(?:[0-9]{1,3}\\.){3}[0-9]{1,3}$')\n",
" if not ipv4_pattern.match(value):\n",
" raise ValidationException(\"{} is not a valid ipv4 address\".format(value))\n",
" \n",
"class Flow:\n",
" \n",
" src_ipv4 = IPv4()\n",
" dst_ipv4 = IPv4()\n",
"\n",
" \n",
"flow = Flow()\n",
"flow.src_ipv4 = \"2.2.2.2\"\n",
"flow.dst_ipv4 = \"5.5.5.\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "fc25871e",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.5"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
| 55.377193
| 1,348
| 0.606051
| 1,014
| 6,313
| 3.708087
| 0.148915
| 0.25133
| 0.360638
| 0.242021
| 0.618617
| 0.582713
| 0.559309
| 0.547606
| 0.509309
| 0.483511
| 0
| 0.258663
| 0.163472
| 6,313
| 113
| 1,349
| 55.867257
| 0.453323
| 0
| 0
| 0.238938
| 0
| 0.079646
| 0.817995
| 0.532077
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.017699
| 0.026549
| 0
| 0.026549
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1fbbc1801ef6abde48a7788f8aa1e207cb50484a
| 2,138
|
py
|
Python
|
build/lib/auto_instr/agilent34405dmm.py
|
arvind0790/auto_instr
|
6b4ff1c535a8124c6f8e92e5dddd71f9101f8c24
|
[
"MIT"
] | 1
|
2018-07-26T09:08:18.000Z
|
2018-07-26T09:08:18.000Z
|
auto_instr/agilent34405dmm.py
|
arvind0790/auto_instr
|
6b4ff1c535a8124c6f8e92e5dddd71f9101f8c24
|
[
"MIT"
] | null | null | null |
auto_instr/agilent34405dmm.py
|
arvind0790/auto_instr
|
6b4ff1c535a8124c6f8e92e5dddd71f9101f8c24
|
[
"MIT"
] | 1
|
2019-07-15T13:19:01.000Z
|
2019-07-15T13:19:01.000Z
|
class agi34405(object):
    """SCPI helpers for the Agilent 34405A digital multimeter.

    Each read_* function configures the meter for one measurement
    function, arms a single immediate trigger, issues :READ? and
    returns the raw reading string from the instrument.
    """

    def _measure(instr, func_arg, sens_prefix):
        """Run one full configure/trigger/read cycle.

        instr       -- instrument handle exposing write() and read()
                       (VISA-style session)
        func_arg    -- argument for SENS:FUNC, e.g. "'VOLT:DC'"
        sens_prefix -- SENS subsystem prefix, e.g. 'VOLT:DC'
        Returns the instrument's reply to :READ? unchanged.
        """
        instr.write("*CLS")
        instr.write('INIT:CONT OFF;:ABORT')
        instr.write("SENS:FUNC " + func_arg)
        instr.write('SENS:{}:RANG:AUTO 1'.format(sens_prefix))
        instr.write('SENS:{}:DIG 6'.format(sens_prefix))
        instr.write('FORM:ELEM READ')
        instr.write('TRIG:COUN 1')
        instr.write('SAMP:COUN 1')
        instr.write('TRIG:DEL 0')
        instr.write('TRIG:SOUR IMM')
        instr.write(':READ?')
        return instr.read()

    ## DC VOLTAGE##
    def read_DCV(instr):
        # DC voltage; identical command sequence factored into _measure().
        return agi34405._measure(instr, "'VOLT:DC'", 'VOLT:DC')

    ## DC CURRENT##
    def read_DCI(instr):
        # DC current measurement.
        return agi34405._measure(instr, "'CURR:DC'", 'CURR:DC')

    ##### RESISTANCE ########
    def read_RES(instr):
        # Two-wire resistance measurement.
        return agi34405._measure(instr, "'RES'", 'RES')

    ##### AC VOLTAGE ########
    def read_ACV(instr):
        # AC voltage measurement.
        return agi34405._measure(instr, "'VOLT:AC'", 'VOLT:AC')
| 32.393939
| 48
| 0.523386
| 275
| 2,138
| 4.054545
| 0.149091
| 0.394619
| 0.150673
| 0.107623
| 0.884305
| 0.8287
| 0.808072
| 0.735426
| 0.735426
| 0.735426
| 0
| 0.01679
| 0.303555
| 2,138
| 65
| 49
| 32.892308
| 0.732035
| 0.023854
| 0
| 0.701754
| 0
| 0
| 0.296371
| 0.033266
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070175
| false
| 0
| 0
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1feee67e50dbfac0e8d701f717216282a14ee19b
| 170
|
py
|
Python
|
src/nodes/corenodes/input/__init__.py
|
Correct-Syntax/GimelStudio
|
db6e2db35730e11bcb25f5ba82823e68b86003f1
|
[
"Apache-2.0"
] | 134
|
2021-02-27T08:28:09.000Z
|
2022-03-30T17:46:27.000Z
|
src/nodes/corenodes/input/__init__.py
|
Correct-Syntax/GimelStudio
|
db6e2db35730e11bcb25f5ba82823e68b86003f1
|
[
"Apache-2.0"
] | 127
|
2021-04-13T13:34:20.000Z
|
2022-02-14T21:16:12.000Z
|
src/nodes/corenodes/input/__init__.py
|
Correct-Syntax/GimelStudio
|
db6e2db35730e11bcb25f5ba82823e68b86003f1
|
[
"Apache-2.0"
] | 20
|
2021-03-23T20:06:05.000Z
|
2022-01-20T18:24:53.000Z
|
from .image_node import ImageNode
from .integer_node import IntegerNode
from .checkered_image_node import CheckeredImageNode
from .noise_image_node import NoiseImageNode
| 34
| 52
| 0.882353
| 22
| 170
| 6.545455
| 0.5
| 0.277778
| 0.3125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094118
| 170
| 4
| 53
| 42.5
| 0.935065
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9517d9455cea3c62fbd09f4b5abe152d6f2af07d
| 280
|
py
|
Python
|
core/learning_rate_schedules/__init__.py
|
wavce/classificationx
|
b3bb70c2329e4b7fa974c1608869a4c604d4452c
|
[
"MulanPSL-1.0"
] | 3
|
2021-01-05T14:34:34.000Z
|
2021-05-14T02:57:11.000Z
|
core/learning_rate_schedules/__init__.py
|
wavce/classificationx
|
b3bb70c2329e4b7fa974c1608869a4c604d4452c
|
[
"MulanPSL-1.0"
] | null | null | null |
core/learning_rate_schedules/__init__.py
|
wavce/classificationx
|
b3bb70c2329e4b7fa974c1608869a4c604d4452c
|
[
"MulanPSL-1.0"
] | null | null | null |
from .step_decay import StepDecay
from .tflr import PolynomialDecay, ExponentialDecay, CosineDecay, LinearCosineDecay, PiecewiseConstantDecay
__all__ = [
"StepDecay", "PolynomialDecay", "ExponentialDecay", "CosineDecay",
"LinearCosineDecay", "PiecewiseConstantDecay"
]
| 31.111111
| 107
| 0.789286
| 20
| 280
| 10.8
| 0.6
| 0.287037
| 0.388889
| 0.546296
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117857
| 280
| 8
| 108
| 35
| 0.874494
| 0
| 0
| 0
| 0
| 0
| 0.321429
| 0.078571
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1f8ea9b30f9580c4f3f59b0fdfd9308476171f71
| 186
|
py
|
Python
|
tests/integration/test_basic.py
|
jphkun/aeroframe
|
a5986177813db33e72ebe341865434c24e944878
|
[
"Apache-2.0"
] | 9
|
2020-05-25T22:04:29.000Z
|
2021-11-27T23:06:02.000Z
|
tests/integration/test_basic.py
|
jphkun/aeroframe
|
a5986177813db33e72ebe341865434c24e944878
|
[
"Apache-2.0"
] | 4
|
2019-09-17T13:48:26.000Z
|
2021-11-11T20:39:53.000Z
|
tests/integration/test_basic.py
|
jphkun/aeroframe
|
a5986177813db33e72ebe341865434c24e944878
|
[
"Apache-2.0"
] | 5
|
2019-09-20T18:47:17.000Z
|
2020-05-25T17:41:38.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import aeroframe.__version__ as v
def test_version():
    """Check the package exposes a string __version__ and a tuple VERSION."""
    version_string = v.__version__
    version_tuple = v.VERSION
    assert isinstance(version_string, str)
    assert isinstance(version_tuple, tuple)
| 18.6
| 41
| 0.698925
| 25
| 186
| 4.84
| 0.72
| 0.264463
| 0.280992
| 0.396694
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012821
| 0.16129
| 186
| 9
| 42
| 20.666667
| 0.762821
| 0.231183
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1f94d666dfe30357cb2fa430820aa6655bd28bc3
| 7,470
|
py
|
Python
|
py/orbit/diagnostics/diagnostics.py
|
LeoRya/py-orbit
|
340b14b6fd041ed8ec2cc25b0821b85742aabe0c
|
[
"MIT"
] | 17
|
2018-02-09T23:39:06.000Z
|
2022-03-04T16:27:04.000Z
|
py/orbit/diagnostics/diagnostics.py
|
LeoRya/py-orbit
|
340b14b6fd041ed8ec2cc25b0821b85742aabe0c
|
[
"MIT"
] | 22
|
2017-05-31T19:40:14.000Z
|
2021-09-24T22:07:47.000Z
|
py/orbit/diagnostics/diagnostics.py
|
LeoRya/py-orbit
|
340b14b6fd041ed8ec2cc25b0821b85742aabe0c
|
[
"MIT"
] | 37
|
2016-12-08T19:39:35.000Z
|
2022-02-11T19:59:34.000Z
|
#!/usr/bin/env python
"""
This is not a parallel version!
"""
# for mpi operations
import orbit_mpi
from orbit_mpi import mpi_comm
from orbit_mpi import mpi_datatype
from orbit_mpi import mpi_op
import math
import random
import sys
from bunch import BunchTwissAnalysis
from bunch import BunchTuneAnalysis
from orbit.utils.consts import speed_of_light
class StatLats:
    """
    Gathers and delivers the statistical Twiss parameters of a bunch,
    appending one tab-separated row per call to an output file.
    """

    def __init__(self, filename):
        # Append mode: successive calls/runs accumulate rows in one file.
        self.file_out = open(filename,"a")
        self.bunchtwissanalysis = BunchTwissAnalysis()

    def writeStatLats(self, s, bunch, lattlength = 0):
        """Analyze *bunch* and append one row: s, time, emittances,
        betas, alphas and horizontal dispersion terms.

        lattlength -- if > 0, the time column is expressed in units of
                      revolution periods (turns) instead of seconds.
        """
        self.bunchtwissanalysis.analyzeBunch(bunch)
        emitx = self.bunchtwissanalysis.getEmittance(0)
        betax = self.bunchtwissanalysis.getBeta(0)
        alphax = self.bunchtwissanalysis.getAlpha(0)
        betay = self.bunchtwissanalysis.getBeta(1)
        alphay = self.bunchtwissanalysis.getAlpha(1)
        emity = self.bunchtwissanalysis.getEmittance(1)
        dispersionx = self.bunchtwissanalysis.getDispersion(0)
        ddispersionx = self.bunchtwissanalysis.getDispersionDerivative(0)
        # NOTE(review): the vertical dispersion terms are computed but never
        # written to the file below — confirm whether that is intentional.
        dispersiony = self.bunchtwissanalysis.getDispersion(1)
        ddispersiony = self.bunchtwissanalysis.getDispersionDerivative(1)
        sp = bunch.getSyncParticle()
        time = sp.time()
        if lattlength > 0:
            # Convert elapsed time into number of turns.
            time = sp.time()/(lattlength/(sp.beta() * speed_of_light))
        # if mpi operations are enabled, this section of code will
        # determine the rank of the present node
        rank = 0 # default is primary node
        mpi_init = orbit_mpi.MPI_Initialized()
        comm = orbit_mpi.mpi_comm.MPI_COMM_WORLD
        if (mpi_init):
            rank = orbit_mpi.MPI_Comm_rank(comm)
        # only the primary node needs to output the calculated information
        if (rank == 0):
            self.file_out.write(str(s) + "\t" + str(time) + "\t" + str(emitx)+ "\t" + str(emity)+ "\t" + str(betax)+ "\t" + str(betay)+ "\t" + str(alphax)+ "\t" + str(alphay) +"\t" + str(dispersionx) + "\t" + str(ddispersionx) + "\n")

    def closeStatLats(self):
        # Flush and release the output file handle.
        self.file_out.close()
class StatLatsSetMember:
    """
    Delivers the statistical Twiss parameters of a bunch to an
    already-open file object (shared by a set of diagnostics),
    appending one tab-separated row per call.
    """

    def __init__(self, file):
        # Unlike StatLats, the caller owns and supplies the file object.
        self.file_out = file
        self.bunchtwissanalysis = BunchTwissAnalysis()

    def writeStatLats(self, s, bunch, lattlength = 0):
        """Analyze *bunch* and append one row: s, time, emittances,
        betas, alphas and horizontal dispersion terms.

        lattlength -- if > 0, the time column is expressed in units of
                      revolution periods (turns) instead of seconds.
        """
        self.bunchtwissanalysis.analyzeBunch(bunch)
        emitx = self.bunchtwissanalysis.getEmittance(0)
        betax = self.bunchtwissanalysis.getBeta(0)
        alphax = self.bunchtwissanalysis.getAlpha(0)
        betay = self.bunchtwissanalysis.getBeta(1)
        alphay = self.bunchtwissanalysis.getAlpha(1)
        emity = self.bunchtwissanalysis.getEmittance(1)
        dispersionx = self.bunchtwissanalysis.getDispersion(0)
        ddispersionx = self.bunchtwissanalysis.getDispersionDerivative(0)
        #dispersiony = self.bunchtwissanalysis.getDispersion(1, bunch)
        #ddispersiony = self.bunchtwissanalysis.getDispersionDerivative(1, bunch)
        sp = bunch.getSyncParticle()
        time = sp.time()
        if lattlength > 0:
            # Convert elapsed time into number of turns.
            time = sp.time()/(lattlength/(sp.beta() * speed_of_light))
        # if mpi operations are enabled, this section of code will
        # determine the rank of the present node
        rank = 0 # default is primary node
        mpi_init = orbit_mpi.MPI_Initialized()
        comm = orbit_mpi.mpi_comm.MPI_COMM_WORLD
        if (mpi_init):
            rank = orbit_mpi.MPI_Comm_rank(comm)
        # only the primary node needs to output the calculated information
        if (rank == 0):
            self.file_out.write(str(s) + "\t" + str(time) + "\t" + str(emitx)+ "\t" + str(emity)+ "\t" + str(betax)+ "\t" + str(betay)+ "\t" + str(alphax)+ "\t" + str(alphay) + "\t" + str(dispersionx) + "\t" + str(ddispersionx) +"\n")

    def closeStatLats(self):
        self.file_out.close()

    def resetFile(self, file):
        # Swap in a new output file object (e.g. after a rotation).
        self.file_out = file
class Moments:
    """
    Delivers the beam moments up to a given order, appending one
    tab-separated row per call to an output file.
    """

    def __init__(self, filename, order, nodispersion, emitnorm):
        # Append mode: successive calls accumulate rows in one file.
        self.file_out = open(filename,"a")
        self.bunchtwissanalysis = BunchTwissAnalysis()
        # Highest moment order written per row.
        self.order = order
        # Both flags are translated into +/-1 switches consumed by
        # computeBunchMoments() below.
        if(nodispersion == False):
            self.dispterm = -1
        else:
            self.dispterm = 1
        if(emitnorm == True):
            self.emitnormterm = 1
        else:
            self.emitnormterm = -1

    def writeMoments(self, s, bunch, lattlength = 0):
        """Compute the bunch moments and append one row: s, time, then
        every moment (i-j, j) for i in [0, order], j in [0, i].

        lattlength -- if > 0, the time column is expressed in units of
                      revolution periods (turns) instead of seconds.
        """
        sp = bunch.getSyncParticle()
        time = sp.time()
        if lattlength > 0:
            # Convert elapsed time into number of turns.
            time = sp.time()/(lattlength/(sp.beta() * speed_of_light))
        self.bunchtwissanalysis.computeBunchMoments(bunch, self.order, self.dispterm, self.emitnormterm)
        # if mpi operations are enabled, this section of code will
        # determine the rank of the present node
        rank = 0 # default is primary node
        mpi_init = orbit_mpi.MPI_Initialized()
        comm = orbit_mpi.mpi_comm.MPI_COMM_WORLD
        if (mpi_init):
            rank = orbit_mpi.MPI_Comm_rank(comm)
        # only the primary node needs to output the calculated information
        if (rank == 0):
            self.file_out.write(str(s) + "\t" + str(time) + "\t")
            # Triangular sweep: one column per moment (i-j, j).
            for i in range(0,self.order+1):
                for j in range(0,i+1):
                    self.file_out.write(str(self.bunchtwissanalysis.getBunchMoment(i-j,j)) + "\t")
            self.file_out.write("\n")

    def closeMoments(self):
        # Flush and release the output file handle.
        self.file_out.close()
class MomentsSetMember:
    """
    Delivers the beam moments up to a given order to an already-open
    file object (shared by a set of diagnostics), appending one
    tab-separated row per call.
    """

    def __init__(self, file, order, nodispersion, emitnorm):
        # Unlike Moments, the caller owns and supplies the file object.
        self.file_out = file
        self.order = order
        self.bunchtwissanalysis = BunchTwissAnalysis()
        # Both flags are translated into +/-1 switches consumed by
        # computeBunchMoments() below.
        if(nodispersion == False):
            self.dispterm = -1
        else:
            self.dispterm = 1
        if(emitnorm == True):
            self.emitnormterm = 1
        else:
            self.emitnormterm = -1

    def writeMoments(self, s, bunch, lattlength = 0 ):
        """Compute the bunch moments and append one row: s, time, then
        every moment (i-j, j) for i in [0, order], j in [0, i].

        lattlength -- if > 0, the time column is expressed in units of
                      revolution periods (turns) instead of seconds.
        """
        sp = bunch.getSyncParticle()
        time = sp.time()
        if lattlength > 0:
            # Convert elapsed time into number of turns.
            time = sp.time()/(lattlength/(sp.beta() * speed_of_light))
        self.bunchtwissanalysis.computeBunchMoments(bunch, self.order, self.dispterm, self.emitnormterm)
        # if mpi operations are enabled, this section of code will
        # determine the rank of the present node
        rank = 0 # default is primary node
        mpi_init = orbit_mpi.MPI_Initialized()
        comm = orbit_mpi.mpi_comm.MPI_COMM_WORLD
        if (mpi_init):
            rank = orbit_mpi.MPI_Comm_rank(comm)
        # only the primary node needs to output the calculated information
        if (rank == 0):
            self.file_out.write(str(s) + "\t" + str(time) + "\t")
            # Triangular sweep: one column per moment (i-j, j).
            for i in range(0,self.order+1):
                for j in range(0,i+1):
                    self.file_out.write(str(self.bunchtwissanalysis.getBunchMoment(i-j,j)) + "\t")
            self.file_out.write("\n")

    def resetFile(self, file):
        # Swap in a new output file object (e.g. after a rotation).
        self.file_out = file
class BPMSignal:
    """
    Delivers the average values of the transverse coordinates
    (x, x', y, y') of a bunch, i.e. a beam-position-monitor signal.
    """

    def __init__(self):
        self.bunchtwissanalysis = BunchTwissAnalysis()
        # Last measured averages; refreshed by analyzeSignal() on rank 0.
        self.xAvg = 0.0
        self.yAvg = 0.0
        self.xpAvg = 0.0
        self.ypAvg = 0.0

    def analyzeSignal(self, bunch):
        """Analyze *bunch* and cache its coordinate averages."""
        self.bunchtwissanalysis.analyzeBunch(bunch)
        # if mpi operations are enabled, this section of code will
        # determine the rank of the present node
        rank = 0 # default is primary node
        mpi_init = orbit_mpi.MPI_Initialized()
        comm = orbit_mpi.mpi_comm.MPI_COMM_WORLD
        if (mpi_init):
            rank = orbit_mpi.MPI_Comm_rank(comm)
        # only the primary node needs to output the calculated information
        # NOTE(review): non-zero ranks keep stale/initial averages after this
        # call — confirm the getters are only consulted on rank 0.
        if (rank == 0):
            self.xAvg = self.bunchtwissanalysis.getAverage(0)
            self.xpAvg = self.bunchtwissanalysis.getAverage(1)
            self.yAvg = self.bunchtwissanalysis.getAverage(2)
            self.ypAvg = self.bunchtwissanalysis.getAverage(3)

    def getSignalX(self):
        # Cached horizontal position average from the last analyzeSignal().
        return self.xAvg

    def getSignalXP(self):
        # Cached horizontal angle average.
        return self.xpAvg

    def getSignalY(self):
        # Cached vertical position average.
        return self.yAvg

    def getSignalYP(self):
        # Cached vertical angle average.
        return self.ypAvg
| 30
| 226
| 0.711647
| 1,001
| 7,470
| 5.201798
| 0.136863
| 0.152103
| 0.035913
| 0.028807
| 0.825619
| 0.789706
| 0.768581
| 0.768581
| 0.750144
| 0.702324
| 0
| 0.010647
| 0.170147
| 7,470
| 248
| 227
| 30.120968
| 0.829327
| 0.184605
| 0
| 0.748428
| 0
| 0
| 0.009649
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.119497
| false
| 0
| 0.062893
| 0.025157
| 0.238994
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2f0f99bf2d3fb78f9dfa17d6cdc3270aef08ce1b
| 219
|
py
|
Python
|
torch_model_state/__init__.py
|
FebruaryBreeze/torch-model-state
|
be457be8eed2e867a19891ee6fbef55c30b017ad
|
[
"MIT"
] | null | null | null |
torch_model_state/__init__.py
|
FebruaryBreeze/torch-model-state
|
be457be8eed2e867a19891ee6fbef55c30b017ad
|
[
"MIT"
] | null | null | null |
torch_model_state/__init__.py
|
FebruaryBreeze/torch-model-state
|
be457be8eed2e867a19891ee6fbef55c30b017ad
|
[
"MIT"
] | null | null | null |
from .cli_main import cli_main
from .__version__ import __version__
from .state_utils import from_state, to_state
from .state_utils import load_model_from_state
from .state_utils import load_state_file, save_state_file
| 36.5
| 57
| 0.86758
| 36
| 219
| 4.666667
| 0.333333
| 0.267857
| 0.25
| 0.357143
| 0.345238
| 0.345238
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100457
| 219
| 5
| 58
| 43.8
| 0.852792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2f29d7fca140dafd4a3d272c4cb64b7fb8a203e7
| 13,012
|
py
|
Python
|
gluebox/tests/utils/test_metadata.py
|
mwhahaha/gluebox
|
f8f2ac0f434418fc24143c3a7691517e0742574a
|
[
"Apache-2.0"
] | null | null | null |
gluebox/tests/utils/test_metadata.py
|
mwhahaha/gluebox
|
f8f2ac0f434418fc24143c3a7691517e0742574a
|
[
"Apache-2.0"
] | null | null | null |
gluebox/tests/utils/test_metadata.py
|
mwhahaha/gluebox
|
f8f2ac0f434418fc24143c3a7691517e0742574a
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslotest import base
from gluebox.utils.metadata import MetadataManager
class TestMetadata(base.BaseTestCase):
"""Test case for metadata utils."""
def setUp(self):
super(TestMetadata, self).setUp()
def _dummy_metadata(self):
data = {
u'issues_url': u'https://bugs.launchpad.net/puppet-aodh',
u'requirements': [
{u'name': u'pe', u'version_requirement': u'4.x'},
{u'name': u'puppet', u'version_requirement': u'4.x'}
],
u'name': u'openstack-aodh',
u'license': u'Apache-2.0',
u'author': u'OpenStack Contributors',
u'project_page': u'https://launchpad.net/puppet-aodh',
u'operatingsystem_support': [
{u'operatingsystemrelease': [u'8'],
u'operatingsystem': u'Debian'},
{u'operatingsystemrelease': [u'24'],
u'operatingsystem': u'Fedora'},
{u'operatingsystemrelease': [u'7'],
u'operatingsystem': u'RedHat'},
{u'operatingsystemrelease': [u'16.04'],
u'operatingsystem': u'Ubuntu'}
],
u'summary': u'Puppet module for OpenStack Aodh',
u'source': u'git://github.com/openstack/puppet-aodh.git',
u'dependencies': [
{u'name': u'puppetlabs/inifile',
u'version_requirement': u'>=1.0.0 <2.0.0'},
{u'name': u'puppetlabs/stdlib',
u'version_requirement': u'>= 4.2.0 <5.0.0'},
{u'name': u'openstack/keystone',
u'version_requirement': u'>=11.0.0 <12.0.0'},
{u'name': u'openstack/openstacklib',
u'version_requirement': u'>=11.0.0 <12.0.0'},
{u'name': u'openstack/oslo',
u'version_requirement': u'>=11.0.0 <12.0.0'}
],
u'version': u'11.0.0',
u'description': u'Installs and configures OpenStack Aodh.'
}
return data
def test_get_current_version(self):
manager = MetadataManager('puppet-aodh', 'openstack')
manager.metadata = self._dummy_metadata()
self.assertEqual(manager.get_current_version(), '11.0.0')
def test_fix_dependencies_major(self):
manager = MetadataManager('puppet-aodh', 'openstack')
manager.metadata = self._dummy_metadata()
manager._fix_dependencies_major()
updated_deps = [
{u'name': u'puppetlabs/inifile',
u'version_requirement': u'>=1.0.0 <2.0.0'},
{u'name': u'puppetlabs/stdlib',
u'version_requirement': u'>= 4.2.0 <5.0.0'},
{u'name': u'openstack/keystone',
u'version_requirement': u'>=12.0.0 <13.0.0'},
{u'name': u'openstack/openstacklib',
u'version_requirement': u'>=12.0.0 <13.0.0'},
{u'name': u'openstack/oslo',
u'version_requirement': u'>=12.0.0 <13.0.0'}
]
self.assertEqual(manager.metadata['dependencies'], updated_deps)
def test_fix_dependencies_minor(self):
    """_fix_dependencies_minor(): only openstack/* deps get their lower bound bumped."""
    mgr = MetadataManager('puppet-aodh', 'openstack')
    mgr.metadata = self._dummy_metadata()
    mgr._fix_dependencies_minor()
    # Non-openstack dependencies must be left untouched.
    expected = [
        {u'name': u'puppetlabs/inifile',
         u'version_requirement': u'>=1.0.0 <2.0.0'},
        {u'name': u'puppetlabs/stdlib',
         u'version_requirement': u'>= 4.2.0 <5.0.0'},
    ]
    expected += [
        {u'name': u'openstack/%s' % project,
         u'version_requirement': u'>=11.1.0 <12.0.0'}
        for project in (u'keystone', u'openstacklib', u'oslo')
    ]
    self.assertEqual(expected, mgr.metadata['dependencies'])
def test_major_bump(self):
    """major_bump(): openstack deps and the module version jump to the next major."""
    mgr = MetadataManager('puppet-aodh', 'openstack')
    mgr.metadata = self._dummy_metadata()
    # Stub out file I/O so only the in-memory metadata is exercised.
    mgr._parse = mock.MagicMock()
    mgr._write = mock.MagicMock()
    mgr.major_bump()
    expected = [
        {u'name': u'puppetlabs/inifile',
         u'version_requirement': u'>=1.0.0 <2.0.0'},
        {u'name': u'puppetlabs/stdlib',
         u'version_requirement': u'>= 4.2.0 <5.0.0'},
    ]
    expected += [
        {u'name': u'openstack/%s' % project,
         u'version_requirement': u'>=12.0.0 <13.0.0'}
        for project in (u'keystone', u'openstacklib', u'oslo')
    ]
    self.assertEqual(expected, mgr.metadata['dependencies'])
    self.assertEqual('12.0.0', mgr.metadata['version'])
    mgr._parse.assert_called_once()
    mgr._write.assert_called_once()
def test_major_bump_with_static_dev_version(self):
    """major_bump(version, True): deps still bumped, version forced to '<given>-dev'."""
    mgr = MetadataManager('puppet-aodh', 'openstack')
    mgr.metadata = self._dummy_metadata()
    # Stub out file I/O so only the in-memory metadata is exercised.
    mgr._parse = mock.MagicMock()
    mgr._write = mock.MagicMock()
    mgr.major_bump('12.0.1', True)
    expected = [
        {u'name': u'puppetlabs/inifile',
         u'version_requirement': u'>=1.0.0 <2.0.0'},
        {u'name': u'puppetlabs/stdlib',
         u'version_requirement': u'>= 4.2.0 <5.0.0'},
    ]
    expected += [
        {u'name': u'openstack/%s' % project,
         u'version_requirement': u'>=12.0.0 <13.0.0'}
        for project in (u'keystone', u'openstacklib', u'oslo')
    ]
    self.assertEqual(expected, mgr.metadata['dependencies'])
    self.assertEqual('12.0.1-dev', mgr.metadata['version'])
    mgr._parse.assert_called_once()
    mgr._write.assert_called_once()
def test_major_bump_skip_deps(self):
    """major_bump(skip_update_deps=True): version bumped, dependencies untouched."""
    mgr = MetadataManager('puppet-aodh', 'openstack')
    mgr.metadata = self._dummy_metadata()
    # Stub out file I/O so only the in-memory metadata is exercised.
    mgr._parse = mock.MagicMock()
    mgr._write = mock.MagicMock()
    mgr.major_bump(skip_update_deps=True)
    # All dependency ranges stay exactly as in the dummy metadata.
    expected = [
        {u'name': u'puppetlabs/inifile',
         u'version_requirement': u'>=1.0.0 <2.0.0'},
        {u'name': u'puppetlabs/stdlib',
         u'version_requirement': u'>= 4.2.0 <5.0.0'},
    ]
    expected += [
        {u'name': u'openstack/%s' % project,
         u'version_requirement': u'>=11.0.0 <12.0.0'}
        for project in (u'keystone', u'openstacklib', u'oslo')
    ]
    self.assertEqual(expected, mgr.metadata['dependencies'])
    self.assertEqual('12.0.0', mgr.metadata['version'])
    mgr._parse.assert_called_once()
    mgr._write.assert_called_once()
def test_minor_bump(self):
    """minor_bump(): openstack deps and the module version get a minor bump."""
    mgr = MetadataManager('puppet-aodh', 'openstack')
    mgr.metadata = self._dummy_metadata()
    # Stub out file I/O so only the in-memory metadata is exercised.
    mgr._parse = mock.MagicMock()
    mgr._write = mock.MagicMock()
    mgr.minor_bump()
    expected = [
        {u'name': u'puppetlabs/inifile',
         u'version_requirement': u'>=1.0.0 <2.0.0'},
        {u'name': u'puppetlabs/stdlib',
         u'version_requirement': u'>= 4.2.0 <5.0.0'},
    ]
    expected += [
        {u'name': u'openstack/%s' % project,
         u'version_requirement': u'>=11.1.0 <12.0.0'}
        for project in (u'keystone', u'openstacklib', u'oslo')
    ]
    self.assertEqual(expected, mgr.metadata['dependencies'])
    self.assertEqual('11.1.0', mgr.metadata['version'])
    mgr._parse.assert_called_once()
    mgr._write.assert_called_once()
def test_minor_bump_with_static_dev_version(self):
    """minor_bump(version, True): deps still bumped, version forced to '<given>-dev'."""
    mgr = MetadataManager('puppet-aodh', 'openstack')
    mgr.metadata = self._dummy_metadata()
    # Stub out file I/O so only the in-memory metadata is exercised.
    mgr._parse = mock.MagicMock()
    mgr._write = mock.MagicMock()
    mgr.minor_bump('11.1.1', True)
    expected = [
        {u'name': u'puppetlabs/inifile',
         u'version_requirement': u'>=1.0.0 <2.0.0'},
        {u'name': u'puppetlabs/stdlib',
         u'version_requirement': u'>= 4.2.0 <5.0.0'},
    ]
    expected += [
        {u'name': u'openstack/%s' % project,
         u'version_requirement': u'>=11.1.0 <12.0.0'}
        for project in (u'keystone', u'openstacklib', u'oslo')
    ]
    self.assertEqual(expected, mgr.metadata['dependencies'])
    self.assertEqual('11.1.1-dev', mgr.metadata['version'])
    mgr._parse.assert_called_once()
    mgr._write.assert_called_once()
def test_minor_bump_skip_deps(self):
    """minor_bump(skip_update_deps=True): version bumped, dependencies untouched."""
    mgr = MetadataManager('puppet-aodh', 'openstack')
    mgr.metadata = self._dummy_metadata()
    # Stub out file I/O so only the in-memory metadata is exercised.
    mgr._parse = mock.MagicMock()
    mgr._write = mock.MagicMock()
    mgr.minor_bump(skip_update_deps=True)
    # All dependency ranges stay exactly as in the dummy metadata.
    expected = [
        {u'name': u'puppetlabs/inifile',
         u'version_requirement': u'>=1.0.0 <2.0.0'},
        {u'name': u'puppetlabs/stdlib',
         u'version_requirement': u'>= 4.2.0 <5.0.0'},
    ]
    expected += [
        {u'name': u'openstack/%s' % project,
         u'version_requirement': u'>=11.0.0 <12.0.0'}
        for project in (u'keystone', u'openstacklib', u'oslo')
    ]
    self.assertEqual(expected, mgr.metadata['dependencies'])
    self.assertEqual('11.1.0', mgr.metadata['version'])
    mgr._parse.assert_called_once()
    mgr._write.assert_called_once()
def test_dev_remove(self):
    """dev_remove(): strips the '-dev' suffix, leaving dependencies untouched."""
    mgr = MetadataManager('puppet-aodh', 'openstack')
    mgr.metadata = self._dummy_metadata()
    mgr.metadata['version'] = '11.0.0-dev'
    # Stub out file I/O so only the in-memory metadata is exercised.
    mgr._parse = mock.MagicMock()
    mgr._write = mock.MagicMock()
    mgr.dev_remove()
    # All dependency ranges stay exactly as in the dummy metadata.
    expected = [
        {u'name': u'puppetlabs/inifile',
         u'version_requirement': u'>=1.0.0 <2.0.0'},
        {u'name': u'puppetlabs/stdlib',
         u'version_requirement': u'>= 4.2.0 <5.0.0'},
    ]
    expected += [
        {u'name': u'openstack/%s' % project,
         u'version_requirement': u'>=11.0.0 <12.0.0'}
        for project in (u'keystone', u'openstacklib', u'oslo')
    ]
    self.assertEqual(expected, mgr.metadata['dependencies'])
    self.assertEqual('11.0.0', mgr.metadata['version'])
    mgr._parse.assert_called_once()
    mgr._write.assert_called_once()
def test_dev_remove_with_static_version(self):
    """dev_remove(version): the explicit version wins over suffix stripping."""
    mgr = MetadataManager('puppet-aodh', 'openstack')
    mgr.metadata = self._dummy_metadata()
    mgr.metadata['version'] = '11.0.0-dev'
    # Stub out file I/O so only the in-memory metadata is exercised.
    mgr._parse = mock.MagicMock()
    mgr._write = mock.MagicMock()
    mgr.dev_remove('11.0.0-abc')
    # All dependency ranges stay exactly as in the dummy metadata.
    expected = [
        {u'name': u'puppetlabs/inifile',
         u'version_requirement': u'>=1.0.0 <2.0.0'},
        {u'name': u'puppetlabs/stdlib',
         u'version_requirement': u'>= 4.2.0 <5.0.0'},
    ]
    expected += [
        {u'name': u'openstack/%s' % project,
         u'version_requirement': u'>=11.0.0 <12.0.0'}
        for project in (u'keystone', u'openstacklib', u'oslo')
    ]
    self.assertEqual(expected, mgr.metadata['dependencies'])
    self.assertEqual('11.0.0-abc', mgr.metadata['version'])
    mgr._parse.assert_called_once()
    mgr._write.assert_called_once()
| 44.714777
| 75
| 0.575469
| 1,632
| 13,012
| 4.448529
| 0.095588
| 0.027273
| 0.047934
| 0.157025
| 0.826171
| 0.819835
| 0.819835
| 0.819835
| 0.819835
| 0.812121
| 0
| 0.049269
| 0.263757
| 13,012
| 291
| 76
| 44.714777
| 0.70856
| 0.042499
| 0
| 0.739464
| 0
| 0
| 0.33778
| 0.031745
| 0
| 0
| 0
| 0
| 0.1341
| 1
| 0.049808
| false
| 0
| 0.011494
| 0
| 0.068966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2f55d0f2548d2147118bdcb6209137046fe83558
| 13,867
|
py
|
Python
|
baidu_translate.py
|
lj28478416/Baidu_Translate
|
a4a97f95f6b1cf2fbfec30e6ca0979e39da28cbd
|
[
"Apache-2.0"
] | null | null | null |
baidu_translate.py
|
lj28478416/Baidu_Translate
|
a4a97f95f6b1cf2fbfec30e6ca0979e39da28cbd
|
[
"Apache-2.0"
] | null | null | null |
baidu_translate.py
|
lj28478416/Baidu_Translate
|
a4a97f95f6b1cf2fbfec30e6ca0979e39da28cbd
|
[
"Apache-2.0"
] | null | null | null |
import requests
import re
from jsonpath import jsonpath
import execjs
class Baidu_Spider:
    """Translates text via the undocumented fanyi.baidu.com web API.

    Workflow (see main()): scrape the anti-bot 'token' and 'gtk' values from
    the translation home page, auto-detect the source language, compute the
    request 'sign' by running the site's own JavaScript (sign.js via execjs),
    then POST the signed payload to /v2transapi and print the translation.

    NOTE(review): the Cookie / User-Agent headers below are hard-coded
    captures of a 2018 browser session; the site may reject them today.
    """

    def __init__(self, str1):
        # str1: the text to translate.
        self.str1 = str1
        # Form payload for /v2transapi. 'from'/'to' are filled by
        # get_language_class(), 'sign' by get_sign(), 'token' by
        # get_token_gtk().
        self.data = {
            'from': None,
            'to': None,
            'query': self.str1,
            # 'transtype': 'realtime',
            'simple_means_flag': '3',
            'sign': None,
            'token': None
        }
        # NOTE(review): self.headers is never referenced by the other methods
        # shown here (each builds its own headers dict), and this
        # Content-Length (character count, not byte count) would be wrong for
        # multi-byte input anyway — candidate for removal; confirm no external
        # callers use it.
        self.headers = {
            'Accept': '*/*',
            'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
            'Connection': 'keep-alive',
            'Content-Length': str(len(self.str1)),
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'Cookie': 'BIDUPSID=5F41AD72DE2B70C7E5B720C1AAC5D934; PSTM=1520767606; __cfduid=d80aee4399f1af462023406b1186041001521651362; BDUSS=dIODJYZUliaUpjM0ZLT2FWQlczdVZMWHdLODhuWHdGeGZGdjVPZ045a0ctdHBhQVFBQUFBJCQAAAAAAAAAAAEAAABAWnAJ1b25-rXEs~bX4rO1AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAZts1oGbbNaUl; REALTIME_TRANS_SWITCH=1; FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1; SOUND_PREFER_SWITCH=1; BDORZ=FFFB88E999055A3F8A630C64834BD6D0; BAIDUID=9FB74B58935632576109BF783DAA9891:FG=1; H_PS_PSSID=1452_26459_21103_22073; BDSFRCVID=DCCsJeC629o9EeO7s9P4Ur6YL_gB6hcTH6aoAYcV90HgLjMCvnQkEG0PDx8g0Kub-jINogKK3gOTH4nP; H_BDCLCKID_SF=tJPqoCtKtCvHfP8k-tcH244Hqxby26n-fRneaJ5n0-nnhP3vXqKbKPuVjUnRqPb-benabUoMJnOG8RKRy6C-jTvBDG_eq-JQ2C7WsJjs24ThD6rnhPF3XlKvKP6-3MJO3b7CLR0KJ-OVh43SMn3D3h8mqt6baf3A0ekeohFLtDKKMD-CDT035n-Wql702-6MaC6y0njVanI_Hn7zentbQbtpbt-qJj5L3CO03K3b3hbA8DbC2xTV5njQhh5nBT5Ka26CQ4jCbUj0VnKwjUJV3btkQN3T0nkO5bRiLRoLBxTcDn3oyTbJXp0njMTTqjDJfRCJoCPyfbP_etbg-trSMDCShUFs547C-2Q-5KL-fRABsJ5FjnbnQhLIjhjEWJvn2DTCLfbdJJjoEfTn36u2y--TM4cm-t6I22TxoUJgMInJhhvG-Cc8yfCebPRiJ-b9Qg-JWftLtDKbbD-Cj6-3Mb03K2TEaPcQ2C6X0njVanI_HnurbhOzKUI8LNDHtjcvWe3B_-O-5M7lqxb9Mt_hD-443bO7ttoyt6CfoJ3JyloDEnP9KljSeML1Db3BW6vMtg3t3tQ6aDnoepvoDPJc3Mv3hR0EJjLJJbkJoCLQb-3bK43ph4Oo5tFQqxby26nvWgneaJ5nJDohExo50RKbKTFh-noHtqcr565KsCJvQpP-HJ7DbRQD5bFiMbj33q58JCrCKl0MLpbWbb0xynoDDqKvKfnMBMPe52OnaIb8LIFKMKIlj5LWe5PJqxoJKPo0aIKX3b7EfhbTEPO_bf--D6Feeto-W5befG4J-4_KylRrep_CM-bxy5K_hPQk5hK8tgbM2ncz0-Q_VJvHQT3mQbvbbN3i-4DL2e5wWb3cWKOJ8UbSj-Tme4tX-NFqJTKj3f; Hm_lvt_64ecd82404c51e03dc91cb9e8c025574=1531655355,1531655743,1531656413,1531656420; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574=1531656420; locale=zh; to_lang_often=%5B%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%2C%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%5D; from_lang_often=%5B%7B%22value%22%3A%22spa%22%2C%22text%22%3A%22%u897F%u73ED%u7259%u8BED%22%7D%2C%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%2C%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%5D',
            'Host': 'fanyi.baidu.com',
            'Origin': 'http://fanyi.baidu.com',
            'Referer': 'http://fanyi.baidu.com/?aldtype=16047',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36',
            'X-Requested-With': 'XMLHttpRequest'
        }

    def get_token_gtk(self):
        """Fetch the translation home page and scrape the anti-bot values.

        Stores the page's 'token' into self.data and 'gtk' onto self.gtk;
        both are needed to sign /v2transapi requests.
        Raises AttributeError if the page markup changes and a regex fails to
        match (re.search returns None).
        """
        headers = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
            'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
            'Cache-Control': 'max-age=0',
            'Connection': 'keep-alive',
            'Cookie': 'BIDUPSID=5F41AD72DE2B70C7E5B720C1AAC5D934; PSTM=1520767606; __cfduid=d80aee4399f1af462023406b1186041001521651362; BDUSS=dIODJYZUliaUpjM0ZLT2FWQlczdVZMWHdLODhuWHdGeGZGdjVPZ045a0ctdHBhQVFBQUFBJCQAAAAAAAAAAAEAAABAWnAJ1b25-rXEs~bX4rO1AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAZts1oGbbNaUl; REALTIME_TRANS_SWITCH=1; FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1; SOUND_PREFER_SWITCH=1; BDORZ=FFFB88E999055A3F8A630C64834BD6D0; BAIDUID=9FB74B58935632576109BF783DAA9891:FG=1; H_PS_PSSID=1452_26459_21103_22073; BDSFRCVID=DCCsJeC629o9EeO7s9P4Ur6YL_gB6hcTH6aoAYcV90HgLjMCvnQkEG0PDx8g0Kub-jINogKK3gOTH4nP; H_BDCLCKID_SF=tJPqoCtKtCvHfP8k-tcH244Hqxby26n-fRneaJ5n0-nnhP3vXqKbKPuVjUnRqPb-benabUoMJnOG8RKRy6C-jTvBDG_eq-JQ2C7WsJjs24ThD6rnhPF3XlKvKP6-3MJO3b7CLR0KJ-OVh43SMn3D3h8mqt6baf3A0ekeohFLtDKKMD-CDT035n-Wql702-6MaC6y0njVanI_Hn7zentbQbtpbt-qJj5L3CO03K3b3hbA8DbC2xTV5njQhh5nBT5Ka26CQ4jCbUj0VnKwjUJV3btkQN3T0nkO5bRiLRoLBxTcDn3oyTbJXp0njMTTqjDJfRCJoCPyfbP_etbg-trSMDCShUFs547C-2Q-5KL-fRABsJ5FjnbnQhLIjhjEWJvn2DTCLfbdJJjoEfTn36u2y--TM4cm-t6I22TxoUJgMInJhhvG-Cc8yfCebPRiJ-b9Qg-JWftLtDKbbD-Cj6-3Mb03K2TEaPcQ2C6X0njVanI_HnurbhOzKUI8LNDHtjcvWe3B_-O-5M7lqxb9Mt_hD-443bO7ttoyt6CfoJ3JyloDEnP9KljSeML1Db3BW6vMtg3t3tQ6aDnoepvoDPJc3Mv3hR0EJjLJJbkJoCLQb-3bK43ph4Oo5tFQqxby26nvWgneaJ5nJDohExo50RKbKTFh-noHtqcr565KsCJvQpP-HJ7DbRQD5bFiMbj33q58JCrCKl0MLpbWbb0xynoDDqKvKfnMBMPe52OnaIb8LIFKMKIlj5LWe5PJqxoJKPo0aIKX3b7EfhbTEPO_bf--D6Feeto-W5befG4J-4_KylRrep_CM-bxy5K_hPQk5hK8tgbM2ncz0-Q_VJvHQT3mQbvbbN3i-4DL2e5wWb3cWKOJ8UbSj-Tme4tX-NFqJTKj3f; Hm_lvt_64ecd82404c51e03dc91cb9e8c025574=1531655355,1531655743,1531656413,1531656420; locale=zh; to_lang_often=%5B%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%2C%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%5D; from_lang_often=%5B%7B%22value%22%3A%22spa%22%2C%22text%22%3A%22%u897F%u73ED%u7259%u8BED%22%7D%2C%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%2C%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%5D; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574=1531659240',
            'Host': 'fanyi.baidu.com',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.117 Safari/537.36'
        }
        url1 = 'http://fanyi.baidu.com/?'
        response = requests.get(url1, headers=headers).content.decode()
        # The page embeds:  token: '<value>',
        partten = re.compile(r"token:\s'(.*?)',")
        token = re.search(partten, response).group(1)
        # The page embeds:  window.gtk = '<digits.digits>';
        partten1 = re.compile(r"window.gtk\s=\s'([\d\.]+?)';")
        gtk = re.search(partten1, response).group(1)
        self.data['token'] = token
        self.gtk = gtk

    def get_language_class(self):
        """Auto-detect the query language and set the translation direction.

        POSTs (at most the first 50 characters of) the query to /langdetect
        and reads the detected language from the response's 'lan' field.
        Chinese input translates to English; anything else translates to
        Chinese (only zh<->en is supported).
        """
        url = 'http://fanyi.baidu.com/langdetect'
        headers = {
            'Accept': '*/*',
            'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            # Same captured session cookie as elsewhere, split across
            # implicitly-concatenated string literals for line length.
            'Cookie': 'BIDUPSID=5F41AD72DE2B70C7E5B720C1AAC5D934; PSTM=1520767606;'
                      ' __cfduid=d80aee4399f1af462023406b1186041001521651362;'
                      ' BDUSS=dIODJYZUliaUpjM0ZLT2FWQlczdVZMWHdLODhuWHdGeGZGdj'
                      'VPZ045a0ctdHBhQVFBQUFBJCQAAAAAAAAAAAEAAABAWnAJ1b25-rXEs'
                      '~bX4rO1AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
                      'AAAAAAAAAAAAAAAAAAAZts1oGbbNaUl; REALTIME_TRANS_SWITCH=1;'
                      ' FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1;'
                      ' SOUND_PREFER_SWITCH=1; BDORZ=FFFB88E999055A3F8A630C64834BD6D0;'
                      ' BAIDUID=9FB74B58935632576109BF783DAA9891:FG=1;'
                      ' H_PS_PSSID=1452_26459_21103_22073;'
                      ' BDSFRCVID=DCCsJeC629o9EeO7s9P4Ur6YL_gB6hcTH6ao'
                      'AYcV90HgLjMCvnQkEG0PDx8g0Kub-jINogKK3gOTH4nP; '
                      'H_BDCLCKID_SF=tJPqoCtKtCvHfP8k-tcH244Hqxby26n-fRnea'
                      'J5n0-nnhP3vXqKbKPuVjUnRqPb-benabUoMJnOG8RKRy6C-jTvBD'
                      'G_eq-JQ2C7WsJjs24ThD6rnhPF3XlKvKP6-3MJO3b7CLR0KJ-OVh'
                      '43SMn3D3h8mqt6baf3A0ekeohFLtDKKMD-CDT035n-Wql702-6Ma'
                      'C6y0njVanI_Hn7zentbQbtpbt-qJj5L3CO03K3b3hbA8DbC2xTV5n'
                      'jQhh5nBT5Ka26CQ4jCbUj0VnKwjUJV3btkQN3T0nkO5bRiLRoLBxTc'
                      'Dn3oyTbJXp0njMTTqjDJfRCJoCPyfbP_etbg-trSMDCShUFs547C-2'
                      'Q-5KL-fRABsJ5FjnbnQhLIjhjEWJvn2DTCLfbdJJjoEfTn36u2y--T'
                      'M4cm-t6I22TxoUJgMInJhhvG-Cc8yfCebPRiJ-b9Qg-JWftLtDKbbD-'
                      'Cj6-3Mb03K2TEaPcQ2C6X0njVanI_HnurbhOzKUI8LNDHtjcvWe3B_-O'
                      '-5M7lqxb9Mt_hD-443bO7ttoyt6CfoJ3JyloDEnP9KljSeML1Db3BW6v'
                      'Mtg3t3tQ6aDnoepvoDPJc3Mv3hR0EJjLJJbkJoCLQb-3bK43ph4Oo5tF'
                      'Qqxby26nvWgneaJ5nJDohExo50RKbKTFh-noHtqcr565KsCJvQpP-HJ7'
                      'DbRQD5bFiMbj33q58JCrCKl0MLpbWbb0xynoDDqKvKfnMBMPe52OnaIb'
                      '8LIFKMKIlj5LWe5PJqxoJKPo0aIKX3b7EfhbTEPO_bf--D6Feeto-W5b'
                      'efG4J-4_KylRrep_CM-bxy5K_hPQk5hK8tgbM2ncz0-Q_VJvHQT3mQbv'
                      'bbN3i-4DL2e5wWb3cWKOJ8UbSj-Tme4tX-NFqJTKj3f; Hm_lvt_64ec'
                      'd82404c51e03dc91cb9e8c025574=1531655355,1531655743,15316'
                      '56413,1531656420; locale=zh; to_lang_often=%5B%7B%22valu'
                      'e%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%2C%'
                      '7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%'
                      '22%7D%5D; from_lang_often=%5B%7B%22value%22%3A%22spa%22%'
                      '2C%22text%22%3A%22%u897F%u73ED%u7259%u8BED%22%7D%2C%7B%2'
                      '2value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7'
                      'D%2C%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u'
                      '8BED%22%7D%5D; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574='
                      '1531659249',
            'Host': 'fanyi.baidu.com',
            'Origin': 'http://fanyi.baidu.com',
            'Referer': 'http://fanyi.baidu.com/?',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/66.0.3359.117 Safari/537.36',
            'X-Requested-With': 'XMLHttpRequest'
        }
        data = {
            'query': self.str1
        }
        # The detector only needs a sample; cap the probe at 50 characters.
        if len(data['query']) >= 50:
            data['query'] = data['query'][:50]
        language_class_response = requests.post(url, headers=headers, data=data).json()
        language_class = jsonpath(language_class_response, '$..lan')[0]
        if language_class == 'zh':
            self.data['from'] = 'zh'
            self.data['to'] = 'en'
        else:
            self.data['from'] = 'en'
            self.data['to'] = 'zh'

    def get_sign(self):
        """Compute self.data['sign'] by running the site's own sign.js.

        Requires a 'sign.js' file in the current working directory exposing a
        hash(query, gtk) function; get_token_gtk() must have run first so
        self.gtk exists.
        """
        self.data['sign'] = execjs.compile(open(r"sign.js").read()).call('hash', self.str1, self.gtk)

    def open_url(self):
        """POST the signed payload to /v2transapi and print the translation.

        Prints the original query, then the first 'dst' (destination text)
        value found anywhere in the JSON response.
        """
        url2 = 'http://fanyi.baidu.com/v2transapi'
        headers = {
            'Accept': '*/*',
            'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'Cookie': 'BIDUPSID=5F41AD72DE2B70C7E5B720C1AAC5D934; PSTM=1520767606; __cfduid=d80aee4399f1af462023406b1186041001521651362; BDUSS=dIODJYZUliaUpjM0ZLT2FWQlczdVZMWHdLODhuWHdGeGZGdjVPZ045a0ctdHBhQVFBQUFBJCQAAAAAAAAAAAEAAABAWnAJ1b25-rXEs~bX4rO1AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAZts1oGbbNaUl; REALTIME_TRANS_SWITCH=1; FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1; SOUND_PREFER_SWITCH=1; BDORZ=FFFB88E999055A3F8A630C64834BD6D0; BAIDUID=9FB74B58935632576109BF783DAA9891:FG=1; H_PS_PSSID=1452_26459_21103_22073; BDSFRCVID=DCCsJeC629o9EeO7s9P4Ur6YL_gB6hcTH6aoAYcV90HgLjMCvnQkEG0PDx8g0Kub-jINogKK3gOTH4nP; H_BDCLCKID_SF=tJPqoCtKtCvHfP8k-tcH244Hqxby26n-fRneaJ5n0-nnhP3vXqKbKPuVjUnRqPb-benabUoMJnOG8RKRy6C-jTvBDG_eq-JQ2C7WsJjs24ThD6rnhPF3XlKvKP6-3MJO3b7CLR0KJ-OVh43SMn3D3h8mqt6baf3A0ekeohFLtDKKMD-CDT035n-Wql702-6MaC6y0njVanI_Hn7zentbQbtpbt-qJj5L3CO03K3b3hbA8DbC2xTV5njQhh5nBT5Ka26CQ4jCbUj0VnKwjUJV3btkQN3T0nkO5bRiLRoLBxTcDn3oyTbJXp0njMTTqjDJfRCJoCPyfbP_etbg-trSMDCShUFs547C-2Q-5KL-fRABsJ5FjnbnQhLIjhjEWJvn2DTCLfbdJJjoEfTn36u2y--TM4cm-t6I22TxoUJgMInJhhvG-Cc8yfCebPRiJ-b9Qg-JWftLtDKbbD-Cj6-3Mb03K2TEaPcQ2C6X0njVanI_HnurbhOzKUI8LNDHtjcvWe3B_-O-5M7lqxb9Mt_hD-443bO7ttoyt6CfoJ3JyloDEnP9KljSeML1Db3BW6vMtg3t3tQ6aDnoepvoDPJc3Mv3hR0EJjLJJbkJoCLQb-3bK43ph4Oo5tFQqxby26nvWgneaJ5nJDohExo50RKbKTFh-noHtqcr565KsCJvQpP-HJ7DbRQD5bFiMbj33q58JCrCKl0MLpbWbb0xynoDDqKvKfnMBMPe52OnaIb8LIFKMKIlj5LWe5PJqxoJKPo0aIKX3b7EfhbTEPO_bf--D6Feeto-W5befG4J-4_KylRrep_CM-bxy5K_hPQk5hK8tgbM2ncz0-Q_VJvHQT3mQbvbbN3i-4DL2e5wWb3cWKOJ8UbSj-Tme4tX-NFqJTKj3f; Hm_lvt_64ecd82404c51e03dc91cb9e8c025574=1531655355,1531655743,1531656413,1531656420; locale=zh; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574=1531659249; PSINO=7; to_lang_often=%5B%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%2C%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%5D; from_lang_often=%5B%7B%22value%22%3A%22spa%22%2C%22text%22%3A%22%u897F%u73ED%u7259%u8BED%22%7D%2C%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%2C%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%5D',
            'Host': 'fanyi.baidu.com',
            'Origin': 'http://fanyi.baidu.com',
            'Referer': 'http://fanyi.baidu.com/?',
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko)'
                          ' Chrome/66.0.3359.117 Safari/537.36',
            'X-Requested-With': 'XMLHttpRequest'
        }
        print(self.data['query'])
        translate_respons = requests.post(url2, data=self.data, headers=headers).json()
        translate = jsonpath(translate_respons, "$..dst")[0]
        print(translate)

    def main(self):
        """Run the full translate pipeline; order matters (sign needs gtk)."""
        self.get_token_gtk()
        self.get_language_class()
        self.get_sign()
        self.open_url()
if __name__ == '__main__':
    # Interactive REPL: translate whatever the user types, until interrupted.
    while True:
        text = input('---------------请输入要翻译的内容:--------------\n')
        Baidu_Spider(text).main()
| 90.633987
| 2,109
| 0.713565
| 1,355
| 13,867
| 7.162362
| 0.21476
| 0.016486
| 0.020608
| 0.02473
| 0.753323
| 0.749201
| 0.731582
| 0.731582
| 0.726842
| 0.726842
| 0
| 0.204847
| 0.172712
| 13,867
| 152
| 2,110
| 91.230263
| 0.641126
| 0.001731
| 0
| 0.21831
| 0
| 0.098592
| 0.731463
| 0.599606
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042254
| false
| 0
| 0.028169
| 0
| 0.077465
| 0.014085
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2f806c4c8ad8054b7163ad08fe035751455b5672
| 29,253
|
py
|
Python
|
hippos.py
|
randyrollofson/happy-hippos
|
ac5a54e967fbd7d757b73ec47727067572c12597
|
[
"MIT"
] | null | null | null |
hippos.py
|
randyrollofson/happy-hippos
|
ac5a54e967fbd7d757b73ec47727067572c12597
|
[
"MIT"
] | null | null | null |
hippos.py
|
randyrollofson/happy-hippos
|
ac5a54e967fbd7d757b73ec47727067572c12597
|
[
"MIT"
] | null | null | null |
import colors
# Short palette aliases pulled from the project's colors module so the sprite
# grids below stay compact and readable:
#   bg = background, ol = outline, c1-c5 = body shades, ey = eye color,
#   p1/p2 = mouth colors (presumably tongue/lip pinks — confirm in colors.py).
bg = colors.bg
ol = colors.ol
c1 = colors.c1
c2 = colors.c2
c3 = colors.c3
c4 = colors.c4
c5 = colors.c5
ey = colors.ey
p1 = colors.p1
p2 = colors.p2
# 34x34 pixel-art frames of the hippo, one palette alias per pixel (bg
# background, ol outline, c1-c5 body shades, ey eyes, p1/p2 mouth colors).
#
# Every animation frame is identical to hippo_basic except for a handful of
# mouth rows, so instead of repeating the full 34-row grid six times, each
# variant is derived from hippo_basic: a fresh row-wise copy with only the
# changed rows swapped in. The resulting lists are value-identical to the
# original hand-written literals, and no row objects are shared between
# frames (each frame owns its rows, as before).
hippo_basic = [
    [bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [bg, bg, bg, bg, bg, bg, bg, bg, bg, ol, ol, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, ol, ol, ol, bg, bg, bg, bg, bg, bg, bg, bg],
    [bg, bg, bg, bg, bg, bg, bg, bg, ol, c4, c4, c4, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg, ol, c4, c4, c4, ol, bg, bg, bg, bg, bg, bg, bg],
    [bg, bg, ol, ol, ol, ol, ol, bg, ol, c3, ol, c4, c5, ol, ol, ol, ol, bg, ol, ol, ol, ol, c5, c4, ol, c3, ol, bg, bg, bg, bg, bg, bg, bg],
    [ol, ol, c2, c2, c2, c2, c2, ol, ol, c3, c3, c3, c3, c3, c2, c2, c3, ol, c5, c3, c2, c2, c2, c3, c3, c5, ol, bg, bg, bg, bg, bg, bg, bg],
    [c2, c2, c2, c2, c2, c2, c3, c3, c3, ol, c3, c3, c4, c3, ol, ol, c3, c3, c3, c3, ol, ol, c2, c2, c5, ol, bg, bg, bg, bg, bg, bg, bg, bg],
    [c2, c2, c1, c2, c2, c1, c1, c2, c2, ol, c5, c3, c2, ol, ey, ey, ol, c4, c3, ol, ey, ey, ol, c2, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [c2, c1, c1, c1, c1, c1, c1, c1, c1, c3, ol, c4, c2, ol, ey, ol, ol, c4, c2, ol, ey, ol, ol, c2, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [c1, c1, c1, c1, c1, c1, c1, c3, c1, c3, c3, ol, c5, ol, ol, ol, ol, c2, c2, ol, ol, ol, ol, c2, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [c1, c1, c1, c2, c1, c1, c1, c1, c1, c1, c3, c2, ol, c4, c2, c2, c2, c2, c2, c2, c2, c2, c2, c2, c5, ol, bg, bg, bg, bg, bg, bg, bg, bg],
    [c1, c1, c1, c1, c2, c1, c1, c1, c1, c1, c2, c2, ol, c2, c2, c2, c1, c1, c1, c1, c1, c2, c2, c1, c1, c3, ol, bg, bg, bg, bg, bg, bg, bg],
    [c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c2, ol, c4, c4, c3, c1, c1, c1, ol, ol, c1, c1, c1, c1, c1, c1, ol, ol, bg, bg, bg, bg, bg, bg],
    [c2, c2, c1, c1, c1, c1, c1, c1, c3, c3, ol, c4, c4, c3, c3, c1, c1, ol, c4, ol, c1, c1, c1, c1, c1, ol, c4, ol, ol, bg, bg, bg, bg, bg],
    [c2, c2, c2, c2, c1, c1, c1, c4, c3, ol, c5, c4, c2, c2, c1, c1, c1, ol, ol, ol, c1, c1, c1, c1, c1, ol, ol, ol, c5, ol, bg, bg, bg, bg],
    [c2, c4, c2, c2, c2, c1, c1, c4, c4, ol, c5, c4, c2, c2, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c4, c4, ol, bg, bg, bg, bg],
    [c3, c3, c3, c2, c2, c2, c2, c2, c4, ol, c4, c3, c3, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c4, ol, bg, bg, bg, bg],
    [c3, c3, c3, c4, c2, c2, c2, c2, c4, ol, c4, c4, c3, c2, c1, c1, ol, ol, ol, c1, c1, c1, c1, c1, c1, c1, ol, ol, c3, ol, bg, bg, bg, bg],
    [c4, c3, c4, c4, c4, c2, c2, c3, c3, ol, c5, c4, c3, c2, c1, c1, c3, c2, ol, ol, ol, ol, ol, ol, ol, ol, ol, c3, c2, ol, bg, bg, bg, bg],
    [c4, c4, c2, c2, c3, c4, c3, c3, c3, c3, ol, c4, c4, c3, c2, c2, c1, c1, c2, c2, c2, c2, c2, c2, c2, c2, c3, c1, ol, bg, bg, bg, bg, bg],
    [c3, c3, c4, c2, c4, c5, c3, c5, c3, c4, c5, ol, c3, c3, c3, c2, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, c1, ol, bg, bg, bg, bg, bg],
    [c4, c3, c3, c3, c3, c3, c4, c4, c2, c4, c5, c5, ol, c5, c4, c4, c3, c1, c1, c1, c1, c1, c1, c1, c1, c3, c3, ol, bg, bg, bg, bg, bg, bg],
    [c4, c4, c4, c4, c4, c3, c3, c4, c1, c5, c4, c5, c5, ol, ol, ol, ol, c5, c4, c3, c2, c2, c2, c2, c2, ol, ol, bg, bg, bg, bg, bg, bg, bg],
    [c5, c5, c4, c4, c4, c4, c3, c1, c1, c4, c4, c5, ol, bg, bg, bg, bg, ol, ol, ol, ol, ol, ol, ol, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [c5, c5, c5, c5, c5, c4, c4, c1, c1, c3, c4, c4, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [c5, c5, c5, c5, c5, c5, c4, c1, c1, c3, c4, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [c5, c5, c5, c5, c5, ol, c5, c1, c1, c3, c3, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [c5, c5, c5, ol, ol, ol, c5, c1, c1, c3, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [ol, ol, ol, bg, ol, c5, c1, c1, c3, c3, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [bg, bg, bg, bg, ol, c5, c1, c1, c3, c3, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg],
    [bg, bg, bg, bg, ol, c5, c1, c1, c3, c3, ol, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg, bg]
]


def _variant(base, new_rows):
    """Return a row-wise copy of *base* with the rows at the given indices replaced.

    Rows are shallow-copied so no frame shares row lists with another frame.
    """
    grid = [row[:] for row in base]
    for index, row in new_rows.items():
        grid[index] = row
    return grid


# Mouth open showing two teeth: rows 22-23 differ from the base frame.
hippo_two_teeth = _variant(hippo_basic, {
    22: [c4, c4, c2, c2, c3, c4, c3, c3, c3, c3, ol, c4, c4, c3, c2, c2, c1, c1, ol, ey, ol, c2, c2, c2, ol, ey, ol, c1, ol, bg, bg, bg, bg, bg],
    23: [c3, c3, c4, c2, c4, c5, c3, c5, c3, c4, c5, ol, c3, c3, c3, c2, c1, c1, ol, ol, ol, c1, c1, c1, ol, ol, ol, c1, ol, bg, bg, bg, bg, bg],
})

# Wide grin showing six teeth: rows 21-23 differ.
hippo_six_teeth = _variant(hippo_basic, {
    21: [c4, c3, c4, c4, c4, c2, c2, c3, c3, ol, c5, c4, c3, c2, c1, c1, ol, ey, ol, ol, ol, ol, ol, ol, ol, ol, ol, ey, c2, ol, bg, bg, bg, bg],
    22: [c4, c4, c2, c2, c3, c4, c3, c3, c3, c3, ol, c4, c4, c3, c2, c2, ol, ol, ol, ey, ol, ey, ol, ey, ol, ey, ol, ol, ol, bg, bg, bg, bg, bg],
    23: [c3, c3, c4, c2, c4, c5, c3, c5, c3, c4, c5, ol, c3, c3, c3, c2, c1, c1, ol, ol, ol, ol, ol, ol, ol, ol, ol, c1, ol, bg, bg, bg, bg, bg],
})

# Open-mouth smile (p1/p2 mouth pixels): rows 21-24 differ.
hippo_smile = _variant(hippo_basic, {
    21: [c4, c3, c4, c4, c4, c2, c2, c3, c3, ol, c5, c4, c3, c2, c1, c1, ol, ey, ol, ol, ol, ol, ol, ol, ol, ol, ol, ey, c2, ol, bg, bg, bg, bg],
    22: [c4, c4, c2, c2, c3, c4, c3, c3, c3, c3, ol, c4, c4, c3, c2, c2, ol, ol, ol, p1, p1, p1, p1, p1, p1, p1, ol, ol, ol, bg, bg, bg, bg, bg],
    23: [c3, c3, c4, c2, c4, c5, c3, c5, c3, c4, c5, ol, c3, c3, c3, c2, c1, c1, ol, ol, p2, p2, p2, p2, p2, ol, ol, c1, ol, bg, bg, bg, bg, bg],
    24: [c4, c3, c3, c3, c3, c3, c4, c4, c2, c4, c5, c5, ol, c5, c4, c4, c3, c1, c1, c1, ol, ol, ol, ol, ol, c3, c3, ol, bg, bg, bg, bg, bg, bg],
})

# Tongue sticking out below the mouth: rows 22-24 differ.
hippo_tongue_out = _variant(hippo_basic, {
    22: [c4, c4, c2, c2, c3, c4, c3, c3, c3, c3, ol, c4, c4, c3, c2, c2, c1, c1, c2, c2, ol, p1, p1, p1, p1, ol, c3, c1, ol, bg, bg, bg, bg, bg],
    23: [c3, c3, c4, c2, c4, c5, c3, c5, c3, c4, c5, ol, c3, c3, c3, c2, c1, c1, c1, c1, c1, ol, p2, p2, ol, c1, c1, c1, ol, bg, bg, bg, bg, bg],
    24: [c4, c3, c3, c3, c3, c3, c4, c4, c2, c4, c5, c5, ol, c5, c4, c4, c3, c1, c1, c1, c1, c1, ol, ol, c1, c3, c3, ol, bg, bg, bg, bg, bg, bg],
})

# Licking (tongue up by the snout): rows 18-20 differ.
hippo_lick = _variant(hippo_basic, {
    18: [c2, c4, c2, c2, c2, c1, c1, c4, c4, ol, c5, c4, c2, c2, c1, c1, c1, c1, c1, c1, c1, c1, c1, ol, ol, c1, c1, c4, c4, ol, bg, bg, bg, bg],
    19: [c3, c3, c3, c2, c2, c2, c2, c2, c4, ol, c4, c3, c3, c1, c1, c1, c1, c1, c1, c1, c1, c1, ol, p2, p2, ol, c1, c1, c4, ol, bg, bg, bg, bg],
    20: [c3, c3, c3, c4, c2, c2, c2, c2, c4, ol, c4, c4, c3, c2, c1, c1, ol, ol, ol, c1, c1, ol, p1, p1, p1, p1, ol, ol, c3, ol, bg, bg, bg, bg],
})
| 124.480851
| 141
| 0.480703
| 6,983
| 29,253
| 2.012459
| 0.003007
| 0.831709
| 1.163026
| 1.437985
| 0.988188
| 0.988188
| 0.985341
| 0.983776
| 0.983064
| 0.982495
| 0
| 0.132288
| 0.267152
| 29,253
| 235
| 142
| 124.480851
| 0.52323
| 0
| 0
| 0.84141
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.004405
| 0
| 0.004405
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
85eb4e256f897a8aba9d111f99de86a226f8afcd
| 7,693
|
py
|
Python
|
src/turtlebot_tag/srv/_GetCommandVelSrv.py
|
ikhatri/turtlebot-tag
|
ebdf15bb8738fdd82ff0a028e0b759f6f9224631
|
[
"MIT"
] | null | null | null |
src/turtlebot_tag/srv/_GetCommandVelSrv.py
|
ikhatri/turtlebot-tag
|
ebdf15bb8738fdd82ff0a028e0b759f6f9224631
|
[
"MIT"
] | null | null | null |
src/turtlebot_tag/srv/_GetCommandVelSrv.py
|
ikhatri/turtlebot-tag
|
ebdf15bb8738fdd82ff0a028e0b759f6f9224631
|
[
"MIT"
] | null | null | null |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from turtlebot_tag/GetCommandVelSrvRequest.msg. Do not edit."""
import sys
# True when running under Python 3; the comparison already yields a bool,
# so the redundant `True if ... else False` wrapper is dropped.
python3 = sys.hexversion > 0x03000000
import genpy
import struct
class GetCommandVelSrvRequest(genpy.Message):
_md5sum = "0e8712b35c44e3e0a577b85bb0362a6a"
_type = "turtlebot_tag/GetCommandVelSrvRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """float32 v_0
float32 w_0
"""
__slots__ = ['v_0','w_0']
_slot_types = ['float32','float32']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
v_0,w_0
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetCommandVelSrvRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.v_0 is None:
self.v_0 = 0.
if self.w_0 is None:
self.w_0 = 0.
else:
self.v_0 = 0.
self.w_0 = 0.
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_2f().pack(_x.v_0, _x.w_0))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
_x = self
start = end
end += 8
(_x.v_0, _x.w_0,) = _get_struct_2f().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_2f().pack(_x.v_0, _x.w_0))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
        # No array fields: identical to deserialize(); numpy is unused.
        offset = 0
        self.v_0, self.w_0 = _get_struct_2f().unpack(str[offset:offset + 8])
        return self
    except struct.error as e:
        raise genpy.DeserializationError(e)  # most likely buffer underfill
_struct_I = genpy.struct_I

def _get_struct_I():
    """Return the shared genpy Struct used for uint32 length prefixes."""
    # Read-only access to the module-level cache; no ``global`` needed.
    return _struct_I
_struct_2f = None
def _get_struct_2f():
global _struct_2f
if _struct_2f is None:
_struct_2f = struct.Struct("<2f")
return _struct_2f
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from turtlebot_tag/GetCommandVelSrvResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GetCommandVelSrvResponse(genpy.Message):
    # Autogenerated by genpy from turtlebot_tag/GetCommandVelSrvResponse.msg.
    # The md5sum is derived from the .msg text and must stay in sync with it,
    # so the field handling below is left exactly as generated.
    _md5sum = "96c1b624fe7f4c3723bd9088fdf02fd8"
    _type = "turtlebot_tag/GetCommandVelSrvResponse"
    _has_header = False  # flag to mark the presence of a Header object
    _full_text = """float32 C_v
float32 C_w
"""
    __slots__ = ['C_v','C_w']
    _slot_types = ['float32','float32']
    def __init__(self, *args, **kwds):
        """
        Constructor. Any message fields that are implicitly/explicitly
        set to None will be assigned a default value. The recommend
        use is keyword arguments as this is more robust to future message
        changes. You cannot mix in-order arguments and keyword arguments.
        The available fields are:
        C_v,C_w
        :param args: complete set of field values, in .msg order
        :param kwds: use keyword arguments corresponding to message field names
        to set specific fields.
        """
        if args or kwds:
            super(GetCommandVelSrvResponse, self).__init__(*args, **kwds)
            # message fields cannot be None, assign default values for those that are
            if self.C_v is None:
                self.C_v = 0.
            if self.C_w is None:
                self.C_w = 0.
        else:
            # No arguments at all: zero-initialize both float fields.
            self.C_v = 0.
            self.C_w = 0.
    def _get_types(self):
        """
        internal API method
        """
        return self._slot_types
    def serialize(self, buff):
        """
        serialize message into buffer
        :param buff: buffer, ``StringIO``
        """
        try:
            _x = self
            # Both float32 fields are packed in one little-endian write.
            buff.write(_get_struct_2f().pack(_x.C_v, _x.C_w))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
    def deserialize(self, str):
        """
        unpack serialized message in str into this message instance
        :param str: byte array of serialized message, ``str``
        """
        try:
            end = 0
            _x = self
            start = end
            end += 8
            (_x.C_v, _x.C_w,) = _get_struct_2f().unpack(str[start:end])
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e)  # most likely buffer underfill
    def serialize_numpy(self, buff, numpy):
        """
        serialize message with numpy array types into buffer
        :param buff: buffer, ``StringIO``
        :param numpy: numpy python module
        """
        try:
            _x = self
            # No array fields in this message: identical to serialize().
            buff.write(_get_struct_2f().pack(_x.C_v, _x.C_w))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
    def deserialize_numpy(self, str, numpy):
        """
        unpack serialized message in str into this message instance using numpy for array types
        :param str: byte array of serialized message, ``str``
        :param numpy: numpy python module
        """
        try:
            end = 0
            _x = self
            start = end
            end += 8
            (_x.C_v, _x.C_w,) = _get_struct_2f().unpack(str[start:end])
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e)  # most likely buffer underfill
_struct_I = genpy.struct_I

def _get_struct_I():
    """Accessor for genpy's shared uint32 Struct (``struct_I``)."""
    # Module-level constant; returned directly without a ``global`` declaration.
    return _struct_I
_struct_2f = None
def _get_struct_2f():
global _struct_2f
if _struct_2f is None:
_struct_2f = struct.Struct("<2f")
return _struct_2f
class GetCommandVelSrv(object):
    """Service handle pairing the request and response message classes for
    the turtlebot_tag/GetCommandVelSrv ROS service."""
    _type = 'turtlebot_tag/GetCommandVelSrv'
    _md5sum = '73fef1a96aacf31f2c19a0e67105926e'  # md5 over the full service definition
    _request_class = GetCommandVelSrvRequest
    _response_class = GetCommandVelSrvResponse
| 32.73617
| 145
| 0.66294
| 1,087
| 7,693
| 4.4885
| 0.142594
| 0.036073
| 0.022546
| 0.031154
| 0.868211
| 0.868211
| 0.868211
| 0.868211
| 0.868211
| 0.868211
| 0
| 0.026404
| 0.22215
| 7,693
| 234
| 146
| 32.876068
| 0.788937
| 0.314052
| 0
| 0.834532
| 1
| 0
| 0.106557
| 0.041189
| 0
| 0
| 0.004098
| 0
| 0
| 1
| 0.115108
| false
| 0
| 0.043165
| 0
| 0.366906
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c80d3b1309113c2ff1e8fad303b9419a095552d2
| 134
|
py
|
Python
|
kobert_transformers/utils.py
|
ys7yoo/KoBERT-Transformers
|
4e0a00e5e4884848fe4daeccf3698a28ebcfe449
|
[
"Apache-2.0"
] | 138
|
2020-05-14T10:03:16.000Z
|
2022-03-29T14:52:13.000Z
|
kobert_transformers/utils.py
|
monologg/KoBERT-Transformers
|
4e0a00e5e4884848fe4daeccf3698a28ebcfe449
|
[
"Apache-2.0"
] | 6
|
2020-09-01T13:11:02.000Z
|
2021-08-19T05:13:41.000Z
|
kobert_transformers/utils.py
|
ys7yoo/KoBERT-Transformers
|
4e0a00e5e4884848fe4daeccf3698a28ebcfe449
|
[
"Apache-2.0"
] | 29
|
2020-05-14T10:03:17.000Z
|
2022-02-22T01:49:04.000Z
|
from .tokenization_kobert import KoBertTokenizer
def get_tokenizer():
    """Load and return the pretrained KoBERT tokenizer.

    Delegates to the project's ``KoBertTokenizer``; ``"monologg/kobert"`` is
    the published checkpoint identifier (presumably fetched from the Hugging
    Face hub on first use — network I/O).
    """
    tokenizer = KoBertTokenizer.from_pretrained("monologg/kobert")
    return tokenizer
| 22.333333
| 61
| 0.820896
| 14
| 134
| 7.642857
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104478
| 134
| 5
| 62
| 26.8
| 0.891667
| 0
| 0
| 0
| 0
| 0
| 0.11194
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c84eba406f466cabfc627146872b553bbbf7955e
| 4,012
|
py
|
Python
|
tests/pytests/unit/client/test_ssh.py
|
haodeon/salt
|
af2964f4ddbf9c5635d1528a495e473996cc7b71
|
[
"Apache-2.0"
] | null | null | null |
tests/pytests/unit/client/test_ssh.py
|
haodeon/salt
|
af2964f4ddbf9c5635d1528a495e473996cc7b71
|
[
"Apache-2.0"
] | null | null | null |
tests/pytests/unit/client/test_ssh.py
|
haodeon/salt
|
af2964f4ddbf9c5635d1528a495e473996cc7b71
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import salt.client.ssh
from tests.support.helpers import dedent
@pytest.mark.slow_test
@pytest.mark.skip_on_windows(reason="Windows does not support salt-ssh")
@pytest.mark.skip_if_binaries_missing("ssh", check_all=True)
def test_ssh_single__cmd_str(temp_salt_master):
    """With sudo disabled, Single._cmd_str() emits an empty SUDO wrapper."""
    opts = temp_salt_master.config.copy()
    argv = []
    id_ = "minion"
    host = "minion"
    single = salt.client.ssh.Single(opts, argv, id_, host, sudo=False)
    cmd = single._cmd_str()
    expected = dedent(
        """
SUDO=""
if [ -n "" ]
then SUDO=" "
fi
SUDO_USER=""
if [ "$SUDO" ] && [ "$SUDO_USER" ]
then SUDO="$SUDO -u $SUDO_USER"
fi
"""
    )
    assert expected in cmd
@pytest.mark.slow_test
@pytest.mark.skip_on_windows(reason="Windows does not support salt-ssh")
@pytest.mark.skip_if_binaries_missing("ssh", check_all=True)
def test_ssh_single__cmd_str_sudo(temp_salt_master):
    """With sudo=True, the wrapper runs the command through ``sudo``."""
    opts = temp_salt_master.config.copy()
    argv = []
    id_ = "minion"
    host = "minion"
    single = salt.client.ssh.Single(opts, argv, id_, host, sudo=True)
    cmd = single._cmd_str()
    expected = dedent(
        """
SUDO=""
if [ -n "sudo" ]
then SUDO="sudo "
fi
SUDO_USER=""
if [ "$SUDO" ] && [ "$SUDO_USER" ]
then SUDO="$SUDO -u $SUDO_USER"
fi
"""
    )
    assert expected in cmd
@pytest.mark.slow_test
@pytest.mark.skip_on_windows(reason="Windows does not support salt-ssh")
@pytest.mark.skip_if_binaries_missing("ssh", check_all=True)
def test_ssh_single__cmd_str_sudo_user(temp_salt_master):
    """sudo=True plus sudo_user populates SUDO_USER so sudo runs as that user."""
    opts = temp_salt_master.config.copy()
    argv = []
    id_ = "minion"
    host = "minion"
    user = "wayne"
    single = salt.client.ssh.Single(opts, argv, id_, host, sudo=True, sudo_user=user)
    cmd = single._cmd_str()
    expected = dedent(
        """
SUDO=""
if [ -n "sudo" ]
then SUDO="sudo "
fi
SUDO_USER="wayne"
if [ "$SUDO" ] && [ "$SUDO_USER" ]
then SUDO="$SUDO -u $SUDO_USER"
fi
"""
    )
    assert expected in cmd
@pytest.mark.slow_test
@pytest.mark.skip_on_windows(reason="Windows does not support salt-ssh")
@pytest.mark.skip_if_binaries_missing("ssh", check_all=True)
def test_ssh_single__cmd_str_sudo_passwd(temp_salt_master):
    """sudo with a password adds salt's ``sudo -p`` prompt marker to the invocation."""
    opts = temp_salt_master.config.copy()
    argv = []
    id_ = "minion"
    host = "minion"
    passwd = "salty"
    single = salt.client.ssh.Single(opts, argv, id_, host, sudo=True, passwd=passwd)
    cmd = single._cmd_str()
    # The hex token is the fixed salt sudo-prompt marker embedded by Single.
    expected = dedent(
        """
SUDO=""
if [ -n "sudo -p '[salt:sudo:d11bd4221135c33324a6bdc09674146fbfdf519989847491e34a689369bbce23]passwd:'" ]
then SUDO="sudo -p '[salt:sudo:d11bd4221135c33324a6bdc09674146fbfdf519989847491e34a689369bbce23]passwd:' "
fi
SUDO_USER=""
if [ "$SUDO" ] && [ "$SUDO_USER" ]
then SUDO="$SUDO -u $SUDO_USER"
fi
"""
    )
    assert expected in cmd
@pytest.mark.slow_test
@pytest.mark.skip_on_windows(reason="Windows does not support salt-ssh")
@pytest.mark.skip_if_binaries_missing("ssh", check_all=True)
def test_ssh_single__cmd_str_sudo_passwd_user(temp_salt_master):
    """Combination case: sudo password prompt marker AND SUDO_USER together."""
    opts = temp_salt_master.config.copy()
    argv = []
    id_ = "minion"
    host = "minion"
    user = "wayne"
    passwd = "salty"
    single = salt.client.ssh.Single(
        opts, argv, id_, host, sudo=True, passwd=passwd, sudo_user=user
    )
    cmd = single._cmd_str()
    expected = dedent(
        """
SUDO=""
if [ -n "sudo -p '[salt:sudo:d11bd4221135c33324a6bdc09674146fbfdf519989847491e34a689369bbce23]passwd:'" ]
then SUDO="sudo -p '[salt:sudo:d11bd4221135c33324a6bdc09674146fbfdf519989847491e34a689369bbce23]passwd:' "
fi
SUDO_USER="wayne"
if [ "$SUDO" ] && [ "$SUDO_USER" ]
then SUDO="$SUDO -u $SUDO_USER"
fi
"""
    )
    assert expected in cmd
| 27.668966
| 114
| 0.621635
| 498
| 4,012
| 4.757028
| 0.106426
| 0.060785
| 0.059097
| 0.037991
| 0.968341
| 0.968341
| 0.968341
| 0.968341
| 0.968341
| 0.953145
| 0
| 0.059603
| 0.247258
| 4,012
| 144
| 115
| 27.861111
| 0.724834
| 0
| 0
| 0.72973
| 0
| 0
| 0.099464
| 0
| 0
| 0
| 0
| 0
| 0.067568
| 1
| 0.067568
| false
| 0.081081
| 0.040541
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
c0a3f681f52ea1c697539d3a53bcf7806bc30de2
| 5,523
|
py
|
Python
|
tsa/src/test/python/thalesians/tsa/testconversions.py
|
mikimaus78/ml_monorepo
|
b2c2627ff0e86e27f6829170d0dac168d8e5783b
|
[
"BSD-3-Clause"
] | 51
|
2019-02-01T19:43:37.000Z
|
2022-03-16T09:07:03.000Z
|
tsa/src/test/python/thalesians/tsa/testconversions.py
|
mikimaus78/ml_monorepo
|
b2c2627ff0e86e27f6829170d0dac168d8e5783b
|
[
"BSD-3-Clause"
] | 2
|
2019-02-23T18:54:22.000Z
|
2019-11-09T01:30:32.000Z
|
tsa/src/test/python/thalesians/tsa/testconversions.py
|
mikimaus78/ml_monorepo
|
b2c2627ff0e86e27f6829170d0dac168d8e5783b
|
[
"BSD-3-Clause"
] | 35
|
2019-02-08T02:00:31.000Z
|
2022-03-01T23:17:00.000Z
|
import datetime as dt
import math
import unittest
import numpy as np
import pandas as pd
import thalesians.tsa.conversions as conv
class TestConversions(unittest.TestCase):
    """Unit tests for thalesians.tsa.conversions datetime/number parsing helpers."""
    def test_numpy_datetime64_to_python_datetime(self):
        # Millisecond-precision datetime64 round-trips to an equivalent datetime.
        self.assertEqual(conv.numpy_datetime64_to_python_datetime(np.datetime64('2017-11-19T10:49:31.357')), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
    def test_pandas_timestamp_to_python_datetime(self):
        self.assertEqual(conv.pandas_timestamp_to_python_datetime(pd.Timestamp(dt.datetime(2017, 11, 19, 10, 49, 31, 357000))), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
    def test_to_python_datetime(self):
        # The generic converter accepts datetime64, Timestamp and strings.
        self.assertEqual(conv.to_python_datetime(np.datetime64('2017-11-19T10:49:31.357')), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
        self.assertEqual(conv.to_python_datetime(pd.Timestamp(dt.datetime(2017, 11, 19, 10, 49, 31, 357000))), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
        self.assertEqual(conv.to_python_datetime('2017.11.19T10:49:31.357'), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
    def test_str_to_int(self):
        # Whitespace is tolerated; non-integers yield None when errors are suppressed.
        self.assertEqual(conv.str_to_int('5'), 5)
        self.assertEqual(conv.str_to_int(' 5 '), 5)
        self.assertEqual(conv.str_to_int(' x ', raise_value_error=False), None)
        self.assertEqual(conv.str_to_int(' 5.7 ', raise_value_error=False), None)
    def test_str_to_float(self):
        # Default failure result is NaN unless none_result overrides it.
        self.assertEqual(conv.str_to_float('5'), 5.)
        self.assertEqual(conv.str_to_float('5.7'), 5.7)
        self.assertEqual(conv.str_to_float(' 5.7 '), 5.7)
        self.assertTrue(math.isnan(conv.str_to_float(' x ', raise_value_error=False)))
        self.assertEqual(conv.str_to_float(' x ', none_result=None, raise_value_error=False), None)
    def test_str_to_date(self):
        # Dot, dash, slash and compact date formats are all accepted.
        self.assertEqual(conv.str_to_date('2017.11.19'), dt.date(2017, 11, 19))
        self.assertEqual(conv.str_to_date('2017-11-19'), dt.date(2017, 11, 19))
        self.assertEqual(conv.str_to_date('2017/11/19'), dt.date(2017, 11, 19))
        self.assertEqual(conv.str_to_date('20171119'), dt.date(2017, 11, 19))
        self.assertEqual(conv.str_to_date(''), None)
    def test_str_to_time(self):
        # Precision degrades gracefully: microseconds, millis, seconds, minutes.
        self.assertEqual(conv.str_to_time('10:49:31.357289'), dt.time(10, 49, 31, 357289))
        self.assertEqual(conv.str_to_time('10:49:31.357'), dt.time(10, 49, 31, 357000))
        self.assertEqual(conv.str_to_time('10:49:31'), dt.time(10, 49, 31))
        self.assertEqual(conv.str_to_time('10:49'), dt.time(10, 49))
        self.assertEqual(conv.str_to_time(''), None)
    def test_str_to_datetime(self):
        # Cartesian product of date separators (. - /), space vs 'T' separator,
        # and time precision; empty string maps to None.
        self.assertEqual(conv.str_to_datetime('2017.11.19 10:49:31.357289'), dt.datetime(2017, 11, 19, 10, 49, 31, 357289))
        self.assertEqual(conv.str_to_datetime('2017.11.19 10:49:31.357'), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
        self.assertEqual(conv.str_to_datetime('2017.11.19 10:49:31'), dt.datetime(2017, 11, 19, 10, 49, 31))
        self.assertEqual(conv.str_to_datetime('2017.11.19 10:49'), dt.datetime(2017, 11, 19, 10, 49))
        self.assertEqual(conv.str_to_datetime('2017-11-19 10:49:31.357289'), dt.datetime(2017, 11, 19, 10, 49, 31, 357289))
        self.assertEqual(conv.str_to_datetime('2017-11-19 10:49:31.357'), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
        self.assertEqual(conv.str_to_datetime('2017-11-19 10:49:31'), dt.datetime(2017, 11, 19, 10, 49, 31))
        self.assertEqual(conv.str_to_datetime('2017-11-19 10:49'), dt.datetime(2017, 11, 19, 10, 49))
        self.assertEqual(conv.str_to_datetime('2017/11/19 10:49:31.357289'), dt.datetime(2017, 11, 19, 10, 49, 31, 357289))
        self.assertEqual(conv.str_to_datetime('2017/11/19 10:49:31.357'), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
        self.assertEqual(conv.str_to_datetime('2017/11/19 10:49:31'), dt.datetime(2017, 11, 19, 10, 49, 31))
        self.assertEqual(conv.str_to_datetime('2017/11/19 10:49'), dt.datetime(2017, 11, 19, 10, 49))
        self.assertEqual(conv.str_to_datetime('2017.11.19T10:49:31.357289'), dt.datetime(2017, 11, 19, 10, 49, 31, 357289))
        self.assertEqual(conv.str_to_datetime('2017.11.19T10:49:31.357'), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
        self.assertEqual(conv.str_to_datetime('2017.11.19T10:49:31'), dt.datetime(2017, 11, 19, 10, 49, 31))
        self.assertEqual(conv.str_to_datetime('2017.11.19T10:49'), dt.datetime(2017, 11, 19, 10, 49))
        self.assertEqual(conv.str_to_datetime('2017-11-19T10:49:31.357289'), dt.datetime(2017, 11, 19, 10, 49, 31, 357289))
        self.assertEqual(conv.str_to_datetime('2017-11-19T10:49:31.357'), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
        self.assertEqual(conv.str_to_datetime('2017-11-19T10:49:31'), dt.datetime(2017, 11, 19, 10, 49, 31))
        self.assertEqual(conv.str_to_datetime('2017-11-19T10:49'), dt.datetime(2017, 11, 19, 10, 49))
        self.assertEqual(conv.str_to_datetime('2017/11/19T10:49:31.357289'), dt.datetime(2017, 11, 19, 10, 49, 31, 357289))
        self.assertEqual(conv.str_to_datetime('2017/11/19T10:49:31.357'), dt.datetime(2017, 11, 19, 10, 49, 31, 357000))
        self.assertEqual(conv.str_to_datetime('2017/11/19T10:49:31'), dt.datetime(2017, 11, 19, 10, 49, 31))
        self.assertEqual(conv.str_to_datetime('2017/11/19T10:49'), dt.datetime(2017, 11, 19, 10, 49))
        self.assertEqual(conv.str_to_datetime(''), None)
if __name__ == '__main__':
    # Allow running this test module directly as a script.
    unittest.main()
| 64.22093
| 174
| 0.674452
| 916
| 5,523
| 3.906114
| 0.060044
| 0.108999
| 0.219117
| 0.192286
| 0.930967
| 0.878424
| 0.863611
| 0.806596
| 0.795975
| 0.740637
| 0
| 0.239708
| 0.155531
| 5,523
| 85
| 175
| 64.976471
| 0.527444
| 0
| 0
| 0.060606
| 0
| 0
| 0.125294
| 0.039109
| 0
| 0
| 0
| 0
| 0.742424
| 1
| 0.121212
| false
| 0
| 0.090909
| 0
| 0.227273
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
c0ad489e49492064a2cc35ab85df16fd1428227c
| 2,512
|
py
|
Python
|
cnn_dm_dict.py
|
ruiyiw/VT-summ
|
897795e307604e724a32ffae175b717ba55b6fa6
|
[
"MIT"
] | null | null | null |
cnn_dm_dict.py
|
ruiyiw/VT-summ
|
897795e307604e724a32ffae175b717ba55b6fa6
|
[
"MIT"
] | null | null | null |
cnn_dm_dict.py
|
ruiyiw/VT-summ
|
897795e307604e724a32ffae175b717ba55b6fa6
|
[
"MIT"
] | null | null | null |
path = "data/cnn_dm_data/"
vocab = {}
punctuation = '",.;:!?' + "'"
with open(path+"train.target", 'r', encoding='utf-8') as fin:
for line in fin:
line = line.strip().split()
for w in line:
if len(w) > 1 and w[0] in punctuation:
w = w[1:]
if len(w) > 1 and w[len(w)-1] in punctuation:
w = w[0:len(w)-2]
if not vocab.__contains__(w):
vocab[w] = 0
with open(path+"train.source", 'r', encoding='utf-8') as fin:
for line in fin:
line = line.strip().split()
for w in line:
if len(w) > 1 and w[0] in punctuation:
w = w[1:]
if len(w) > 1 and w[len(w)-1] in punctuation:
w = w[0:len(w)-2]
if not vocab.__contains__(w):
vocab[w] = 0
with open(path+"val.target", 'r', encoding='utf-8') as fin:
for line in fin:
line = line.strip().split()
for w in line:
if len(w) > 1 and w[0] in punctuation:
w = w[1:]
if len(w) > 1 and w[len(w)-1] in punctuation:
w = w[0:len(w)-2]
if not vocab.__contains__(w):
vocab[w] = 0
with open(path+"val.source", 'r', encoding='utf-8') as fin:
for line in fin:
line = line.strip().split()
for w in line:
if len(w) > 1 and w[0] in punctuation:
w = w[1:]
if len(w) > 1 and w[len(w)-1] in punctuation:
w = w[0:len(w)-2]
if not vocab.__contains__(w):
vocab[w] = 0
with open(path+"test.target", 'r', encoding='utf-8') as fin:
for line in fin:
line = line.strip().split()
for w in line:
if len(w) > 1 and w[0] in punctuation:
w = w[1:]
if len(w) > 1 and w[len(w)-1] in punctuation:
w = w[0:len(w)-2]
if not vocab.__contains__(w):
vocab[w] = 0
with open(path+"test.source", 'r', encoding='utf-8') as fin:
for line in fin:
line = line.strip().split()
for w in line:
if len(w) > 1 and w[0] in punctuation:
w = w[1:]
if len(w) > 1 and w[len(w)-1] in punctuation:
w = w[0:len(w)-2]
if not vocab.__contains__(w):
vocab[w] = 0
with open(path+"vocab.txt", 'w', encoding='utf-8') as fout:
for k in vocab.keys():
fout.write(k)
fout.write('\n')
| 31.4
| 61
| 0.461385
| 389
| 2,512
| 2.912596
| 0.100257
| 0.084731
| 0.079435
| 0.074139
| 0.896734
| 0.896734
| 0.896734
| 0.896734
| 0.896734
| 0.896734
| 0
| 0.035461
| 0.382564
| 2,512
| 79
| 62
| 31.797468
| 0.695035
| 0
| 0
| 0.80597
| 0
| 0
| 0.057393
| 0
| 0.014925
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d0b0aebe295ab8373de11be030771f2f1ad1705
| 217
|
py
|
Python
|
textdistance/algorithms/__init__.py
|
juliangilbey/textdistance
|
2c407d60238abc01395a8bd9cf0ebf5bf8b1287b
|
[
"MIT"
] | 1,401
|
2017-05-08T12:08:51.000Z
|
2019-08-06T06:35:30.000Z
|
textdistance/algorithms/__init__.py
|
juliangilbey/textdistance
|
2c407d60238abc01395a8bd9cf0ebf5bf8b1287b
|
[
"MIT"
] | 31
|
2019-08-18T17:08:31.000Z
|
2021-12-08T13:03:31.000Z
|
textdistance/algorithms/__init__.py
|
juliangilbey/textdistance
|
2c407d60238abc01395a8bd9cf0ebf5bf8b1287b
|
[
"MIT"
] | 131
|
2017-09-21T15:28:57.000Z
|
2019-07-30T14:17:18.000Z
|
# app
from .compression_based import * # noQA
from .edit_based import * # noQA
from .phonetic import * # noQA
from .sequence_based import * # noQA
from .simple import * # noQA
from .token_based import * # noQA
| 24.111111
| 40
| 0.705069
| 29
| 217
| 5.137931
| 0.37931
| 0.402685
| 0.469799
| 0.38255
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.207373
| 217
| 8
| 41
| 27.125
| 0.866279
| 0.152074
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
23b551b052683dbf34ff0b1b462e8c299f7ea48b
| 2,999
|
py
|
Python
|
enemy.py
|
szzszz/alien_invasion
|
a4bb595b002c961ade2efae7101fd35442583786
|
[
"Apache-2.0"
] | 1
|
2019-03-20T06:56:30.000Z
|
2019-03-20T06:56:30.000Z
|
enemy.py
|
szzszz/alien_invasion
|
a4bb595b002c961ade2efae7101fd35442583786
|
[
"Apache-2.0"
] | null | null | null |
enemy.py
|
szzszz/alien_invasion
|
a4bb595b002c961ade2efae7101fd35442583786
|
[
"Apache-2.0"
] | null | null | null |
import pygame
from random import *
class SmallEnemy(pygame.sprite.Sprite):
def __init__(self, bg_size):
pygame.sprite.Sprite.__init__(self)
self.image = pygame.image.load("images/enemy1.png").convert_alpha()
self.destroy_images = []
self.destroy_images.extend([pygame.image.load("images/enemy_destroy_1.png").convert_alpha(),pygame.image. load("images/enemy1.png").convert_alpha()])
self.active = True
self.rect = self.image.get_rect()
self.width,self.height = bg_size[0], bg_size[1]
self.speed = 2
self.rect.left, self.rect.top = randint(0, self.width - self.rect.width), randint(-5*self.height, 0)
self.mask = pygame.mask.from_surface(self.image)
def move(self):
if self.rect.top < self.height:
self.rect.top += self.speed
else:
self.reset()
def reset(self):
self.active = True
self.rect.left, self.rect.top = randint(0, self.width - self.rect.width), randint(-5*self.height, 0)
class MidEnemy(pygame.sprite.Sprite):
energy = 8
def __init__(self, bg_size):
pygame.sprite.Sprite.__init__(self)
self.energy = MidEnemy.energy
self.image = pygame.image.load("images/enemy2.png").convert_alpha()
self.destroy_images = []
self.destroy_images.extend([pygame.image.load("images/enemy_destroy_2.png").convert_alpha(), pygame.image.load("images/enemy2.png").convert_alpha()])
self.active = True
self.rect = self.image.get_rect()
self.width,self.height = bg_size[0], bg_size[1]
self.speed = 1
self.rect.left, self.rect.top = randint(0, self.width - self.rect.width), randint(-10*self.height, -self.height)
self.mask = pygame.mask.from_surface(self.image)
def move(self):
if self.rect.top < self.height:
self.rect.top += self.speed
else:
self.reset()
def reset(self):
self.energy = MidEnemy.energy
self.active = True
self.rect.left, self.rect.top = randint(0, self.width - self.rect.width), randint(-10*self.height, -3*self.height)
class BigEnemy(pygame.sprite.Sprite):
energy = 20
def __init__(self, bg_size):
pygame.sprite.Sprite.__init__(self)
self.energy = BigEnemy.energy
self.image = pygame.image.load("images/enemy3.png").convert_alpha()
self.destroy_images = []
self.destroy_images.extend([pygame.image.load("images/enemy_destroy_3.png").convert_alpha(), pygame.image.load("images/enemy3.png").convert_alpha()])
self.active = True
self.rect = self.image.get_rect()
self.width,self.height = bg_size[0], bg_size[1]
self.speed = 1
self.rect.left, self.rect.top = randint(0, self.width - self.rect.width), randint(-12*self.height, -5*self.height)
self.mask = pygame.mask.from_surface(self.image)
def move(self):
if self.rect.top < self.height:
self.rect.top += self.speed
else:
self.reset()
def reset(self):
self.energy = BigEnemy.energy
self.active = True
self.rect.left, self.rect.top = randint(0, self.width - self.rect.width), randint(-15*self.height, -5*self.height)
| 33.696629
| 152
| 0.693565
| 449
| 2,999
| 4.498886
| 0.11804
| 0.106931
| 0.065347
| 0.093564
| 0.922277
| 0.906436
| 0.888119
| 0.85099
| 0.85099
| 0.770792
| 0
| 0.016471
| 0.149717
| 2,999
| 88
| 153
| 34.079545
| 0.775686
| 0
| 0
| 0.735294
| 0
| 0
| 0.061834
| 0.026795
| 0
| 0
| 0
| 0
| 0
| 1
| 0.132353
| false
| 0
| 0.029412
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
23c94c45b4cc9c13f9d8b1020f760faf894f2300
| 490
|
py
|
Python
|
petrophysics/permeability/__init__.py
|
petroGG/petrophysics
|
953023d89969f7970f584604146118ad7123ce30
|
[
"MIT"
] | 47
|
2016-06-30T20:04:02.000Z
|
2021-12-11T17:01:04.000Z
|
petrophysics/permeability/__init__.py
|
Khalilsqu/petrophysics
|
953023d89969f7970f584604146118ad7123ce30
|
[
"MIT"
] | null | null | null |
petrophysics/permeability/__init__.py
|
Khalilsqu/petrophysics
|
953023d89969f7970f584604146118ad7123ce30
|
[
"MIT"
] | 35
|
2016-09-30T17:28:48.000Z
|
2022-01-06T02:24:53.000Z
|
from .permeability import phiperm, fperm, tixier, wyllie_rose, timur, morris_biggs_oil, morris_biggs_gas, schlumberger, coates_dumanoir, coates_deno, lucia, fractureperm
# Public re-export list for ``from petrophysics.permeability import *``;
# mirrors the names imported from .permeability above.
__all__ = ['phiperm',
           'fperm',
           'tixier',
           'wyllie_rose',
           'timur',
           'morris_biggs_oil',
           'morris_biggs_gas',
           'schlumberger',
           'coates_dumanoir',
           'coates_deno',
           'lucia',
           'fractureperm'
           ]
| 27.222222
| 173
| 0.553061
| 42
| 490
| 6.02381
| 0.47619
| 0.173913
| 0.142292
| 0.189723
| 0.901186
| 0.901186
| 0.901186
| 0.901186
| 0.901186
| 0.901186
| 0
| 0
| 0.344898
| 490
| 17
| 174
| 28.823529
| 0.788162
| 0
| 0
| 0
| 0
| 0
| 0.247951
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
23e3f165d3994893ba3926906248dc75aca7a53b
| 6,531
|
py
|
Python
|
samples/test_InternetService_Create.py
|
oxo42/FpTest
|
b0b6064ab4724351bc9c3401bbba82bc9194da0b
|
[
"Apache-2.0"
] | null | null | null |
samples/test_InternetService_Create.py
|
oxo42/FpTest
|
b0b6064ab4724351bc9c3401bbba82bc9194da0b
|
[
"Apache-2.0"
] | null | null | null |
samples/test_InternetService_Create.py
|
oxo42/FpTest
|
b0b6064ab4724351bc9c3401bbba82bc9194da0b
|
[
"Apache-2.0"
] | 1
|
2018-03-04T21:58:46.000Z
|
2018-03-04T21:58:46.000Z
|
__author__ = 'oxle019'
import fptest
class CreateInternetServiceSuccess(fptest.FpTest):
    """Happy path: creating an InternetService emits the full workorder
    sequence in order and the order finishes with status OK."""
    def test_outgoing_workorders_in_correct_order(self):
        # Radius rows are (re)written first, then the subscriber is created,
        # its plan set, and the entity provisioned.
        expected_workorders = ['radcheck_delete', 'radreply_delete', 'radcheck_insert', 'radreply_insert',
                               'CreateSubscriber', 'SetPlan', 'ProvisionEntity']
        actual_workorders = [wo.name for wo in self.cart_order_tracing.outgoing_workorders]
        self.assertListEqual(expected_workorders, actual_workorders)
    def test_status(self):
        self.assertEqual('OK', self.get_fp_status())
    def request(self):
        # Service-order XML submitted by the FpTest harness before the tests run.
        return """
<request>
<so>
<orderId>1001</orderId>
<sod>
<domain>GPON</domain>
<verb>Create</verb>
<customerId>RegressionTesting</customerId>
<originator>WGET</originator>
<priority>10</priority>
<doCheckpoint>false</doCheckpoint>
<dataset>
<param>
<name>routerName</name>
<index>0</index>
<value>router</value>
</param>
<param>
<name>package</name>
<index>0</index>
<value>ZM_HOME_BASIC</value>
</param>
</dataset>
<pod>
<productName>InternetService</productName>
<productVerb>Create</productVerb>
<dataset>
<param>
<name>pppUsername</name>
<index>0</index>
<value>helpdeskgponvoiptest@example.com</value>
</param>
<param>
<name>pppPassword</name>
<index>0</index>
<value>123412</value>
</param>
</dataset>
</pod>
</sod>
</so>
</request>
"""
class CreateInternetService_SubscriberServices_CreateSubscriberFail(fptest.FpTest):
    """CreateSubscriber fails (username 'fail@example.com'): the radius rows
    are rolled back and the order ends with status KO."""
    def test_outgoing_workorders_in_correct_order(self):
        # After the functional error on CreateSubscriber, only the two radius
        # delete workorders run as compensation.
        expected_workorders = [('radcheck_delete', 'WOS_Completed'), ('radreply_delete', 'WOS_Completed'),
                               ('radcheck_insert', 'WOS_Completed'), ('radreply_insert', 'WOS_Completed'),
                               ('CreateSubscriber', 'WOS_FunctionalError'), ('radcheck_delete', 'WOS_Completed'),
                               ('radreply_delete', 'WOS_Completed')]
        actual_workorders = [(wo.name, wo.status) for wo in self.cart_order_tracing.outgoing_workorders]
        self.assertListEqual(expected_workorders, actual_workorders)
    def test_status(self):
        self.assertEqual('KO', self.get_fp_status())
    def request(self):
        # Identical to the success request except pppUsername triggers failure.
        return """
<request>
<so>
<orderId>1001</orderId>
<sod>
<domain>GPON</domain>
<verb>Create</verb>
<customerId>RegressionTesting</customerId>
<originator>WGET</originator>
<priority>10</priority>
<doCheckpoint>false</doCheckpoint>
<dataset>
<param>
<name>routerName</name>
<index>0</index>
<value>router</value>
</param>
<param>
<name>package</name>
<index>0</index>
<value>ZM_HOME_BASIC</value>
</param>
</dataset>
<pod>
<productName>InternetService</productName>
<productVerb>Create</productVerb>
<dataset>
<param>
<name>pppUsername</name>
<index>0</index>
<value>fail@example.com</value>
</param>
<param>
<name>pppPassword</name>
<index>0</index>
<value>123412</value>
</param>
</dataset>
</pod>
</sod>
</so>
</request>
"""
class CreateInternetService_SubscriberServices_SetPlanFail(fptest.FpTest):
    """SetPlan fails (username 'failsetplan@example.com'): the already-created
    subscriber and radius rows are rolled back and the order ends KO."""
    def test_outgoing_workorders_in_correct_order(self):
        # Expect to see a failed SetPlan followed by rollback of the subscriber, radcheck and radreply
        expected_workorders = [('radcheck_delete', 'WOS_Completed'), ('radreply_delete', 'WOS_Completed'),
                               ('radcheck_insert', 'WOS_Completed'), ('radreply_insert', 'WOS_Completed'),
                               ('CreateSubscriber', 'WOS_Completed'), ('SetPlan', 'WOS_FunctionalError'),
                               ('DeleteSubscriber', 'WOS_Completed'), ('radcheck_delete', 'WOS_Completed'),
                               ('radreply_delete', 'WOS_Completed')]
        actual_workorders = [(wo.name, wo.status) for wo in self.cart_order_tracing.outgoing_workorders]
        self.assertListEqual(expected_workorders, actual_workorders)
    def test_status(self):
        self.assertEqual('KO', self.get_fp_status())
    def request(self):
        # Identical to the success request except pppUsername makes SetPlan fail.
        return """
<request>
<so>
<orderId>1001</orderId>
<sod>
<domain>GPON</domain>
<verb>Create</verb>
<customerId>RegressionTesting</customerId>
<originator>WGET</originator>
<priority>10</priority>
<doCheckpoint>false</doCheckpoint>
<dataset>
<param>
<name>routerName</name>
<index>0</index>
<value>router</value>
</param>
<param>
<name>package</name>
<index>0</index>
<value>ZM_HOME_BASIC</value>
</param>
</dataset>
<pod>
<productName>InternetService</productName>
<productVerb>Create</productVerb>
<dataset>
<param>
<name>pppUsername</name>
<index>0</index>
<value>failsetplan@example.com</value>
</param>
<param>
<name>pppPassword</name>
<index>0</index>
<value>123412</value>
</param>
</dataset>
</pod>
</sod>
</so>
</request>
"""
| 36.691011
| 113
| 0.493646
| 503
| 6,531
| 6.228628
| 0.190855
| 0.053623
| 0.038302
| 0.057453
| 0.878072
| 0.878072
| 0.878072
| 0.878072
| 0.878072
| 0.878072
| 0
| 0.012824
| 0.391058
| 6,531
| 178
| 114
| 36.691011
| 0.774956
| 0.014087
| 0
| 0.907975
| 0
| 0
| 0.714042
| 0.19882
| 0
| 0
| 0
| 0
| 0.03681
| 1
| 0.055215
| false
| 0.018405
| 0.006135
| 0.018405
| 0.09816
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9b1bbdc047c278aa0876455c7b9a5ed28b8f5ec1
| 3,730
|
py
|
Python
|
test/plugins/test_python.py
|
Wiredcraft/pipelines
|
2d8db414ada5d39f648ae995a714f572806ab345
|
[
"MIT"
] | 121
|
2016-05-24T03:32:14.000Z
|
2021-12-16T12:12:12.000Z
|
test/plugins/test_python.py
|
Wiredcraft/pipelines
|
2d8db414ada5d39f648ae995a714f572806ab345
|
[
"MIT"
] | 83
|
2016-05-06T08:21:34.000Z
|
2022-02-10T12:45:52.000Z
|
test/plugins/test_python.py
|
Wiredcraft/pipelines
|
2d8db414ada5d39f648ae995a714f572806ab345
|
[
"MIT"
] | 11
|
2016-05-13T09:44:42.000Z
|
2022-03-02T07:41:54.000Z
|
import os
import unittest
from unittest.case import TestCase
from pipelines.pipeline.task import TaskResult, EXECUTION_SUCCESSFUL
from pipelines.plugins.python_executor import PythonExecutor
from pipelines.utils import conf_logging
conf_logging()  # configure the project's logging once, before any test runs
class TestPythonExecutor(TestCase):
def test_basic_script(self):
print 'Running test_basic_script'
executor = PythonExecutor()
args = {
'script': 'print "test"'
}
res = executor.execute(args)
self.assertIsInstance(res, TaskResult)
self.assertEqual(res.status, EXECUTION_SUCCESSFUL)
self.assertEqual(res.message.strip(), 'Execution finished')
self.assertEqual(res.data['output'], u'test\n')
def test_basic_file(self):
print 'Running test_basic_script'
executor = PythonExecutor()
args = {
'file': 'test/files/test_python_file.py'
}
res = executor.execute(args)
self.assertIsInstance(res, TaskResult)
self.assertEqual(res.status, EXECUTION_SUCCESSFUL)
self.assertEqual(res.message.strip(), 'Execution finished')
self.assertEqual(res.data['output'], u'test: {"a": 1}\n')
def test_workdir(self):
print 'Running test_workdir'
executor = PythonExecutor()
args = {
'workdir': 'test/files',
'file': 'test_python_file.py'
}
res = executor.execute(args)
self.assertIsInstance(res, TaskResult)
self.assertEqual(res.status, EXECUTION_SUCCESSFUL)
self.assertEqual(res.message.strip(), 'Execution finished')
self.assertEqual(res.data['output'], u'test: {"a": 1}\n')
def test_workdir_abspath(self):
print 'Running test_workdir'
executor = PythonExecutor()
args = {
'workdir': os.path.abspath('test/files'),
'file': 'test_python_file.py'
}
res = executor.execute(args)
self.assertIsInstance(res, TaskResult)
self.assertEqual(res.status, EXECUTION_SUCCESSFUL)
self.assertEqual(res.message.strip(), 'Execution finished')
self.assertEqual(res.data['output'], u'test: {"a": 1}\n')
# def test_workdir_virtualenv(self):
# print 'Running test_workdir'
# executor = PythonExecutor()
# args = {
# 'virtualenv': 'test/files/test_venv',
# 'script': '''
# import dopy
# print dopy.__license__
# '''
#
# }
# res = executor.execute(args)
# self.assertIsInstance(res, TaskResult)
# self.assertEqual(res.data, 'MIT')
# self.assertEqual(res.status, EXECUTION_SUCCESSFUL)
#
# def test_workdir_virtualenv_abs(self):
# print 'Running test_workdir'
# executor = PythonExecutor()
# args = {
# 'virtualenv': os.path.abspath('test/files/test_venv'),
# 'script': '''
# import dopy
# print dopy.__license__
# '''
#
# }
# res = executor.execute(args)
# self.assertIsInstance(res, TaskResult)
# self.assertEqual(res.status, EXECUTION_SUCCESSFUL)
# self.assertEqual(res.message.strip(), 'MIT')
# def test_workdir_virtualenv_file(self):
# print 'Running test_workdir'
# executor = PythonExecutor()
# args = {
# 'virtualenv': os.path.abspath('test/files/test_venv'),
# 'file': 'test/files/test_dopy_import.py'
#
# }
# res = executor.execute(args)
# self.assertIsInstance(res, TaskResult)
# self.assertEqual(res.status, EXECUTION_SUCCESSFUL)
# self.assertEqual(res.message.strip(), 'MIT')
if __name__ == '__main__':
unittest.main()
| 32.434783
| 68
| 0.613941
| 377
| 3,730
| 5.909814
| 0.153846
| 0.121185
| 0.145422
| 0.062837
| 0.808797
| 0.799372
| 0.780072
| 0.780072
| 0.780072
| 0.646768
| 0
| 0.001087
| 0.260322
| 3,730
| 114
| 69
| 32.719298
| 0.806452
| 0.36059
| 0
| 0.589286
| 0
| 0
| 0.162602
| 0.012837
| 0
| 0
| 0
| 0
| 0.285714
| 0
| null | null | 0
| 0.107143
| null | null | 0.089286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ec017266b2fb97762a78e5c6db4e2c9076089b2
| 966
|
py
|
Python
|
python/Tests/tennis_test.py
|
XelekGakure/Tennis-Refactoring-Kata
|
3e7b0620b1bdbfcb79ac734aba6a19526f605609
|
[
"MIT"
] | null | null | null |
python/Tests/tennis_test.py
|
XelekGakure/Tennis-Refactoring-Kata
|
3e7b0620b1bdbfcb79ac734aba6a19526f605609
|
[
"MIT"
] | null | null | null |
python/Tests/tennis_test.py
|
XelekGakure/Tennis-Refactoring-Kata
|
3e7b0620b1bdbfcb79ac734aba6a19526f605609
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import pytest
# class TestTennis:
# @pytest.mark.parametrize('p1_points p2_points score p1_name p2_name'.split(), test_cases)
# def test_get_score_game1(self, p1_points, p2_points, score, p1_name, p2_name):
# game = play_game(TennisGame1, p1_points, p2_points, p1_name, p2_name)
# assert score == game.score()
#
# @pytest.mark.parametrize('p1_points p2_points score p1_name p2_name'.split(), test_cases)
# def test_get_score_game2(self, p1_points, p2_points, score, p1_name, p2_name):
# game = play_game(TennisGame2, p1_points, p2_points, p1_name, p2_name)
# assert score == game.score()
#
# @pytest.mark.parametrize('p1_points p2_points score p1_name p2_name'.split(), test_cases)
# def test_get_score_game3(self, p1_points, p2_points, score, p1_name, p2_name):
# game = play_game(TennisGame3, p1_points, p2_points, p1_name, p2_name)
# assert score == game.score()
| 43.909091
| 95
| 0.699793
| 145
| 966
| 4.310345
| 0.2
| 0.1152
| 0.144
| 0.2304
| 0.864
| 0.864
| 0.864
| 0.864
| 0.864
| 0.864
| 0
| 0.054156
| 0.178054
| 966
| 21
| 96
| 46
| 0.732997
| 0.947205
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 11
|
7b450ad25e56fa232095a9ad94f8244d28ee52fb
| 29,899
|
py
|
Python
|
src/rca/plot/plot_rca_timeseries.py
|
josephhardinee/rca
|
b50ce4557b366553495a7a958d8dc30985a8fbd6
|
[
"MIT"
] | 4
|
2020-03-03T14:32:46.000Z
|
2021-06-09T08:42:56.000Z
|
src/rca/plot/plot_rca_timeseries.py
|
josephhardinee/rca
|
b50ce4557b366553495a7a958d8dc30985a8fbd6
|
[
"MIT"
] | 1
|
2021-02-17T17:14:07.000Z
|
2021-02-17T17:14:07.000Z
|
src/rca/plot/plot_rca_timeseries.py
|
josephhardinee/rca
|
b50ce4557b366553495a7a958d8dc30985a8fbd6
|
[
"MIT"
] | 1
|
2020-03-03T14:32:48.000Z
|
2020-03-03T14:32:48.000Z
|
import matplotlib.pyplot as plt
import matplotlib as mpl
import matplotlib.dates as mdates
import numpy as np
import pandas as pd
import string
from matplotlib.dates import MonthLocator, DayLocator, WeekdayLocator, MO, TU, WE, TH, FR, SA, SU
def plot_rca_timeseries_oneradar(
rca_file, output_directory, baseline_date, polarization, scan_type, site, inst, start_date, end_date
):
"""
plot_rca_timeseries_oneradar
Parameters
----------
rca_file: str
path to RCA CSV file
output_directory: str
path to directory for output .png file(s)
baseline_date: str
YYYY-MM-DD format of baseline date in this dataset
polarization: str
specify the polarization(s) desired
'horizontal'
'dual'
scan_type: str
specify if the map is for PPI or RHI
'ppi'
'rhi'
site: str
site abbreviation
inst: str
instrument name
start_date: str
Start date of plot, form YYYY-MM-DD
end_date: str
End date of plot, form YYYY-MM-DD
"""
###############################################################
# Plotting rc parameters
# xtick
plt.rc('xtick', color='k', labelsize=10, direction='out')
plt.rc('xtick.major', size=4, pad=4)
plt.rc('xtick.minor', size=2, pad=4)
# ytick
plt.rc('ytick', color='k', labelsize=10, direction='in')
plt.rc('ytick.major', size=4, pad=4)
plt.rc('ytick.minor', size=2, pad=4)
# figure
plt.rc('figure', titlesize=12, figsize=[8,4], dpi=500, autolayout=False)
# legend
plt.rc('legend', loc='best')
# lines
plt.rc('lines', linewidth=0.5, linestyle='-', marker='o', markersize=3.0)
# font
plt.rc('font', family='sans', style='normal')
# text
plt.rc('mathtext', fontset='dejavusans')
# axes
plt.rc('axes', facecolor='white', linewidth=0.8, grid=True, titlesize=14, labelsize=12)
plt.rc('axes.grid', axis='both', which='both')
# dates
#plt.rc('date.autoformatter', day='%Y-%m-%d')
###############################################################
# Convert string dates to datetime for plotting
baseline_date = pd.to_datetime(baseline_date, format='%Y-%m-%d')
start_date = pd.to_datetime(start_date, format='%Y-%m-%d')
end_date = pd.to_datetime(end_date, format='%Y-%m-%d')
xlim = start_date, end_date
ylim = -4.0, 4.0
base_lw = 2.5
ytext = 2.0
xtext = 8.0
xtext0 = 2.0
c1 = 'k'
df = pd.read_csv(rca_file)
df['DATE'] = pd.to_datetime(df['DATE'], format='%Y-%m-%d')
df = df.sort_values(by="DATE")
h_mean = str(np.nanmean(df["RCA_H"]))[0:4] # slice only first 3 digits
h_std = str(np.nanstd(df["RCA_H"], ddof=1))[0:4]
h_min = str(min(df["RCA_H"]))[0:4]
h_max = str(max(df["RCA_H"]))[0:4]
h_text = (
" \n Mean: "
+ h_mean
+ " \n St. Dev.: "
+ h_std
+ " \n Min.: "
+ h_min
+ " \n Max.: "
+ h_max
+ ""
)
fig, ax = plt.subplots()
ax.axhline(0.0, linestyle="--", color="grey")
ax.plot(df["DATE"], df["RCA_H"], color=c1)
ax.scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100, label='Baseline day')
ax.set_ylabel("RCA value (dB)")
ax.set_title(
"Daily RCA values (Z$_H$) at "
+ site.upper()
+ " "
+ inst.upper()
+ " \n "
+ scan_type.upper()
)
#ax.set_ylim(ylim)
ax.set_xlim(xlim)
#ax.text(xtext, ytext, h_text)
ax.legend()
# biweek = WeekdayLocator(byweekday=MO, interval=2)
# week = WeekdayLocator(byweekday=MO, interval=1)
# days_format = mpl.dates.DateFormatter('%Y-%m-%d')
# ax.xaxis.set_major_locator(biweek)
# ax.xaxis.set_major_formatter(days_format)
# ax.xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
plt.savefig(output_directory + "rca_h_" + scan_type + "_" + site + inst + ".png")
if polarization == "dual":
v_mean = str(np.nanmean(df["RCA_V"]))[0:4] # slice only first 3 digits
v_std = str(np.nanstd(df["RCA_V"], ddof=1))[0:4]
v_min = str(min(df["RCA_V"]))[0:4]
v_max = str(max(df["RCA_V"]))[0:4]
v_text = (
" \n Mean: "
+ v_mean
+ " \n St. Dev.:"
+ v_std
+ " \n Min.: "
+ v_min
+ " \n Max.: "
+ v_max
+ ""
)
fig, ax = plt.subplots()
ax.axhline(0.0, linestyle="--", color="grey")
ax.plot(df["DATE"], df["RCA_V"], color=c1)
ax.scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
ax.set_ylabel("RCA value (dB)")
ax.set_title(
"Daily RCA values (Z$_V$) at "
+ site.upper()
+ " "
+ inst.upper()
+ " \n "
+ scan_type.upper()
)
ax.set_ylim(ylim)
ax.set_xlim(xlim)
ax.text(xtext, ytext, v_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mpl.dates.DateFormatter('%Y-%m-%d')
ax.xaxis.set_major_locator(biweek)
ax.xaxis.set_major_formatter(days_format)
ax.xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
plt.savefig(
output_directory + "rca_v_" + scan_type + "_" + site + inst + ".png"
)
# Plot H and V together (one plot or dual plot?)
fig, axes = plt.subplots(nrows=2, ncols=1, sharex=True, figsize=[8, 5])
axes[0].axhline(0.0, linestyle="--", color="grey")
axes[0].plot(df["DATE"], df["RCA_H"], color=c1)
axes[0].set_ylabel("RCA value (dB)")
axes[0].set_title(
"Daily RCA values (Z$_H$) at "
+ site.upper()
+ " "
+ inst.upper()
+ "\n "
+ scan_type.upper()
+ " \n Clutter map and Baseline: 2018-03-13"
)
axes[0].set_ylim(ylim)
axes[0].scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
axes[0].text(xtext, ytext, h_text)
axes[0].text(
0.03,
0.87,
"(" + string.ascii_lowercase[0] + ")",
transform=axes[0].transAxes,
size=20,
weight="regular",
)
axes[0].set_ylim(ylim)
axes[0].set_xlim(xlim)
#axes[0].text(xtext, ytext, v_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mpl.dates.DateFormatter('%Y-%m-%d')
axes[0].xaxis.set_major_locator(biweek)
axes[0].xaxis.set_major_formatter(days_format)
axes[0].xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
axes[1].axhline(0.0, linestyle="--", color="grey")
axes[1].plot(df["DATE"], df["RCA_V"], color=c1)
axes[1].set_ylabel("RCA value (dB)")
axes[1].set_title(
"Daily RCA values (Z$_V$) at "
+ site.upper()
+ " "
+ inst.upper()
+ "\n "
+ scan_type.upper()
+ ""
)
axes[1].set_ylim(ylim)
axes[1].scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
axes[1].text(xtext, ytext - 1, v_text)
axes[1].text(
0.03,
0.87,
"(" + string.ascii_lowercase[1] + ")",
transform=axes[1].transAxes,
size=20,
weight="regular",
)
axes[1].set_ylim(ylim)
axes[1].set_xlim(xlim)
#axes[1].text(xtext, ytext, v_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mpl.dates.DateFormatter('%Y-%m-%d')
axes[1].xaxis.set_major_locator(biweek)
axes[1].xaxis.set_major_formatter(days_format)
axes[1].xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
plt.savefig(
output_directory + "rca_hv_" + scan_type + "_" + site + inst + ".png"
)
def plot_rca_timeseries_tworadar(
rca_file1,
rca_file2,
output_directory,
baseline_date,
polarization,
scan_type,
site,
inst1,
inst2,
):
"""
plot_rca_timeseries_tworadar
Parameters
----------
rca_file1: str
path to RCA CSV file for radar 1
rca_file2: str
path to RCA CSV file for radar 2
output_directory: str
path to directory for output .png file(s)
baseline_date: str
YYYY-MM-DD format of baseline date in this dataset
polarization: str
specify the polarization(s) desired
'horizontal'
'dual'
scan_type: str
specify if the map is for PPI or RHI
'ppi'
'rhi'
'ppirhi'
site: str
site abbreviation
inst1: str
instrument name for radar 1
inst2: str
instrument name for radar 2
"""
###############################################################
# Plotting rc parameters
# xtick
plt.rc('xtick', color='k', labelsize=10, direction='out')
plt.rc('xtick.major', size=4, pad=4)
plt.rc('xtick.minor', size=2, pad=4)
# ytick
plt.rc('ytick', color='k', labelsize=10, direction='out')
plt.rc('ytick.major', size=4, pad=4)
plt.rc('ytick.minor', size=2, pad=4)
# figure
plt.rc('figure', titlesize=16, figsize=[8,4], dpi=500, autolayout=False)
# legend
plt.rc('legend', loc='best')
# lines
plt.rc('lines', linewidth=0.5, linestyle='-', marker='o', markersize=3.0)
# font
plt.rc('font', family='sans', style='normal')
# text
plt.rc('mathtext', fontset='dejavusans')
# axes
plt.rc('axes', facecolor='white', linewidth=0.8, grid=True, titlesize=14, labelsize=12)
plt.rc('axes.grid', axis='both', which='both')
# dates
#plt.rc('date.autoformatter', day='%Y-%m-%d')
###############################################################
xlim = '2018-11-01', '2019-04-01'
ylim = -4.0, 4.0
base_lw = 2.5
ytext = 2.0
xtext = 8.0
xtext0 = 2.0
c1 = 'k'
c2 = 'grey'
df1 = pd.read_csv(rca_file1)
df2 = pd.read_csv(rca_file2)
df1['DATE'] = pd.to_datetime(df1['DATE'], format='%Y-%m-%d')
df2['DATE'] = pd.to_datetime(df2['DATE'], format='%Y-%m-%d')
df1 = df1.sort_values(by="DATE")
df2 = df2.sort_values(by="DATE")
h_mean1 = str(np.nanmean(df1["RCA_H"]))[0:4] # slice only first 3 digits
h_std1 = str(np.nanstd(df1["RCA_H"], ddof=1))[0:4]
h_min1 = str(min(df1["RCA_H"]))[0:4]
h_max1 = str(max(df1["RCA_H"]))[0:4]
h_text1 = (
" \n Mean: "
+ h_mean1
+ " \n St. Dev.: "
+ h_std1
+ " \n Min.: "
+ h_min1
+ " \n Max.: "
+ h_max1
+ ""
)
h_mean2 = str(np.nanmean(df2["RCA_H"]))[0:4] # slice only first 3 digits
h_std2 = str(np.nanstd(df2["RCA_H"], ddof=1))[0:4]
h_min2 = str(min(df2["RCA_H"]))[0:4]
h_max2 = str(max(df2["RCA_H"]))[0:4]
h_text2 = (
" \n Mean: "
+ h_mean2
+ " \n St. Dev.: "
+ h_std2
+ " \n Min.: "
+ h_min2
+ " \n Max.: "
+ h_max2
+ ""
)
fig, ax = plt.subplots()
ax.axhline(0.0, linestyle="--", color="grey")
ax.plot(df1["DATE"], df1["RCA_H"], color=c1, label=inst1.upper())
ax.plot(df2["DATE"], df2["RCA_H"], color=c2, label=inst2.upper())
ax.scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
ax.set_ylabel("RCA value (dB)")
ax.set_title(
"Daily RCA values (Z$_H$) at "
+ site.upper()
+ " "
+ inst1.upper()
+ " and "
+ inst2.upper()
+ " \n "
+ scan_type.upper()
)
ax.set_ylim(ylim)
ax.set_xlim(xlim)
#ax.text(xtext, ytext, h_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mpl.dates.DateFormatter('%Y-%m-%d')
ax.xaxis.set_major_locator(biweek)
ax.xaxis.set_major_formatter(days_format)
ax.xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
plt.legend()
plt.savefig(
output_directory
+ "rca_h_"
+ scan_type
+ "_"
+ site
+ inst1
+ "_"
+ site
+ inst2
+ ".png"
)
if polarization == "dual":
v_mean1 = str(np.nanmean(df1["RCA_V"]))[0:4] # slice only first 3 digits
v_std1 = str(np.nanstd(df1["RCA_V"], ddof=1))[0:4]
v_min1 = str(min(df1["RCA_V"]))[0:4]
v_max1 = str(max(df1["RCA_V"]))[0:4]
v_text1 = (
" \n Mean: "
+ v_mean1
+ " \n St. Dev.:"
+ v_std1
+ " \n Min.: "
+ v_min1
+ " \n Max.: "
+ v_max1
+ ""
)
v_mean2 = str(np.nanmean(df2["RCA_V"]))[0:4] # slice only first 3 digits
v_std2 = str(np.nanstd(df2["RCA_V"], ddof=1))[0:4]
v_min2 = str(min(df2["RCA_V"]))[0:4]
v_max2 = str(max(df2["RCA_V"]))[0:4]
v_text2 = (
" \n Mean: "
+ v_mean2
+ " \n St. Dev.:"
+ v_std2
+ " \n Min.: "
+ v_min2
+ " \n Max.: "
+ v_max2
+ ""
)
fig, ax = plt.subplots()
ax.axhline(0.0, linestyle="--", color="grey")
ax.plot(df1["DATE"], df1["RCA_V"], color=c1, label=inst1.upper())
ax.plot(df2["DATE"], df2["RCA_V"], color=c2, label=inst2.upper())
ax.scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
ax.set_ylabel("RCA value (dB)")
ax.set_title(
"Daily RCA values ($Z_V$) at "
+ site.upper()
+ " "
+ inst1.upper()
+ " and "
+ inst2.upper()
+ " \n "
+ scan_type.upper()
)
ax.set_ylim(ylim)
ax.set_xlim(xlim)
#ax.text(xtext, ytext, h_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mpl.dates.DateFormatter('%Y-%m-%d')
ax.xaxis.set_major_locator(biweek)
ax.xaxis.set_major_formatter(days_format)
ax.xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
plt.legend()
plt.savefig(
output_directory
+ "rca_v_"
+ scan_type
+ "_"
+ site
+ inst1
+ "_"
+ site
+ inst2
+ ".png"
)
# Plot H and V together (one plot or dual plot?)
fig, axes = plt.subplots(nrows=2, ncols=1, sharex=True, figsize=[8, 5])
axes[0].axhline(0.0, linestyle="--", color="grey")
axes[0].plot(df1["DATE"], df1["RCA_H"], color=c1, label=inst1.upper())
axes[0].plot(df2["DATE"], df2["RCA_H"], color=c2, label=inst2.upper())
axes[0].set_ylabel("RCA value (dB)")
axes[0].set_title(
"Daily RCA values (Z$_H$) at "
+ site.upper()
+ " "
+ inst1.upper()
+ " and "
+ inst2.upper()
+ "\n "
+ scan_type.upper()
)
axes[0].set_ylim(ylim)
axes[0].scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
axes[0].text(
0.03,
0.87,
"(" + string.ascii_lowercase[0] + ")",
transform=axes[0].transAxes,
size=20,
weight="regular",
)
axes[0].set_ylim(ylim)
axes[0].set_xlim(xlim)
#axes[0].text(xtext, ytext, v_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mpl.dates.DateFormatter('%Y-%m-%d')
axes[0].xaxis.set_major_locator(biweek)
axes[0].xaxis.set_major_formatter(days_format)
axes[0].xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
axes[1].axhline(0.0, linestyle="--", color="grey")
axes[1].plot(df1["DATE"], df1["RCA_V"], color=c1, label=inst1.upper())
axes[1].plot(df2["DATE"], df2["RCA_V"], color=c2, label=inst2.upper())
axes[1].set_ylabel("RCA value (dB)")
axes[1].set_title(
"Daily RCA values (Z$_V$) at "
+ site.upper()
+ " "
+ inst1.upper()
+ " and "
+ inst2.upper()
+ "\n "
+ scan_type.upper()
+ ""
)
axes[1].set_ylim(ylim)
axes[1].scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
axes[1].text(
0.03,
0.87,
"(" + string.ascii_lowercase[1] + ")",
transform=axes[1].transAxes,
size=20,
weight="regular",
)
axes[1].set_ylim(ylim)
axes[1].set_xlim(xlim)
#axes[1].text(xtext, ytext, v_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mpl.dates.DateFormatter('%Y-%m-%d')
axes[1].xaxis.set_major_locator(biweek)
axes[1].xaxis.set_major_formatter(days_format)
axes[1].xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
plt.legend()
plt.savefig(
output_directory
+ "rca_hv_"
+ scan_type
+ "_"
+ site
+ inst1
+ "_"
+ site
+ inst2
+ ".png"
)
def plot_rca_timeseries_threeradar(
rca_file1,
rca_file2,
rca_file3,
output_directory,
baseline_date,
polarization,
scan_type,
site,
inst1,
inst2,
inst3,
):
"""
plot_rca_timeseries_threeradar
Parameters
----------
rca_file1: str
path to RCA CSV file for radar 1
rca_file2: str
path to RCA CSV file for radar 2
rca_file3: str
path to RCA CSV file for radar 3
output_directory: str
path to directory for output .png file(s)
baseline_date: str
YYYY-MM-DD format of baseline date in this dataset
polarization: str
specify the polarization(s) desired
'horizontal'
'dual'
scan_type: str
specify if the map is for PPI or RHI
'ppi'
'rhi'
site: str
site abbreviation
inst1: str
instrument name for radar 1
inst2: str
instrument name for radar 2
inst3: str
instrument name for radar 3
location: str
site and instrument (use for plot title)
"""
###############################################################
# Plotting rc parameters
# xtick
plt.rc('xtick', color='k', labelsize=10, direction='out')
plt.rc('xtick.major', size=4, pad=4)
plt.rc('xtick.minor', size=2, pad=4)
# ytick
plt.rc('ytick', color='k', labelsize=10, direction='out')
plt.rc('ytick.major', size=4, pad=4)
plt.rc('ytick.minor', size=2, pad=4)
# figure
plt.rc('figure', titlesize=16, figsize=[8,4], dpi=500, autolayout=False)
# legend
plt.rc('legend', loc='best')
# lines
plt.rc('lines', linewidth=0.5, linestyle='-', marker='o', markersize=3.0)
# font
plt.rc('font', family='sans', style='normal')
# text
plt.rc('mathtext', fontset='dejavusans')
# axes
plt.rc('axes', facecolor='white', linewidth=0.8, grid=True, titlesize=14, labelsize=12)
plt.rc('axes.grid', axis='both', which='both')
# dates
#plt.rc('date.autoformatter', day='%Y-%m-%d')
###############################################################
xlim = '2018-11-01', '2019-04-01'
ylim = -10.0, 20.0
base_lw = 2.5
ytext = 2.0
xtext = 8.0
xtext0 = 2.0
c1 = 'k'
c2 = 'grey'
c3 = 'r'
df1 = pd.read_csv(rca_file1)
df2 = pd.read_csv(rca_file2)
df3 = pd.read_csv(rca_file3)
df1['DATE'] = pd.to_datetime(df1['DATE'], format='%Y-%m-%d')
df2['DATE'] = pd.to_datetime(df2['DATE'], format='%Y-%m-%d')
df3['DATE'] = pd.to_datetime(df3['DATE'], format='%Y-%m-%d')
df1 = df1.sort_values(by="DATE")
df2 = df2.sort_values(by="DATE")
df3 = df3.sort_values(by="DATE")
h_mean1 = str(np.nanmean(df1["RCA_H"]))[0:4] # slice only first 3 digits
h_std1 = str(np.nanstd(df1["RCA_H"], ddof=1))[0:4]
h_min1 = str(min(df1["RCA_H"]))[0:4]
h_max1 = str(max(df1["RCA_H"]))[0:4]
h_text1 = (
" \n Mean: "
+ h_mean1
+ " \n St. Dev.: "
+ h_std1
+ " \n Min.: "
+ h_min1
+ " \n Max.: "
+ h_max1
+ ""
)
h_mean2 = str(np.nanmean(df2["RCA_H"]))[0:4] # slice only first 3 digits
h_std2 = str(np.nanstd(df2["RCA_H"], ddof=1))[0:4]
h_min2 = str(min(df2["RCA_H"]))[0:4]
h_max2 = str(max(df2["RCA_H"]))[0:4]
h_text2 = (
" \n Mean: "
+ h_mean2
+ " \n St. Dev.: "
+ h_std2
+ " \n Min.: "
+ h_min2
+ " \n Max.: "
+ h_max2
+ ""
)
h_mean3 = str(np.nanmean(df3["RCA_H"]))[0:4] # slice only first 3 digits
h_std3 = str(np.nanstd(df3["RCA_H"], ddof=1))[0:4]
h_min3 = str(min(df3["RCA_H"]))[0:4]
h_max3 = str(max(df3["RCA_H"]))[0:4]
h_text3 = (
" \n Mean: "
+ h_mean3
+ " \n St. Dev.: "
+ h_std3
+ " \n Min.: "
+ h_min3
+ " \n Max.: "
+ h_max3
+ ""
)
fig, ax = plt.subplots()
ax.axhline(0.0, linestyle="--", color="grey")
ax.plot(df1["DATE"], df1["RCA_H"], color=c1, label=inst1.upper())
ax.plot(df2["DATE"], df2["RCA_H"], color=c2, label=inst2.upper())
ax.plot(df3["DATE"], df3["RCA_H"], color=c3, label=inst3.upper())
ax.scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
ax.set_ylabel("RCA value (dB)")
ax.set_title(
"Daily RCA values (Z$_H$) at "
+ site.upper()
+ " "
+ inst1.upper()
+ ", "
+ inst2.upper()
+ ", "
+ inst3.upper()
+ " \n "
+ scan_type.upper()
)
ax.set_ylim(ylim)
ax.set_xlim(xlim)
#ax.text(xtext, ytext, h_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mpl.dates.DateFormatter('%Y-%m-%d')
ax.xaxis.set_major_locator(biweek)
ax.xaxis.set_major_formatter(days_format)
ax.xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
plt.legend()
plt.savefig(
output_directory
+ "rca_h_"
+ scan_type
+ "_"
+ site
+ inst1
+ "_"
+ site
+ inst2
+ "_"
+ site
+ inst3
+ ".png"
)
if polarization == "dual":
v_mean1 = str(np.nanmean(df1["RCA_V"]))[0:4] # slice only first 3 digits
v_std1 = str(np.nanstd(df1["RCA_V"], ddof=1))[0:4]
v_min1 = str(min(df1["RCA_V"]))[0:4]
v_max1 = str(max(df1["RCA_V"]))[0:4]
v_text1 = (
" \n Mean: "
+ v_mean1
+ " \n St. Dev.:"
+ v_std1
+ " \n Min.: "
+ v_min1
+ " \n Max.: "
+ v_max1
+ ""
)
v_mean2 = str(np.nanmean(df2["RCA_V"]))[0:4] # slice only first 3 digits
v_std2 = str(np.nanstd(df2["RCA_V"], ddof=1))[0:4]
v_min2 = str(min(df2["RCA_V"]))[0:4]
v_max2 = str(max(df2["RCA_V"]))[0:4]
v_text2 = (
" \n Mean: "
+ v_mean2
+ " \n St. Dev.:"
+ v_std2
+ " \n Min.: "
+ v_min2
+ " \n Max.: "
+ v_max2
+ ""
)
v_mean3 = str(np.nanmean(df3["RCA_V"]))[0:4] # slice only first 3 digits
v_std3 = str(np.nanstd(df3["RCA_V"], ddof=1))[0:4]
v_min3 = str(min(df3["RCA_V"]))[0:4]
v_max3 = str(max(df3["RCA_V"]))[0:4]
v_text3 = (
" \n Mean: "
+ v_mean3
+ " \n St. Dev.:"
+ v_std3
+ " \n Min.: "
+ v_min3
+ " \n Max.: "
+ v_max3
+ ""
)
fig, ax = plt.subplots(figsize=[8, 4])
ax.axhline(0.0, linestyle="--", color="grey")
ax.plot(df1["DATE"], df1["RCA_V"], color=c1, label=inst1.upper())
ax.plot(df2["DATE"], df2["RCA_V"], color=c2, label=inst2.upper())
ax.plot(df3["DATE"], df3["RCA_V"], color=c3, label=inst3.upper())
ax.scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
ax.set_ylabel("RCA value (dB)")
ax.set_title(
"Daily RCA values (Z$_V$) at "
+ site.upper()
+ " "
+ inst1.upper()
+ ", "
+ inst2.upper()
+ ", "
+ inst3.upper()
+ " \n "
+ scan_type.upper()
)
ax.set_ylim(ylim)
ax.set_xlim(xlim)
#ax.text(xtext, ytext, v_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mdates.DateFormatter('%Y-%m-%d')
ax.xaxis.set_major_locator(biweek)
ax.xaxis.set_major_formatter(days_format)
ax.xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
plt.legend()
plt.savefig(
output_directory
+ "rca_v_"
+ scan_type
+ "_"
+ site
+ inst1
+ "_"
+ site
+ inst2
+ "_"
+ site
+ inst3
+ ".png"
)
# Plot H and V together (one plot or dual plot?)
fig, axes = plt.subplots(nrows=2, ncols=1, sharex=True, figsize=[8, 5])
axes[0].axhline(0.0, linestyle="--", color="grey")
axes[0].plot(df1["DATE"], df1["RCA_H"], color=c1, label=inst1.upper())
axes[0].plot(df2["DATE"], df2["RCA_H"], color=c2, label=inst2.upper())
axes[0].plot(df3["DATE"], df3["RCA_H"], color=c3, label=inst3.upper())
axes[0].set_ylabel("RCA value (dB)")
axes[0].set_title(
"Daily RCA values (Z$_H$) at "
+ site.upper()
+ " "
+ inst1.upper()
+ ", "
+ inst2.upper()
+ ", "
+ inst3.upper()
+ "\n "
+ scan_type.upper()
)
axes[0].set_ylim(ylim)
axes[0].scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
axes[0].text(
0.03,
0.87,
"(" + string.ascii_lowercase[0] + ")",
transform=axes[0].transAxes,
size=20,
weight="regular",
)
axes[0].set_ylim(ylim)
axes[0].set_xlim(xlim)
#axes[0].text(xtext, ytext, v_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mpl.dates.DateFormatter('%Y-%m-%d')
axes[0].xaxis.set_major_locator(biweek)
axes[0].xaxis.set_major_formatter(days_format)
axes[0].xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
plt.legend()
axes[1].axhline(0.0, linestyle="--", color="grey")
axes[1].plot(df1["DATE"], df1["RCA_V"], color=c1, label=inst1.upper())
axes[1].plot(df2["DATE"], df2["RCA_V"], color=c2, label=inst2.upper())
axes[1].plot(df3["DATE"], df3["RCA_V"], color=c3, label=inst3.upper())
axes[1].set_ylabel("RCA value (dB)")
axes[1].set_title(
"Daily RCA values (Z$_V$) at "
+ site.upper()
+ " "
+ inst1.upper()
+ ", "
+ inst2.upper()
+ ", "
+ inst3.upper()
+ "\n "
+ scan_type.upper()
)
axes[1].set_ylim(ylim)
axes[1].scatter(baseline_date, 0.0, marker="D", linewidth=base_lw, color="b", zorder=100)
axes[1].text(
0.03,
0.87,
"(" + string.ascii_lowercase[1] + ")",
transform=axes[1].transAxes,
size=20,
weight="regular",
)
axes[1].set_ylim(ylim)
axes[1].set_xlim(xlim)
#axes[1].text(xtext, ytext, v_text)
biweek = WeekdayLocator(byweekday=MO, interval=2)
week = WeekdayLocator(byweekday=MO, interval=1)
days_format = mpl.dates.DateFormatter('%Y-%m-%d')
axes[1].xaxis.set_major_locator(biweek)
axes[1].xaxis.set_major_formatter(days_format)
axes[1].xaxis.set_minor_locator(week)
plt.gcf().autofmt_xdate()
plt.legend()
plt.savefig(
output_directory
+ "rca_hv_"
+ scan_type
+ "_"
+ site
+ inst1
+ "_"
+ site
+ inst2
+ "_"
+ site
+ inst3
+ ".png"
)
| 30.20101
| 110
| 0.499582
| 3,838
| 29,899
| 3.747525
| 0.06592
| 0.006953
| 0.005006
| 0.055065
| 0.930752
| 0.910172
| 0.893555
| 0.88695
| 0.878746
| 0.873879
| 0
| 0.043351
| 0.327235
| 29,899
| 989
| 111
| 30.231547
| 0.671688
| 0.11492
| 0
| 0.838019
| 0
| 0
| 0.104543
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004016
| false
| 0
| 0.009371
| 0
| 0.013387
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7b5b14d1c87c8876a8432388b74a96233a6e25e3
| 932
|
py
|
Python
|
hash/hash_blake2.py
|
gorgeousbubble/Nightmare
|
b374b48877898b6193081b7a8a6d2fb571816c75
|
[
"Apache-2.0"
] | 1
|
2019-10-24T15:47:18.000Z
|
2019-10-24T15:47:18.000Z
|
hash/hash_blake2.py
|
gorgeousbubble/Nightmare
|
b374b48877898b6193081b7a8a6d2fb571816c75
|
[
"Apache-2.0"
] | null | null | null |
hash/hash_blake2.py
|
gorgeousbubble/Nightmare
|
b374b48877898b6193081b7a8a6d2fb571816c75
|
[
"Apache-2.0"
] | 3
|
2019-10-24T15:47:25.000Z
|
2020-11-01T01:26:41.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import hashlib
def hash_blake2b(s):
blake2b = hashlib.blake2b(digest_size=16)
blake2b.update(s.encode('utf-8'))
return blake2b.hexdigest()
def hash_blake2b_encode(s):
blake2b = hashlib.blake2b(digest_size=16)
blake2b.update(s.encode('utf-8'))
return blake2b.hexdigest()
def hash_blake2b_check(s, r):
blake2b = hashlib.blake2b(digest_size=16)
blake2b.update(s.encode('utf-8'))
return blake2b.hexdigest() == r
def hash_blake2s(s):
blake2s = hashlib.blake2s(digest_size=16)
blake2s.update(s.encode('utf-8'))
return blake2s.hexdigest()
def hash_blake2s_encode(s):
blake2s = hashlib.blake2s(digest_size=16)
blake2s.update(s.encode('utf-8'))
return blake2s.hexdigest()
def hash_blake2s_check(s, r):
blake2s = hashlib.blake2s(digest_size=16)
blake2s.update(s.encode('utf-8'))
return blake2s.hexdigest() == r
| 23.3
| 45
| 0.69206
| 131
| 932
| 4.801527
| 0.175573
| 0.044515
| 0.114467
| 0.152623
| 0.848967
| 0.848967
| 0.848967
| 0.848967
| 0.848967
| 0.848967
| 0
| 0.063694
| 0.157725
| 932
| 39
| 46
| 23.897436
| 0.73758
| 0.046137
| 0
| 0.64
| 0
| 0
| 0.033822
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.24
| false
| 0
| 0.04
| 0
| 0.52
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7bba49559d3ba096dc31e4602a929ba133fd6ffb
| 6,386
|
py
|
Python
|
data/iwslt.py
|
fallcat/synst
|
0fa4adffa825af4a62b6e739b59c4125a7b6698e
|
[
"BSD-3-Clause"
] | 1
|
2019-09-08T13:55:21.000Z
|
2019-09-08T13:55:21.000Z
|
data/iwslt.py
|
fallcat/synst
|
0fa4adffa825af4a62b6e739b59c4125a7b6698e
|
[
"BSD-3-Clause"
] | 2
|
2019-10-02T15:23:55.000Z
|
2019-10-16T02:38:25.000Z
|
data/iwslt.py
|
fallcat/synst
|
0fa4adffa825af4a62b6e739b59c4125a7b6698e
|
[
"BSD-3-Clause"
] | null | null | null |
'''
Data loading and pre-processing for the IWSLT'16 EN-DE dataset.
'''
import re
from data.annotated import AnnotatedTextDataset
class IWSLTDataset(AnnotatedTextDataset):
    ''' Configuration for the IWSLT'16 EN-DE translation dataset. '''
    NAME = 'iwslt'
    LANGUAGE_PAIR = ('en', 'de')

    # WORD_COUNT = (4215814, 4186988)
    WORD_COUNT = (1.0360595565014956, 1)

    URLS = [
        ('iwslt_en_de.tgz', 'https://wit3.fbk.eu/archive/2016-01/texts/en/de/en-de.tgz'),
        ('iwslt_test_en_de.tgz', 'https://wit3.fbk.eu/archive/2016-01-test/texts/en/de/en-de.tgz'),
        ('iwslt_test_de_en.tgz', 'https://wit3.fbk.eu/archive/2016-01-test/texts/de/en/de-en.tgz'),
    ]

    # (source, target) raw file pairs per split; the repetitive valid/test
    # lists are generated from the release naming pattern.
    RAW_SPLITS = {
        'train': [
            ('en-de/train.tags.en-de.en', 'en-de/train.tags.en-de.de')
        ],
        'dev': [
            ('en-de/IWSLT16.TED.tst2013.en-de.en.xml', 'en-de/IWSLT16.TED.tst2013.en-de.de.xml'),
        ],
        'valid': [
            (f'en-de/IWSLT16.TED.{year}.en-de.en.xml',
             f'en-de/IWSLT16.TED.{year}.en-de.de.xml')
            for year in ('dev2010', 'tst2010', 'tst2011', 'tst2012', 'tst2013', 'tst2014')
        ],
        'test': [
            (f'en-de/IWSLT16.{corpus}.{year}.en-de.en.xml',
             f'de-en/IWSLT16.{corpus}.{year}.de-en.de.xml')
            for corpus, year in (('QED', 'tst2016'), ('TED', 'tst2015'), ('TED', 'tst2016'))
        ]
    }

    SPLITS = {split: f'{split}.tok' for split in ('train', 'valid', 'dev', 'test')}

    # Drop whole metadata elements (e.g. <url>...</url>) from the raw text.
    IGNORE_REGEX_LIST = [
        re.compile(fr'<\s*{tag}\s*[^>]*\s*>[^<]*<\s*/{tag}\s*>')
        for tag in ('url', 'keywords', 'speaker', 'talkid',
                    'title', 'description', 'reviewer', 'translator')
    ]
class IWSLTEnViDataset(AnnotatedTextDataset):
    ''' Class that encapsulates the IWSLT'15 EN-VI dataset '''
    NAME = 'iwslt'
    LANGUAGE_PAIR = ('en', 'vi')
    # WORD_COUNT = (4215814, 4186988)
    # NOTE(review): this ratio was carried over from the EN-DE config —
    # recompute for EN-VI if exact source/target length ratios matter.
    WORD_COUNT = (1.0360595565014956, 1)
    URLS = [
        ('iwslt_en_vi.tgz', 'https://wit3.fbk.eu/archive/2015-01/texts/en/vi/en-vi.tgz'),
        ('iwslt_test_en_vi.tgz', 'https://wit3.fbk.eu/archive/2015-01-test/texts/en/vi/en-vi.tgz'),
        ('iwslt_test_vi_en.tgz', 'https://wit3.fbk.eu/archive/2015-01-test/texts/vi/en/vi-en.tgz'),
    ]
    RAW_SPLITS = {
        # Fixed: 'train' and 'dev' previously referenced the EN-DE files
        # ('en-de/train.tags.en-de.en', ...), which are not in the en-vi
        # archives listed in URLS above.
        'train': [
            ('en-vi/train.tags.en-vi.en', 'en-vi/train.tags.en-vi.vi')
        ],
        'dev': [
            ('en-vi/IWSLT15.TED.tst2013.en-vi.en.xml', 'en-vi/IWSLT15.TED.tst2013.en-vi.vi.xml'),
        ],
        'valid': [
            ('en-vi/IWSLT15.TED.dev2010.en-vi.en.xml', 'en-vi/IWSLT15.TED.dev2010.en-vi.vi.xml'),
            ('en-vi/IWSLT15.TED.tst2010.en-vi.en.xml', 'en-vi/IWSLT15.TED.tst2010.en-vi.vi.xml'),
            ('en-vi/IWSLT15.TED.tst2011.en-vi.en.xml', 'en-vi/IWSLT15.TED.tst2011.en-vi.vi.xml'),
            ('en-vi/IWSLT15.TED.tst2012.en-vi.en.xml', 'en-vi/IWSLT15.TED.tst2012.en-vi.vi.xml'),
            ('en-vi/IWSLT15.TED.tst2013.en-vi.en.xml', 'en-vi/IWSLT15.TED.tst2013.en-vi.vi.xml'),
        ],
        'test': [
            # Fixed: targets previously lived under a 'de-vi/' directory; the
            # reverse-direction archive downloaded above extracts to 'vi-en/'.
            # TODO(review): confirm the IWSLT16.* file names actually exist
            # in the 2015-01-test archives (they may be IWSLT15.*).
            ('en-vi/IWSLT16.QED.tst2016.en-vi.en.xml', 'vi-en/IWSLT16.QED.tst2016.vi-en.vi.xml'),
            ('en-vi/IWSLT16.TED.tst2015.en-vi.en.xml', 'vi-en/IWSLT16.TED.tst2015.vi-en.vi.xml'),
            ('en-vi/IWSLT16.TED.tst2016.en-vi.en.xml', 'vi-en/IWSLT16.TED.tst2016.vi-en.vi.xml'),
        ]
    }
    SPLITS = {
        'train': 'train.tok',
        'valid': 'valid.tok',
        'dev': 'dev.tok',
        'test': 'test.tok'
    }
    # Drop whole metadata elements (e.g. <url>...</url>) from the raw text.
    IGNORE_REGEX_LIST = [
        re.compile(fr'<\s*{tag}\s*[^>]*\s*>[^<]*<\s*/{tag}\s*>')
        for tag in
        (
            'url', 'keywords', 'speaker', 'talkid',
            'title', 'description', 'reviewer', 'translator'
        )
    ]
class IWSLTEnJaDataset(AnnotatedTextDataset):
    ''' Class that encapsulates the IWSLT dataset '''
    # NOTE(review): LANGUAGE_PAIR declares ('en', 'ja'), yet every URL and
    # split path below is copied verbatim from the EN-DE configuration
    # (IWSLTDataset). This looks like an unfinished placeholder — confirm
    # the intended en-ja archive URLs and file names before using this class.
    NAME = 'iwslt'
    LANGUAGE_PAIR = ('en', 'ja')
    # WORD_COUNT = (4215814, 4186988)
    WORD_COUNT = (1.0360595565014956, 1)
    # NOTE(review): en-de archives, not en-ja — see class-level note.
    URLS = [
        ('iwslt_en_de.tgz', 'https://wit3.fbk.eu/archive/2016-01/texts/en/de/en-de.tgz'),
        ('iwslt_test_en_de.tgz', 'https://wit3.fbk.eu/archive/2016-01-test/texts/en/de/en-de.tgz'),
        ('iwslt_test_de_en.tgz', 'https://wit3.fbk.eu/archive/2016-01-test/texts/de/en/de-en.tgz'),
    ]
    # NOTE(review): all split paths are EN-DE files — see class-level note.
    RAW_SPLITS = {
        'train': [
            ('en-de/train.tags.en-de.en', 'en-de/train.tags.en-de.de')
        ],
        'dev': [
            ('en-de/IWSLT16.TED.tst2013.en-de.en.xml', 'en-de/IWSLT16.TED.tst2013.en-de.de.xml'),
        ],
        'valid': [
            ('en-de/IWSLT16.TED.dev2010.en-de.en.xml', 'en-de/IWSLT16.TED.dev2010.en-de.de.xml'),
            ('en-de/IWSLT16.TED.tst2010.en-de.en.xml', 'en-de/IWSLT16.TED.tst2010.en-de.de.xml'),
            ('en-de/IWSLT16.TED.tst2011.en-de.en.xml', 'en-de/IWSLT16.TED.tst2011.en-de.de.xml'),
            ('en-de/IWSLT16.TED.tst2012.en-de.en.xml', 'en-de/IWSLT16.TED.tst2012.en-de.de.xml'),
            ('en-de/IWSLT16.TED.tst2013.en-de.en.xml', 'en-de/IWSLT16.TED.tst2013.en-de.de.xml'),
            ('en-de/IWSLT16.TED.tst2014.en-de.en.xml', 'en-de/IWSLT16.TED.tst2014.en-de.de.xml'),
        ],
        'test': [
            ('en-de/IWSLT16.QED.tst2016.en-de.en.xml', 'de-en/IWSLT16.QED.tst2016.de-en.de.xml'),
            ('en-de/IWSLT16.TED.tst2015.en-de.en.xml', 'de-en/IWSLT16.TED.tst2015.de-en.de.xml'),
            ('en-de/IWSLT16.TED.tst2016.en-de.en.xml', 'de-en/IWSLT16.TED.tst2016.de-en.de.xml'),
        ]
    }
    # Tokenized output file name per split.
    SPLITS = {
        'train': 'train.tok',
        'valid': 'valid.tok',
        'dev': 'dev.tok',
        'test': 'test.tok'
    }
    # Drop whole metadata elements (e.g. <url>...</url>) from the raw text.
    IGNORE_REGEX_LIST = [
        re.compile(fr'<\s*{tag}\s*[^>]*\s*>[^<]*<\s*/{tag}\s*>')
        for tag in
        (
            'url', 'keywords', 'speaker', 'talkid',
            'title', 'description', 'reviewer', 'translator'
        )
    ]
| 40.935897
| 99
| 0.55559
| 952
| 6,386
| 3.682773
| 0.086134
| 0.118654
| 0.106674
| 0.127781
| 0.938677
| 0.932972
| 0.932972
| 0.917855
| 0.78494
| 0.78494
| 0
| 0.10225
| 0.220482
| 6,386
| 155
| 100
| 41.2
| 0.602049
| 0.045255
| 0
| 0.691729
| 0
| 0.067669
| 0.590542
| 0.407645
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015038
| 0
| 0.195489
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7405da4fecfa04b1c9df6a15b8743e81b25c26e1
| 153
|
py
|
Python
|
swivel/contracts/__init__.py
|
Swivel-Finance/swivel-py
|
de6979176a2f9f75f089fb28c0bec1e5014974b9
|
[
"MIT"
] | null | null | null |
swivel/contracts/__init__.py
|
Swivel-Finance/swivel-py
|
de6979176a2f9f75f089fb28c0bec1e5014974b9
|
[
"MIT"
] | 2
|
2022-01-19T23:30:33.000Z
|
2022-01-20T18:42:08.000Z
|
swivel/contracts/__init__.py
|
Swivel-Finance/swivel-py
|
de6979176a2f9f75f089fb28c0bec1e5014974b9
|
[
"MIT"
] | null | null | null |
from swivel.contracts.vault_tracker import VaultTracker
from swivel.contracts.market_place import MarketPlace
from swivel.contracts.swivel import Swivel
| 38.25
| 55
| 0.882353
| 20
| 153
| 6.65
| 0.5
| 0.225564
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 153
| 3
| 56
| 51
| 0.943262
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b5a9f54b5f264d627f4d1c620c403538288fa6e1
| 1,766
|
py
|
Python
|
src/test_priorityq.py
|
regenalgrant/datastructures
|
e6030a28fb5f6f045dd008fa3c32281737c3aac9
|
[
"MIT"
] | null | null | null |
src/test_priorityq.py
|
regenalgrant/datastructures
|
e6030a28fb5f6f045dd008fa3c32281737c3aac9
|
[
"MIT"
] | null | null | null |
src/test_priorityq.py
|
regenalgrant/datastructures
|
e6030a28fb5f6f045dd008fa3c32281737c3aac9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Prioityq tests."""
import pytest
def test_insert():
    """Inserting stores the value under its priority key."""
    from priorityq import PriorityQueue
    queue = PriorityQueue()
    queue.insert('data', 1)
    assert queue.dict[1] == ['data']
def test_pop_empty():
    """Popping an empty PriorityQueue raises IndexError."""
    from priorityq import PriorityQueue
    empty_queue = PriorityQueue()
    with pytest.raises(IndexError):
        empty_queue.pop()
def test_pop():
    """Pop returns the inserted value."""
    from priorityq import PriorityQueue
    queue = PriorityQueue()
    queue.insert('data', 1)
    popped = queue.pop()
    assert popped == 'data'
def test_pop_multiple():
    """Pop returns the highest-priority value when several are queued."""
    from priorityq import PriorityQueue
    queue = PriorityQueue()
    for value, priority in (('data_one', 1), ('data_two', 2), ('data_three', 3)):
        queue.insert(value, priority)
    assert queue.pop() == "data_one"
def test_peek_empty():
    """Peeking at an empty PriorityQueue raises IndexError."""
    from priorityq import PriorityQueue
    empty_queue = PriorityQueue()
    with pytest.raises(IndexError):
        empty_queue.peek()
def test_peek():
    """Peek returns the inserted value without needing a pop first."""
    from priorityq import PriorityQueue
    queue = PriorityQueue()
    queue.insert('data', 1)
    assert queue.peek() == "data"
def test_peek_multiple():
    """Peek returns the highest-priority value when several are queued."""
    from priorityq import PriorityQueue
    queue = PriorityQueue()
    for value, priority in (('data_one', 1), ('data_two', 2), ('data_three', 3)):
        queue.insert(value, priority)
    assert queue.peek() == "data_one"
| 27.59375
| 61
| 0.691393
| 211
| 1,766
| 5.587678
| 0.184834
| 0.234097
| 0.254453
| 0.167939
| 0.741306
| 0.715861
| 0.715861
| 0.715861
| 0.715861
| 0.715861
| 0
| 0.007666
| 0.187429
| 1,766
| 63
| 62
| 28.031746
| 0.813937
| 0.157418
| 0
| 0.625
| 0
| 0
| 0.06358
| 0
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.175
| false
| 0
| 0.2
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b5b1d0f596a01aed8d9a8156f95723237ef55dc0
| 32,290
|
tac
|
Python
|
optimization/spim_tests/ours/queue.tac
|
luispadron/Decaf-Lang
|
d3e77a15d2a9c118c53255c0be82eb2ef1023c00
|
[
"MIT"
] | null | null | null |
optimization/spim_tests/ours/queue.tac
|
luispadron/Decaf-Lang
|
d3e77a15d2a9c118c53255c0be82eb2ef1023c00
|
[
"MIT"
] | null | null | null |
optimization/spim_tests/ours/queue.tac
|
luispadron/Decaf-Lang
|
d3e77a15d2a9c118c53255c0be82eb2ef1023c00
|
[
"MIT"
] | null | null | null |
# standard Decaf preamble
.text
.align 2
.globl main
# QueueItem.Init(this, data, next, prev): constructor.
# Stores data at this+4 and splices the item into a doubly linked list:
# this.next = next, next.prev = this, this.prev = prev, prev.next = this
# (next at offset 8, prev at offset 12).
# NOTE(review): the `lw $zero, ...` lines are compiler-emitted parameter
# "fills"; loads into the hardwired $zero register are effectively no-ops.
_QueueItem.Init:
# BeginFunc 0
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
lw $zero, 4($fp) # fill this to $zero from $fp+4
lw $zero, 8($fp) # fill data to $zero from $fp+8
lw $zero, 12($fp) # fill next to $zero from $fp+12
lw $zero, 16($fp) # fill prev to $zero from $fp+16
# *(this + 4) = data
lw $v0, 8($fp) # fill data to $v0 from $fp+8
lw $v1, 4($fp) # fill this to $v1 from $fp+4
sw $v0, 4($v1) # store with offset
# *(this + 8) = next
lw $v0, 12($fp) # fill next to $v0 from $fp+12
lw $v1, 4($fp) # fill this to $v1 from $fp+4
sw $v0, 8($v1) # store with offset
# *(next + 12) = this
lw $v0, 4($fp) # fill this to $v0 from $fp+4
lw $v1, 12($fp) # fill next to $v1 from $fp+12
sw $v0, 12($v1) # store with offset
# *(this + 12) = prev
lw $v0, 16($fp) # fill prev to $v0 from $fp+16
lw $v1, 4($fp) # fill this to $v1 from $fp+4
sw $v0, 12($v1) # store with offset
# *(prev + 8) = this
lw $v0, 4($fp) # fill this to $v0 from $fp+4
lw $v1, 16($fp) # fill prev to $v1 from $fp+16
sw $v0, 8($v1) # store with offset
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# QueueItem.GetData(this): getter — returns the data field (*(this + 4)) in $v0.
_QueueItem.GetData:
# BeginFunc 4
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
subu $sp, $sp, 4 # decrement sp to make space for locals/temps
lw $zero, 4($fp) # fill this to $zero from $fp+4
# _tmp0 = *(this + 4)
lw $v0, 4($fp) # fill this to $v0 from $fp+4
lw $t0, 4($v0) # load with offset
# Return _tmp0
move $v0, $t0 # assign return value into $v0
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# EndFunc
# (below handles reaching end of fn body with no explicit return)
# NOTE(review): this epilogue is dead code — the explicit Return above
# already restored the frame and jumped out.
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# QueueItem.GetNext(this): getter — returns the next pointer (*(this + 8)) in $v0.
_QueueItem.GetNext:
# BeginFunc 4
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
subu $sp, $sp, 4 # decrement sp to make space for locals/temps
lw $zero, 4($fp) # fill this to $zero from $fp+4
# _tmp1 = *(this + 8)
lw $v0, 4($fp) # fill this to $v0 from $fp+4
lw $t0, 8($v0) # load with offset
# Return _tmp1
move $v0, $t0 # assign return value into $v0
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# QueueItem.GetPrev(this): getter — returns the prev pointer (*(this + 12)) in $v0.
_QueueItem.GetPrev:
# BeginFunc 4
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
subu $sp, $sp, 4 # decrement sp to make space for locals/temps
lw $zero, 4($fp) # fill this to $zero from $fp+4
# _tmp2 = *(this + 12)
lw $v0, 4($fp) # fill this to $v0 from $fp+4
lw $t0, 12($v0) # load with offset
# Return _tmp2
move $v0, $t0 # assign return value into $v0
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# QueueItem.SetNext(this, n): setter — stores n into the next field (*(this + 8) = n).
_QueueItem.SetNext:
# BeginFunc 0
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
lw $zero, 4($fp) # fill this to $zero from $fp+4
lw $zero, 8($fp) # fill n to $zero from $fp+8
# *(this + 8) = n
lw $v0, 8($fp) # fill n to $v0 from $fp+8
lw $v1, 4($fp) # fill this to $v1 from $fp+4
sw $v0, 8($v1) # store with offset
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# QueueItem.SetPrev(this, p): setter — stores p into the prev field (*(this + 12) = p).
_QueueItem.SetPrev:
# BeginFunc 0
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
lw $zero, 4($fp) # fill this to $zero from $fp+4
lw $zero, 8($fp) # fill p to $zero from $fp+8
# *(this + 12) = p
lw $v0, 8($fp) # fill p to $v0 from $fp+8
lw $v1, 4($fp) # fill this to $v1 from $fp+4
sw $v0, 12($v1) # store with offset
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# VTable for class QueueItem
# Method slots, in declaration order; an object's word 0 points here, so
# slot offsets are: Init=0, GetData=4, GetNext=8, GetPrev=12, SetNext=16,
# SetPrev=20 (used by the indirect ACall sequences below).
.data
.align 2
QueueItem: # label for class QueueItem vtable
.word _QueueItem.Init
.word _QueueItem.GetData
.word _QueueItem.GetNext
.word _QueueItem.GetPrev
.word _QueueItem.SetNext
.word _QueueItem.SetPrev
.text
# Queue.Init(this): constructor.
# Allocates a 16-byte QueueItem sentinel, stores it at this+4, and calls
# QueueItem.Init(sentinel, 0, sentinel, sentinel) through the vtable so the
# sentinel initially links to itself.
_Queue.Init:
# BeginFunc 36
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
subu $sp, $sp, 36 # decrement sp to make space for locals/temps
lw $t2, 4($fp) # fill this to $t2 from $fp+4
# _tmp3 = 16
li $t0, 16 # load constant value 16 into $t0
# PushParam _tmp3
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t0, 4($sp) # copy param value to stack
# _tmp4 = LCall _Alloc
sw $t2, 4($fp) # spill this from $t2 to $fp+4
jal _Alloc # jump to function
move $t1, $v0 # copy function return value from $v0
lw $t2, 4($fp) # fill this to $t2 from $fp+4
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp5 = QueueItem
la $t0, QueueItem # load label
# *(_tmp4) = _tmp5
sw $t0, 0($t1) # store with offset
# *(this + 4) = _tmp4
sw $t1, 4($t2) # store with offset
# _tmp6 = 0
li $t4, 0 # load constant value 0 into $t4
# _tmp7 = *(this + 4)
lw $t3, 4($t2) # load with offset
# _tmp8 = *(this + 4)
lw $t2, 4($t2) # load with offset
# _tmp9 = *(this + 4)
lw $t1, 4($t2) # load with offset
# _tmp10 = *(_tmp9)
lw $t0, 0($t1) # load with offset
# _tmp11 = *(_tmp10)
lw $t0, 0($t0) # load with offset
# PushParam _tmp8
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t2, 4($sp) # copy param value to stack
# PushParam _tmp7
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t3, 4($sp) # copy param value to stack
# PushParam _tmp6
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t4, 4($sp) # copy param value to stack
# PushParam _tmp9
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# ACall _tmp11
jalr $t0 # jump to function
# PopParams 16
add $sp, $sp, 16 # pop params off stack
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# Queue.EnQueue(this, i):
# Allocates a new QueueItem, then initializes it via the vtable as
# QueueItem.Init(temp, i, sentinel.next, sentinel) — i.e. inserts the new
# item right after the sentinel stored at this+4.
_Queue.EnQueue:
# BeginFunc 44
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
subu $sp, $sp, 44 # decrement sp to make space for locals/temps
lw $t2, 4($fp) # fill this to $t2 from $fp+4
lw $t4, 8($fp) # fill i to $t4 from $fp+8
# _tmp12 = 16
li $t0, 16 # load constant value 16 into $t0
# PushParam _tmp12
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t0, 4($sp) # copy param value to stack
# _tmp13 = LCall _Alloc
sw $t2, 4($fp) # spill this from $t2 to $fp+4
sw $t4, 8($fp) # spill i from $t4 to $fp+8
jal _Alloc # jump to function
move $t1, $v0 # copy function return value from $v0
lw $t2, 4($fp) # fill this to $t2 from $fp+4
lw $t4, 8($fp) # fill i to $t4 from $fp+8
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp14 = QueueItem
la $t0, QueueItem # load label
# *(_tmp13) = _tmp14
sw $t0, 0($t1) # store with offset
# temp = _tmp13
move $t3, $t1 # copy regs
# _tmp15 = *(this + 4)
lw $t1, 4($t2) # load with offset
# _tmp16 = *(_tmp15)
lw $t0, 0($t1) # load with offset
# _tmp17 = *(_tmp16 + 8)
lw $t0, 8($t0) # load with offset
# PushParam _tmp15
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# _tmp18 = ACall _tmp17
sw $t2, 4($fp) # spill this from $t2 to $fp+4
sw $t4, 8($fp) # spill i from $t4 to $fp+8
sw $t3, -8($fp) # spill temp from $t3 to $fp-8
jalr $t0 # jump to function
move $t0, $v0 # copy function return value from $v0
lw $t2, 4($fp) # fill this to $t2 from $fp+4
lw $t4, 8($fp) # fill i to $t4 from $fp+8
lw $t3, -8($fp) # fill temp to $t3 from $fp-8
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp19 = *(this + 4)
lw $t2, 4($t2) # load with offset
# _tmp20 = *(temp)
lw $t1, 0($t3) # load with offset
# _tmp21 = *(_tmp20)
lw $t1, 0($t1) # load with offset
# PushParam _tmp19
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t2, 4($sp) # copy param value to stack
# PushParam _tmp18
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t0, 4($sp) # copy param value to stack
# PushParam i
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t4, 4($sp) # copy param value to stack
# PushParam temp
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t3, 4($sp) # copy param value to stack
# ACall _tmp21
jalr $t1 # jump to function
# PopParams 16
add $sp, $sp, 16 # pop params off stack
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# Queue.DeQueue(this):
# If the sentinel's prev pointer equals the sentinel itself (list empty),
# prints "Queue Is Empty" and returns 0. Otherwise takes the last item
# (sentinel.prev), reads its data, then unlinks it via vtable calls
# (neighbor.SetNext / neighbor.SetPrev) and returns the data in $v0.
_Queue.DeQueue:
# BeginFunc 132
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
subu $sp, $sp, 132 # decrement sp to make space for locals/temps
lw $t2, 4($fp) # fill this to $t2 from $fp+4
# _tmp22 = *(this + 4)
lw $t1, 4($t2) # load with offset
# _tmp23 = *(_tmp22)
lw $t0, 0($t1) # load with offset
# _tmp24 = *(_tmp23 + 12)
lw $t0, 12($t0) # load with offset
# PushParam _tmp22
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# _tmp25 = ACall _tmp24
sw $t2, 4($fp) # spill this from $t2 to $fp+4
jalr $t0 # jump to function
move $t0, $v0 # copy function return value from $v0
lw $t2, 4($fp) # fill this to $t2 from $fp+4
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp26 = *(this + 4)
lw $t1, 4($t2) # load with offset
# _tmp27 = _tmp25 == _tmp26
seq $t0, $t0, $t1
# IfZ _tmp27 Goto _L0
beqz $t0, _L0 # branch if _tmp27 is zero
# _tmp28 = "Queue Is Empty"
.data # create string constant marked with label
_string1: .asciiz "Queue Is Empty"
.text
la $t0, _string1 # load label
# PushParam _tmp28
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t0, 4($sp) # copy param value to stack
# LCall _PrintString
jal _PrintString # jump to function
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp29 = 0
li $t0, 0 # load constant value 0 into $t0
# Return _tmp29
move $v0, $t0 # assign return value into $v0
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# Goto _L1
b _L1 # unconditional branch
_L0:
# Non-empty path: fetch the last item and its data, then unlink it.
# _tmp30 = *(this + 4)
lw $t1, 4($t2) # load with offset
# _tmp31 = *(_tmp30)
lw $t0, 0($t1) # load with offset
# _tmp32 = *(_tmp31 + 12)
lw $t0, 12($t0) # load with offset
# PushParam _tmp30
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# _tmp33 = ACall _tmp32
jalr $t0 # jump to function
move $t0, $v0 # copy function return value from $v0
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# temp = _tmp33
move $t4, $t0 # copy regs
# _tmp34 = *(temp)
lw $t0, 0($t4) # load with offset
# _tmp35 = *(_tmp34 + 4)
lw $t0, 4($t0) # load with offset
# PushParam temp
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t4, 4($sp) # copy param value to stack
# _tmp36 = ACall _tmp35
sw $t4, -44($fp) # spill temp from $t4 to $fp-44
jalr $t0 # jump to function
move $t0, $v0 # copy function return value from $v0
lw $t4, -44($fp) # fill temp to $t4 from $fp-44
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# val = _tmp36
move $t5, $t0 # copy regs
# _tmp37 = *(temp)
lw $t0, 0($t4) # load with offset
# _tmp38 = *(_tmp37 + 8)
lw $t0, 8($t0) # load with offset
# PushParam temp
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t4, 4($sp) # copy param value to stack
# _tmp39 = ACall _tmp38
sw $t5, -8($fp) # spill val from $t5 to $fp-8
sw $t4, -44($fp) # spill temp from $t4 to $fp-44
jalr $t0 # jump to function
move $t2, $v0 # copy function return value from $v0
lw $t5, -8($fp) # fill val to $t5 from $fp-8
lw $t4, -44($fp) # fill temp to $t4 from $fp-44
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp40 = *(temp)
lw $t0, 0($t4) # load with offset
# _tmp41 = *(_tmp40 + 12)
lw $t0, 12($t0) # load with offset
# PushParam temp
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t4, 4($sp) # copy param value to stack
# _tmp42 = ACall _tmp41
sw $t5, -8($fp) # spill val from $t5 to $fp-8
sw $t4, -44($fp) # spill temp from $t4 to $fp-44
sw $t2, -84($fp) # spill _tmp39 from $t2 to $fp-84
jalr $t0 # jump to function
move $t1, $v0 # copy function return value from $v0
lw $t5, -8($fp) # fill val to $t5 from $fp-8
lw $t4, -44($fp) # fill temp to $t4 from $fp-44
lw $t2, -84($fp) # fill _tmp39 to $t2 from $fp-84
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp43 = *(_tmp42)
lw $t3, 0($t1) # load with offset
# _tmp44 = *(_tmp43 + 16)
lw $t0, 16($t3) # load with offset
# PushParam _tmp39
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t2, 4($sp) # copy param value to stack
# PushParam _tmp42
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# ACall _tmp44
sw $t5, -8($fp) # spill val from $t5 to $fp-8
sw $t4, -44($fp) # spill temp from $t4 to $fp-44
jalr $t0 # jump to function
lw $t5, -8($fp) # fill val to $t5 from $fp-8
lw $t4, -44($fp) # fill temp to $t4 from $fp-44
# PopParams 8
add $sp, $sp, 8 # pop params off stack
# _tmp45 = *(temp)
lw $t0, 0($t4) # load with offset
# _tmp46 = *(_tmp45 + 12)
lw $t0, 12($t0) # load with offset
# PushParam temp
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t4, 4($sp) # copy param value to stack
# _tmp47 = ACall _tmp46
sw $t5, -8($fp) # spill val from $t5 to $fp-8
sw $t4, -44($fp) # spill temp from $t4 to $fp-44
jalr $t0 # jump to function
move $t3, $v0 # copy function return value from $v0
lw $t5, -8($fp) # fill val to $t5 from $fp-8
lw $t4, -44($fp) # fill temp to $t4 from $fp-44
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp48 = *(temp)
lw $t0, 0($t4) # load with offset
# _tmp49 = *(_tmp48 + 8)
lw $t0, 8($t0) # load with offset
# PushParam temp
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t4, 4($sp) # copy param value to stack
# _tmp50 = ACall _tmp49
sw $t5, -8($fp) # spill val from $t5 to $fp-8
sw $t3, -116($fp) # spill _tmp47 from $t3 to $fp-116
jalr $t0 # jump to function
move $t1, $v0 # copy function return value from $v0
lw $t5, -8($fp) # fill val to $t5 from $fp-8
lw $t3, -116($fp) # fill _tmp47 to $t3 from $fp-116
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp51 = *(_tmp50)
lw $t0, 0($t1) # load with offset
# _tmp52 = *(_tmp51 + 20)
lw $t2, 20($t0) # load with offset
# PushParam _tmp47
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t3, 4($sp) # copy param value to stack
# PushParam _tmp50
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# ACall _tmp52
sw $t5, -8($fp) # spill val from $t5 to $fp-8
jalr $t2 # jump to function
lw $t5, -8($fp) # fill val to $t5 from $fp-8
# PopParams 8
add $sp, $sp, 8 # pop params off stack
_L1:
# Return val
move $v0, $t5 # assign return value into $v0
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
# VTable for class Queue
# Method slots, in declaration order: Init=0, EnQueue=4, DeQueue=8.
.data
.align 2
Queue: # label for class Queue vtable
.word _Queue.Init
.word _Queue.EnQueue
.word _Queue.DeQueue
.text
main:
# BeginFunc 196
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
subu $sp, $sp, 196 # decrement sp to make space for locals/temps
# _tmp53 = 8
li $t0, 8 # load constant value 8 into $t0
# PushParam _tmp53
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t0, 4($sp) # copy param value to stack
# _tmp54 = LCall _Alloc
jal _Alloc # jump to function
move $t1, $v0 # copy function return value from $v0
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp55 = Queue
la $t0, Queue # load label
# *(_tmp54) = _tmp55
sw $t0, 0($t1) # store with offset
# q = _tmp54
move $t1, $t1 # copy regs
# _tmp56 = *(q)
lw $t2, 0($t1) # load with offset
# _tmp57 = *(_tmp56)
lw $t2, 0($t2) # load with offset
# PushParam q
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# ACall _tmp57
sw $t1, -8($fp) # spill q from $t1 to $fp-8
jalr $t2 # jump to function
lw $t1, -8($fp) # fill q to $t1 from $fp-8
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp58 = 0
li $t2, 0 # load constant value 0 into $t2
# i = _tmp58
move $t0, $t2 # copy regs
_L2:
# _tmp59 = 10
li $t2, 10 # load constant value 10 into $t2
# _tmp60 = i == _tmp59
seq $t3, $t0, $t2
# _tmp61 = 0
li $t2, 0 # load constant value 0 into $t2
# _tmp62 = _tmp60 == _tmp61
seq $t2, $t3, $t2
# IfZ _tmp62 Goto _L3
beqz $t2, _L3 # branch if _tmp62 is zero
# _tmp63 = *(q)
lw $t2, 0($t1) # load with offset
# _tmp64 = *(_tmp63 + 4)
lw $t2, 4($t2) # load with offset
# PushParam i
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t0, 4($sp) # copy param value to stack
# PushParam q
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# ACall _tmp64
sw $t1, -8($fp) # spill q from $t1 to $fp-8
sw $t0, -12($fp) # spill i from $t0 to $fp-12
jalr $t2 # jump to function
lw $t1, -8($fp) # fill q to $t1 from $fp-8
lw $t0, -12($fp) # fill i to $t0 from $fp-12
# PopParams 8
add $sp, $sp, 8 # pop params off stack
# _tmp65 = 1
li $t2, 1 # load constant value 1 into $t2
# _tmp66 = i + _tmp65
add $t2, $t0, $t2
# i = _tmp66
move $t0, $t2 # copy regs
# Goto _L2
b _L2 # unconditional branch
_L3:
# _tmp67 = 0
li $t2, 0 # load constant value 0 into $t2
# i = _tmp67
move $t0, $t2 # copy regs
_L4:
# _tmp68 = 4
li $t2, 4 # load constant value 4 into $t2
# _tmp69 = i == _tmp68
seq $t3, $t0, $t2
# _tmp70 = 0
li $t2, 0 # load constant value 0 into $t2
# _tmp71 = _tmp69 == _tmp70
seq $t2, $t3, $t2
# IfZ _tmp71 Goto _L5
beqz $t2, _L5 # branch if _tmp71 is zero
# _tmp72 = *(q)
lw $t2, 0($t1) # load with offset
# _tmp73 = *(_tmp72 + 8)
lw $t2, 8($t2) # load with offset
# PushParam q
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# _tmp74 = ACall _tmp73
sw $t1, -8($fp) # spill q from $t1 to $fp-8
sw $t0, -12($fp) # spill i from $t0 to $fp-12
jalr $t2 # jump to function
move $t2, $v0 # copy function return value from $v0
lw $t1, -8($fp) # fill q to $t1 from $fp-8
lw $t0, -12($fp) # fill i to $t0 from $fp-12
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# PushParam _tmp74
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t2, 4($sp) # copy param value to stack
# LCall _PrintInt
sw $t1, -8($fp) # spill q from $t1 to $fp-8
sw $t0, -12($fp) # spill i from $t0 to $fp-12
jal _PrintInt # jump to function
lw $t1, -8($fp) # fill q to $t1 from $fp-8
lw $t0, -12($fp) # fill i to $t0 from $fp-12
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp75 = " "
.data # create string constant marked with label
_string2: .asciiz " "
.text
la $t2, _string2 # load label
# PushParam _tmp75
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t2, 4($sp) # copy param value to stack
# LCall _PrintString
sw $t1, -8($fp) # spill q from $t1 to $fp-8
sw $t0, -12($fp) # spill i from $t0 to $fp-12
jal _PrintString # jump to function
lw $t1, -8($fp) # fill q to $t1 from $fp-8
lw $t0, -12($fp) # fill i to $t0 from $fp-12
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp76 = 1
li $t2, 1 # load constant value 1 into $t2
# _tmp77 = i + _tmp76
add $t2, $t0, $t2
# i = _tmp77
move $t0, $t2 # copy regs
# Goto _L4
b _L4 # unconditional branch
_L5:
# _tmp78 = "\n"
.data # create string constant marked with label
_string3: .asciiz "\n"
.text
la $t2, _string3 # load label
# PushParam _tmp78
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t2, 4($sp) # copy param value to stack
# LCall _PrintString
sw $t1, -8($fp) # spill q from $t1 to $fp-8
jal _PrintString # jump to function
lw $t1, -8($fp) # fill q to $t1 from $fp-8
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp79 = 0
li $t2, 0 # load constant value 0 into $t2
# i = _tmp79
move $t0, $t2 # copy regs
_L6:
# _tmp80 = 10
li $t2, 10 # load constant value 10 into $t2
# _tmp81 = i == _tmp80
seq $t3, $t0, $t2
# _tmp82 = 0
li $t2, 0 # load constant value 0 into $t2
# _tmp83 = _tmp81 == _tmp82
seq $t2, $t3, $t2
# IfZ _tmp83 Goto _L7
beqz $t2, _L7 # branch if _tmp83 is zero
# _tmp84 = *(q)
lw $t2, 0($t1) # load with offset
# _tmp85 = *(_tmp84 + 4)
lw $t2, 4($t2) # load with offset
# PushParam i
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t0, 4($sp) # copy param value to stack
# PushParam q
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# ACall _tmp85
sw $t1, -8($fp) # spill q from $t1 to $fp-8
sw $t0, -12($fp) # spill i from $t0 to $fp-12
jalr $t2 # jump to function
lw $t1, -8($fp) # fill q to $t1 from $fp-8
lw $t0, -12($fp) # fill i to $t0 from $fp-12
# PopParams 8
add $sp, $sp, 8 # pop params off stack
# _tmp86 = 1
li $t2, 1 # load constant value 1 into $t2
# _tmp87 = i + _tmp86
add $t2, $t0, $t2
# i = _tmp87
move $t0, $t2 # copy regs
# Goto _L6
b _L6 # unconditional branch
_L7:
# _tmp88 = 0
li $t2, 0 # load constant value 0 into $t2
# i = _tmp88
move $t0, $t2 # copy regs
_L8:
# _tmp89 = 17
li $t2, 17 # load constant value 17 into $t2
# _tmp90 = i == _tmp89
seq $t3, $t0, $t2
# _tmp91 = 0
li $t2, 0 # load constant value 0 into $t2
# _tmp92 = _tmp90 == _tmp91
seq $t2, $t3, $t2
# IfZ _tmp92 Goto _L9
beqz $t2, _L9 # branch if _tmp92 is zero
# _tmp93 = *(q)
lw $t2, 0($t1) # load with offset
# _tmp94 = *(_tmp93 + 8)
lw $t2, 8($t2) # load with offset
# PushParam q
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t1, 4($sp) # copy param value to stack
# _tmp95 = ACall _tmp94
sw $t1, -8($fp) # spill q from $t1 to $fp-8
sw $t0, -12($fp) # spill i from $t0 to $fp-12
jalr $t2 # jump to function
move $t2, $v0 # copy function return value from $v0
lw $t1, -8($fp) # fill q to $t1 from $fp-8
lw $t0, -12($fp) # fill i to $t0 from $fp-12
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# PushParam _tmp95
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t2, 4($sp) # copy param value to stack
# LCall _PrintInt
sw $t1, -8($fp) # spill q from $t1 to $fp-8
sw $t0, -12($fp) # spill i from $t0 to $fp-12
jal _PrintInt # jump to function
lw $t1, -8($fp) # fill q to $t1 from $fp-8
lw $t0, -12($fp) # fill i to $t0 from $fp-12
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp96 = " "
.data # create string constant marked with label
_string4: .asciiz " "
.text
la $t2, _string4 # load label
# PushParam _tmp96
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t2, 4($sp) # copy param value to stack
# LCall _PrintString
sw $t1, -8($fp) # spill q from $t1 to $fp-8
sw $t0, -12($fp) # spill i from $t0 to $fp-12
jal _PrintString # jump to function
lw $t1, -8($fp) # fill q to $t1 from $fp-8
lw $t0, -12($fp) # fill i to $t0 from $fp-12
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# _tmp97 = 1
li $t2, 1 # load constant value 1 into $t2
# _tmp98 = i + _tmp97
add $t2, $t0, $t2
# i = _tmp98
move $t0, $t2 # copy regs
# Goto _L8
b _L8 # unconditional branch
_L9:
# _tmp99 = "\n"
.data # create string constant marked with label
_string5: .asciiz "\n"
.text
la $t0, _string5 # load label
# PushParam _tmp99
subu $sp, $sp, 4 # decrement sp to make space for param
sw $t0, 4($sp) # copy param value to stack
# LCall _PrintString
jal _PrintString # jump to function
# PopParams 4
add $sp, $sp, 4 # pop params off stack
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
_PrintInt: # runtime helper: print the integer argument (passed on the stack at fp+4)
subu $sp, $sp, 8 # decrement sp to make space to save ra,fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp (fp points at saved fp slot)
lw $a0, 4($fp) # load the integer argument from caller's stack ($fp+4)
# LCall _PrintInt
li $v0, 1 # syscall 1: print integer held in $a0
syscall
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
_ReadInteger: # runtime helper: read an integer from stdin; result returned in $v0
subu $sp, $sp, 8 # decrement sp to make space to save ra,fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
li $v0, 5 # syscall 5: read integer into $v0 (doubles as the return value)
syscall
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
_PrintBool: # runtime helper: print "true"/"false" for the boolean argument at fp+4
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
lw $a0, 4($fp) # load the boolean argument from caller's stack ($fp+4)
li $v0, 4 # syscall 4: print string (string address chosen below)
beq $a0, $0, PrintBoolFalse # zero means false
la $a0, _PrintBoolTrueString # nonzero: print "true"
j PrintBoolEnd
PrintBoolFalse:
la $a0, _PrintBoolFalseString # zero: print "false"
PrintBoolEnd:
syscall
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
.data # create string constant marked with label
_PrintBoolTrueString: .asciiz "true"
.text
.data # create string constant marked with label
_PrintBoolFalseString: .asciiz "false"
.text
_PrintString: # runtime helper: print the NUL-terminated string whose address is at fp+4
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
lw $a0, 4($fp) # load the string address from caller's stack ($fp+4)
li $v0, 4 # syscall 4: print string at $a0
syscall
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
_Alloc: # runtime helper: allocate the number of bytes given at fp+4; address returned in $v0
subu $sp, $sp, 8 # decrement sp to make space to save ra,fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
lw $a0, 4($fp) # load requested size in bytes from caller's stack ($fp+4)
li $v0, 9 # syscall 9 (sbrk): allocate $a0 bytes, address returned in $v0
syscall
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
_Halt: # runtime helper: terminate the program immediately (no frame needed; never returns)
li $v0, 10 # syscall 10: exit
syscall
# EndFunc
_StringEqual: # runtime helper: $v0 = 1 if the two NUL-terminated strings (fp+4, fp+8) are equal, else 0
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
lw $a0, 4($fp) # first string pointer from $fp+4
lw $a1, 8($fp) # second string pointer from $fp+8
li $v0,1 # preload "equal" result; required by the identical-pointer shortcut below
beq $a0,$a1,Lrunt10 # same pointer: trivially equal, result already in $v0
Lrunt12:
lbu $v0,($a0) # current byte of first string
lbu $a2,($a1) # current byte of second string
bne $v0,$a2,Lrunt11 # mismatch: strings differ
addiu $a0,$a0,1 # advance both pointers
addiu $a1,$a1,1
bne $v0,$0,Lrunt12 # keep scanning until the NUL terminator
li $v0,1 # reached NUL with no mismatch: equal
j Lrunt10
Lrunt11:
li $v0,0 # strings differ
Lrunt10:
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
_ReadLine: # runtime helper: read a line from stdin into a fresh buffer; pointer returned in $v0
subu $sp, $sp, 8 # decrement sp to make space to save ra, fp
sw $fp, 8($sp) # save fp
sw $ra, 4($sp) # save ra
addiu $fp, $sp, 8 # set up new fp
li $a0, 101 # buffer size: 100 characters + NUL terminator
li $v0, 9 # syscall 9 (sbrk): allocate the buffer
syscall
addi $a0, $v0, 0 # $a0 = buffer address for read_string
li $v0, 8 # syscall 8: read string into $a0, at most $a1 bytes
li $a1,101
syscall
addiu $v0,$a0,0 # pointer to begin of string (return value)
Lrunt21:
lb $a1,($a0) # load character at pointer
addiu $a0,$a0,1 # forward pointer
bnez $a1,Lrunt21 # loop until end of string is reached
lb $a1,-2($a0) # load character before end of string
li $a2,10 # newline character
bne $a1,$a2,Lrunt20 # do not remove last character if not newline
sb $0,-2($a0) # overwrite the trailing newline with the terminating character
Lrunt20:
# EndFunc
# (below handles reaching end of fn body with no explicit return)
move $sp, $fp # pop callee frame off stack
lw $ra, -4($fp) # restore saved ra
lw $fp, 0($fp) # restore saved fp
jr $ra # return from function
| 34.424307
| 114
| 0.58913
| 5,592
| 32,290
| 3.346567
| 0.054542
| 0.018809
| 0.026077
| 0.055413
| 0.809822
| 0.782195
| 0.770332
| 0.749546
| 0.730523
| 0.715293
| 0
| 0.080213
| 0.284577
| 32,290
| 937
| 115
| 34.461046
| 0.729882
| 0
| 0
| 0.790592
| 0
| 0
| 0.002157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b5b3d7c8aca7921f1170518280bfeea65485f6c4
| 240
|
py
|
Python
|
icevision/models/mmdet/models/__init__.py
|
Addono/icevision
|
5207de65f780735bdf8ed9d4d01ce72ad852aa9c
|
[
"Apache-2.0"
] | null | null | null |
icevision/models/mmdet/models/__init__.py
|
Addono/icevision
|
5207de65f780735bdf8ed9d4d01ce72ad852aa9c
|
[
"Apache-2.0"
] | null | null | null |
icevision/models/mmdet/models/__init__.py
|
Addono/icevision
|
5207de65f780735bdf8ed9d4d01ce72ad852aa9c
|
[
"Apache-2.0"
] | null | null | null |
# object detection
from icevision.models.mmdet.models import faster_rcnn
from icevision.models.mmdet.models import retinanet
from icevision.models.mmdet.models import fcos
# segmentation
from icevision.models.mmdet.models import mask_rcnn
| 30
| 53
| 0.85
| 33
| 240
| 6.121212
| 0.393939
| 0.257426
| 0.376238
| 0.475248
| 0.712871
| 0.712871
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091667
| 240
| 7
| 54
| 34.285714
| 0.926606
| 0.120833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8d5793e9296a259f6081cf7ad7cfcc01ad7a3b87
| 20,236
|
py
|
Python
|
tests/test_segmentclient.py
|
YiweiLi4/tensorbay-python-sdk
|
20fc8b37bad01c2d201a87b1436f1d4ff77d76df
|
[
"MIT"
] | null | null | null |
tests/test_segmentclient.py
|
YiweiLi4/tensorbay-python-sdk
|
20fc8b37bad01c2d201a87b1436f1d4ff77d76df
|
[
"MIT"
] | null | null | null |
tests/test_segmentclient.py
|
YiweiLi4/tensorbay-python-sdk
|
20fc8b37bad01c2d201a87b1436f1d4ff77d76df
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#
# Copyright 2021 Graviti. Licensed under MIT License.
#
"""This file defines class TestSegmentClient"""
import pytest
import ulid
from tensorbay import __version__
from tensorbay.client.gas import GAS
from tensorbay.dataset.data import Data, Label
from tensorbay.dataset.frame import Frame
from tensorbay.exception import FrameError, ResponseError
from tensorbay.label.catalog import Catalog
from tensorbay.sensor.sensor import Sensor, Sensors
from .utility import get_random_dataset_name
# Catalog for a single BOX2D task: 15 numeric categories plus four attributes.
# The enum values below define what the label-upload tests accept as valid.
CATALOG = {
    "BOX2D": {
        "categories": [
            {"name": "01"},
            {"name": "02"},
            {"name": "03"},
            {"name": "04"},
            {"name": "05"},
            {"name": "06"},
            {"name": "07"},
            {"name": "08"},
            {"name": "09"},
            {"name": "10"},
            {"name": "11"},
            {"name": "12"},
            {"name": "13"},
            {"name": "14"},
            {"name": "15"},
        ],
        "attributes": [
            {"name": "Vertical angle", "enum": [-90, -60, -30, -15, 0, 15, 30, 60, 90]},
            {
                "name": "Horizontal angle",
                "enum": [-90, -75, -60, -45, -30, -15, 0, 15, 30, 45, 60, 75, 90],
            },
            {"name": "Serie", "enum": [1, 2]},
            {"name": "Number", "type": "integer", "minimum": 0, "maximum": 92},
        ],
    }
}
# A valid BOX2D label conforming to CATALOG.
LABEL = {
    "BOX2D": [
        {
            "category": "01",
            "attributes": {"Vertical angle": -90, "Horizontal angle": 60, "Serie": 1, "Number": 5},
            "box2d": {"xmin": 639.85, "ymin": 175.24, "xmax": 667.59, "ymax": 200.41},
        }
    ]
}
# Same as LABEL except for a different (still valid) "Vertical angle"; used to test label replacement.
NEW_LABEL = {
    "BOX2D": [
        {
            "category": "01",
            "attributes": {"Vertical angle": -60, "Horizontal angle": 60, "Serie": 1, "Number": 5},
            "box2d": {"xmin": 639.85, "ymin": 175.24, "xmax": 667.59, "ymax": 200.41},
        }
    ]
}
# Deliberately invalid label: "Vertical angle" -75 is not in CATALOG's enum,
# so the server is expected to reject it.
WRONG_LABEL = {
    "BOX2D": [
        {
            "category": "01",
            "attributes": {"Vertical angle": -75, "Horizontal angle": 60, "Serie": 1, "Number": 5},
            "box2d": {"xmin": 639.85, "ymin": 175.24, "xmax": 667.59, "ymax": 200.41},
        }
    ]
}
# Sensor definitions used by the fusion-dataset (frame) tests.
LIDAR_DATA = {
    "name": "Lidar1",
    "type": "LIDAR",
    "extrinsics": {
        "translation": {"x": 1, "y": 2, "z": 3},
        "rotation": {"w": 1.0, "x": 2.0, "y": 3.0, "z": 4.0},
    },
}
RADAR_DATA = {
    "name": "Radar1",
    "type": "RADAR",
    "extrinsics": {
        "translation": {"x": 1, "y": 2, "z": 3},
        "rotation": {"w": 1.0, "x": 2.0, "y": 3.0, "z": 4.0},
    },
}
class TestSegmentClient:
    """Integration tests for the SegmentClient class.

    Every test creates a fresh dataset against a live TensorBay service
    (accesskey/url fixtures) and deletes it afterwards.
    """

    def test_upload_file(self, accesskey, url, tmp_path):
        """Uploaded files are listed in order and carry no label."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(5):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            segment_client.upload_file(local_path=str(local_path))
        data = segment_client.list_data()
        assert data[0].path == "hello0.txt"
        assert data[0].open().read() == b"CONTENT"
        assert not data[0].label
        gas_client.delete_dataset(dataset_name)

    def test_replace_file(self, accesskey, url, tmp_path):
        """Re-uploading a file with the same remote path replaces its content."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(5):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            segment_client.upload_file(local_path=str(local_path))
        # Replace files
        for i in range(5):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("ADD CONTENT")
            segment_client.upload_file(local_path=str(local_path))
        data = segment_client.list_data()
        assert data[0].path == "hello0.txt"
        assert data[0].open().read() == b"ADD CONTENT"
        assert not data[0].label
        gas_client.delete_dataset(dataset_name)

    def test_add_file(self, accesskey, url, tmp_path):
        """Adding files with new names keeps existing ones; listing is name-sorted."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(5):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            segment_client.upload_file(local_path=str(local_path))
        # Add files
        for i in range(5):
            local_path = path / f"goodbye{i}.txt"
            local_path.write_text("CONTENT")
            segment_client.upload_file(local_path=str(local_path))
        data = segment_client.list_data()
        assert data[0].path == "goodbye0.txt"
        assert data[5].path == "hello0.txt"
        gas_client.delete_dataset(dataset_name)

    def test_list_file_order(self, accesskey, url, tmp_path):
        """Listing order is independent of upload order (sorted by path)."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        # Upload files in reverse order
        for i in reversed(range(5)):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            segment_client.upload_file(local_path=str(local_path))
        # Add files in reverse order
        for i in reversed(range(5)):
            local_path = path / f"goodbye{i}.txt"
            local_path.write_text("CONTENT")
            segment_client.upload_file(local_path=str(local_path))
        data = segment_client.list_data()
        assert data[0].path == "goodbye0.txt"
        assert data[5].path == "hello0.txt"
        gas_client.delete_dataset(dataset_name)

    def test_list_data_paths(self, accesskey, url, tmp_path):
        """list_data_paths returns sorted remote paths regardless of upload order."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(5):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            segment_client.upload_file(local_path=str(local_path))
        # Add other files in reverse order
        for i in reversed(range(5)):
            local_path = path / f"goodbye{i}.txt"
            local_path.write_text("CONTENT")
            segment_client.upload_file(local_path=str(local_path))
        data_paths = segment_client.list_data_paths()
        assert data_paths[0] == "goodbye0.txt"
        assert data_paths[5] == "hello0.txt"
        gas_client.delete_dataset(dataset_name)

    @pytest.mark.xfail(__version__ < "1.5.0", reason="not supported at least until v1.5.0")
    def test_upload_label(self, accesskey, url, tmp_path):
        """Labels require an uploaded file and must conform to the catalog."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        dataset_client.upload_catalog(Catalog.loads(CATALOG))
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        local_path = path / "hello0.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        data.label = Label.loads(LABEL)
        # If not uploading file, uploading label is not allowed
        with pytest.raises(ResponseError):
            segment_client.upload_label(data)
        # Uploading files
        segment_client.upload_file(data.path, data.target_remote_path)
        data.label = Label.loads(WRONG_LABEL)
        # Uploading wrong label is not allowed
        with pytest.raises(ResponseError):
            segment_client.upload_label(data)
        data.label = Label.loads(LABEL)
        segment_client.upload_label(data)
        data = segment_client.list_data()
        assert data[0].path == "hello0.txt"
        assert data[0].label
        # todo: match the input and output label
        gas_client.delete_dataset(dataset_name)

    def test_replace_label(self, accesskey, url, tmp_path):
        """Uploading a second label for the same data replaces the first."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        dataset_client.upload_catalog(Catalog.loads(CATALOG))
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        local_path = path / "hello0.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        segment_client.upload_file(data.path, data.target_remote_path)
        data.label = Label.loads(LABEL)
        segment_client.upload_label(data)
        # Replace labels
        data.label = Label.loads(NEW_LABEL)
        segment_client.upload_label(data)
        data = segment_client.list_data()
        assert data[0].path == "hello0.txt"
        assert data[0].label
        # todo: match the input and output label
        gas_client.delete_dataset(dataset_name)

    def test_upload_label_without_catalog(self, accesskey, url, tmp_path):
        """Uploading a label before the catalog exists is rejected."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        local_path = path / "hello0.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        segment_client.upload_file(data.path, data.target_remote_path)
        # If not uploading catalog, uploading label is not allowed
        data.label = Label.loads(LABEL)
        with pytest.raises(ResponseError):
            segment_client.upload_label(data)
        gas_client.delete_dataset(dataset_name)

    def test_upload_data_without_label(self, accesskey, url, tmp_path):
        """upload_data with no label attached stores files label-free."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(5):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            segment_client.upload_data(Data(local_path=str(local_path)))
        data = segment_client.list_data()
        assert data[0].path == "hello0.txt"
        assert data[0].open().read() == b"CONTENT"
        assert not data[0].label
        # todo: match the input and output label
        gas_client.delete_dataset(dataset_name)

    def test_upload_data_with_label(self, accesskey, url, tmp_path):
        """upload_data uploads file content and label in one call."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        dataset_client.upload_catalog(Catalog.loads(CATALOG))
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(5):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment_client.upload_data(data)
        data = segment_client.list_data()
        assert data[0].path == "hello0.txt"
        assert data[0].open().read() == b"CONTENT"
        assert data[0].label
        # todo: match the input and output label
        gas_client.delete_dataset(dataset_name)

    def test_delete_data(self, accesskey, url, tmp_path):
        """delete_data accepts a single path or an iterable of paths."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name)
        dataset_client.create_draft("draft-1")
        dataset_client.upload_catalog(Catalog.loads(CATALOG))
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(10):
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            segment_client.upload_data(data)
        segment_client.delete_data("hello0.txt")
        data_paths = segment_client.list_data_paths()
        assert "hello0.txt" not in data_paths
        segment_client.delete_data(segment_client.list_data_paths())
        data = segment_client.list_data()
        assert len(data) == 0
        gas_client.delete_dataset(dataset_name)

    def test_sensor(self, accesskey, url):
        """Sensors on a fusion segment can be uploaded, listed and deleted."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        segment_client.upload_sensor(Sensor.loads(LIDAR_DATA))
        sensors = segment_client.get_sensors()
        assert sensors == Sensors.loads([LIDAR_DATA])
        segment_client.delete_sensor(LIDAR_DATA["name"])
        sensors = segment_client.get_sensors()
        assert len(sensors) == 0
        gas_client.delete_dataset(dataset_name)

    def test_upload_frame_without_label(self, accesskey, url, tmp_path):
        """Frames uploaded with explicit timestamps are listed label-free."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        segment_client.upload_sensor(Sensor.loads(LIDAR_DATA))
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(5):
            frame = Frame()
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            frame[LIDAR_DATA["name"]] = data
            segment_client.upload_frame(frame, timestamp=i)
        frames = segment_client.list_frames()
        assert len(frames) == 5
        assert frames[0][LIDAR_DATA["name"]].path == "hello0.txt"
        assert not frames[0][LIDAR_DATA["name"]].label
        gas_client.delete_dataset(dataset_name)

    def test_upload_frame_without_sensor(self, accesskey, url, tmp_path):
        """Uploading a frame before its sensor exists is rejected."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        path = tmp_path / "sub"
        path.mkdir()
        frame = Frame()
        local_path = path / "hello0.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        frame[LIDAR_DATA["name"]] = data
        # If not uploading sensor, uploading frame is not allowed
        with pytest.raises(ResponseError):
            segment_client.upload_frame(frame, timestamp=0)
        gas_client.delete_dataset(dataset_name)

    def test_upload_frame_with_label(self, accesskey, url, tmp_path):
        """Frames carry their data's labels when a catalog exists."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
        dataset_client.create_draft("draft-1")
        dataset_client.upload_catalog(Catalog.loads(CATALOG))
        segment_client = dataset_client.get_or_create_segment("segment1")
        segment_client.upload_sensor(Sensor.loads(LIDAR_DATA))
        path = tmp_path / "sub"
        path.mkdir()
        for i in range(5):
            frame = Frame()
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            data.label = Label.loads(LABEL)
            frame[LIDAR_DATA["name"]] = data
            segment_client.upload_frame(frame, timestamp=i)
        frames = segment_client.list_frames()
        assert len(frames) == 5
        assert frames[0][LIDAR_DATA["name"]].path == "hello0.txt"
        assert frames[0][LIDAR_DATA["name"]].label
        # todo: match the input and output label
        gas_client.delete_dataset(dataset_name)

    def test_upload_frame_with_order(self, accesskey, url, tmp_path):
        """Exactly one of frame_id and timestamp must be supplied per frame."""
        gas_client = GAS(access_key=accesskey, url=url)
        dataset_name = get_random_dataset_name()
        dataset_client = gas_client.create_dataset(dataset_name, is_fusion=True)
        dataset_client.create_draft("draft-1")
        segment_client = dataset_client.get_or_create_segment("segment1")
        segment_client.upload_sensor(Sensor.loads(LIDAR_DATA))
        path = tmp_path / "sub"
        path.mkdir()
        # If not setting frame id in frame, set timestamp(order) when uploading
        for i in reversed(range(5)):
            frame = Frame()
            local_path = path / f"hello{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            frame[LIDAR_DATA["name"]] = data
            segment_client.upload_frame(frame, timestamp=i)
        # Set frame id in frame
        for i in range(5, 10):
            frame = Frame(frame_id=ulid.from_timestamp(i))
            local_path = path / f"goodbye{i}.txt"
            local_path.write_text("CONTENT")
            data = Data(local_path=str(local_path))
            frame[LIDAR_DATA["name"]] = data
            segment_client.upload_frame(frame)
        # Both setting frame id in frame and set timestamp(order) when uploading are not allowed
        i = 10
        frame = Frame(frame_id=ulid.from_timestamp(i))
        local_path = path / f"goodbye{i}.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        frame[LIDAR_DATA["name"]] = data
        with pytest.raises(FrameError):
            segment_client.upload_frame(frame, timestamp=i)
        # Neither setting frame id in frame nor set timestamp(order) when uploading is not allowed
        frame = Frame()
        local_path = path / f"goodbye{i}.txt"
        local_path.write_text("CONTENT")
        data = Data(local_path=str(local_path))
        frame[LIDAR_DATA["name"]] = data
        with pytest.raises(FrameError):
            segment_client.upload_frame(frame)
        frames = segment_client.list_frames()
        assert len(frames) == 10
        assert frames[0][LIDAR_DATA["name"]].path == "hello0.txt"
        assert frames[5][LIDAR_DATA["name"]].path == "goodbye5.txt"
        assert not frames[0][LIDAR_DATA["name"]].label
        # todo: match the input and output label
        gas_client.delete_dataset(dataset_name)
| 37.474074
| 99
| 0.627397
| 2,562
| 20,236
| 4.689696
| 0.074941
| 0.065918
| 0.04794
| 0.053933
| 0.883978
| 0.860508
| 0.848356
| 0.836787
| 0.822472
| 0.811402
| 0
| 0.020829
| 0.255041
| 20,236
| 539
| 100
| 37.543599
| 0.776186
| 0.049022
| 0
| 0.750594
| 0
| 0
| 0.083867
| 0
| 0
| 0
| 0
| 0.001855
| 0.085511
| 1
| 0.038005
| false
| 0
| 0.023753
| 0
| 0.064133
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d7698812124b4af1fd0645be09eff294af4c075
| 33,036
|
py
|
Python
|
equations/radiation_fluxes.py
|
Trung-Hieu-Tran/CEAOS
|
916c0d186fed0ac827ed3f5dc8ad50fe056154bb
|
[
"MIT"
] | null | null | null |
equations/radiation_fluxes.py
|
Trung-Hieu-Tran/CEAOS
|
916c0d186fed0ac827ed3f5dc8ad50fe056154bb
|
[
"MIT"
] | null | null | null |
equations/radiation_fluxes.py
|
Trung-Hieu-Tran/CEAOS
|
916c0d186fed0ac827ed3f5dc8ad50fe056154bb
|
[
"MIT"
] | null | null | null |
"""
Note:
PAR: Photosynthetically active radiation
FIR: Far infrared radiation
NIR: Near infrared radiation
"""
from equations.heat_fluxes import *
from equations.lumped_cover_layers import *
def canopy_PAR_absorbed(states: States, setpoints: Setpoints, weather: Weather):
    """Total PAR absorbed by the canopy (Equation 8.26).

    Sum of PAR arriving directly through the greenhouse cover and PAR
    reflected back onto the canopy by the greenhouse floor.

    :return: The PAR absorbed by the canopy [W m^-2]
    """
    from_cover = canopy_PAR_absorbed_from_greenhouse_cover(states, setpoints, weather)
    from_floor = canopy_PAR_absorbed_from_greenhouse_floor(states, setpoints, weather)
    return from_cover + from_floor
def canopy_PAR_absorbed_from_greenhouse_cover(states: States, setpoints: Setpoints, weather: Weather):
    """PAR absorbed by the canopy arriving directly from the cover (Equation 8.27)."""
    par_above = PAR_above_canopy(states, setpoints, weather)
    rho_canopy = Constants.canopy_PAR_reflection_coefficient
    k_canopy = Constants.canopy_PAR_extinction_coefficient
    # Beer's-law interception over the leaf area index.
    intercepted_fraction = 1 - math.exp(-k_canopy * states.leaf_area_index)
    return par_above * (1 - rho_canopy) * intercepted_fraction
def canopy_PAR_absorbed_from_greenhouse_floor(states: States, setpoints: Setpoints, weather: Weather):
    """PAR absorbed by the canopy after reflection from the floor (Equation 8.29)."""
    par_above = PAR_above_canopy(states, setpoints, weather)
    k_canopy = Constants.canopy_PAR_extinction_coefficient
    k_floor = Constants.floor_PAR_extinction_coefficient
    rho_floor = Coefficients.Floor.floor_PAR_reflection_coefficient
    rho_canopy = Constants.canopy_PAR_reflection_coefficient
    lai = states.leaf_area_index
    return (
        par_above
        * (1 - math.exp(-k_canopy * lai))
        * rho_floor
        * (1 - rho_canopy)
        * (1 - math.exp(-k_floor * lai))
    )
def floor_NIR_absorbed(states: States, setpoints: Setpoints, weather: Weather):
    """NIR absorbed by the greenhouse floor (Equation 8.34).

    Chains the NIR optical coefficients of the four-layer cover, the canopy
    and the floor into a single cover-canopy-floor transmission coefficient,
    then applies it to the outdoor global radiation.

    :return: NIR flux absorbed by the floor [W m^-2]
    """
    # Equation 8.34
    # TODO: need to re-verify the order of four cover layers and cover-canopy-floor
    shScr_NIR_transmission_coefficient = Coefficients.Shadowscreen.shScr_NIR_transmission_coefficient  # line 155 / setGlParams / GreenLight
    shScr_NIR_reflection_coefficient = Coefficients.Shadowscreen.shScr_NIR_reflection_coefficient  # line 152 / setGlParams / GreenLight
    # NIR reflection coefficient of the movable shading screen and the semi-permanent shading screen
    roof_thScr_NIR_reflection_coefficient = roof_thermal_screen_NIR_reflection_coefficient(setpoints)
    # Vanthoor NIR reflection coefficient of the lumped cover
    cover_NIR_reflection_coefficient = double_layer_cover_reflection_coefficient(shScr_NIR_transmission_coefficient, shScr_NIR_reflection_coefficient, roof_thScr_NIR_reflection_coefficient)
    # Virtual coefficients treat the canopy and floor as extra "cover" layers.
    virtual_NIR_reflection_canopy_coef = canopy_virtual_NIR_reflection_coefficient(states)
    floor_NIR_reflection_coefficient = Coefficients.Floor.floor_NIR_reflection_coefficient
    virtual_NIR_transmission_cover_coef = lumped_cover_virtual_NIR_transmission_coefficients(cover_NIR_reflection_coefficient)
    virtual_NIR_transmission_canopy_coef = canopy_virtual_NIR_transmission_coefficient(states)
    virtual_NIR_transmission_floor_coef = floor_virtual_NIR_transmission_coefficients()
    # NIR transmission coefficient of the cover and canopy
    cover_can_NIR_transmission_coefficient = double_layer_cover_transmission_coefficient(virtual_NIR_transmission_cover_coef, virtual_NIR_transmission_canopy_coef, cover_NIR_reflection_coefficient, virtual_NIR_reflection_canopy_coef)  # line 380 / setGlAux / GreenLight
    # NIR reflection coefficient of the cover and canopy
    cover_can_NIR_reflection_coefficient = double_layer_cover_reflection_coefficient(virtual_NIR_transmission_cover_coef, cover_NIR_reflection_coefficient, virtual_NIR_reflection_canopy_coef)  # line 383, 386 / setGlAux / GreenLight
    # NIR transmission coefficient of the cover, canopy and floor
    cover_can_floor_NIR_transmission_coefficient = double_layer_cover_transmission_coefficient(cover_can_NIR_transmission_coefficient, virtual_NIR_transmission_floor_coef, cover_can_NIR_reflection_coefficient, floor_NIR_reflection_coefficient)  # line 389 / setGlAux / GreenLight
    # NIR absorption coefficient of the floor
    NIR_absorption_floor_coef = cover_can_floor_NIR_transmission_coefficient  # page 213
    # ratio_GlobAir: fraction of global radiation absorbed by construction elements;
    # the remainder reaches the floor path.
    ratio_GlobAir = Coefficients.Construction.ratio_GlobAir
    ratio_GlobNIR = Constants.ratio_GlobNIR
    outdoor_global_rad = weather.outdoor_global_rad
    return (1 - ratio_GlobAir) * NIR_absorption_floor_coef * ratio_GlobNIR * outdoor_global_rad
def floor_PAR_absorbed(states: States, setpoints: Setpoints, weather: Weather):
    """PAR absorbed by the greenhouse floor (Equation 8.35)."""
    rho_floor = Coefficients.Floor.floor_PAR_reflection_coefficient
    k_canopy = Constants.canopy_PAR_extinction_coefficient
    par_above = PAR_above_canopy(states, setpoints, weather)
    # Fraction of PAR passing through the canopy down to the floor.
    transmitted_through_canopy = math.exp(-k_canopy * states.leaf_area_index)
    return (1 - rho_floor) * transmitted_through_canopy * par_above
def PAR_above_canopy(states: States, setpoints: Setpoints, weather: Weather):
    """The PAR above the canopy, Equation 8.28 [W m^-2].

    The model contains four cover layers:
        + A movable outdoor shading screen
        + A semi-permanent shading screen
        + The greenhouse roof
        + A movable indoor thermal screen
    """
    # TODO: need to re-verify the order of four cover layers
    shadow = Coefficients.Shadowscreen
    # Vanthoor PAR transmission coefficient of the lumped cover:
    # shading screen combined with the roof+thermal-screen double layer.
    tau_cov_par = double_layer_cover_transmission_coefficient(
        shadow.shScr_PAR_transmission_coefficient,  # line 156 / setGlParams / GreenLight
        roof_thermal_screen_PAR_transmission_coefficient(setpoints),
        shadow.shScr_PAR_reflection_coefficient,  # line 153 / setGlParams / GreenLight
        roof_thermal_screen_PAR_reflection_coefficient(setpoints),
    )
    return (
        (1 - Coefficients.Construction.ratio_GlobAir)
        * tau_cov_par
        * Constants.ratio_GlobPAR
        * weather.outdoor_global_rad
    )
def canopy_NIR_absorbed(states: States, setpoints: Setpoints, weather: Weather):
    """The NIR absorbed by the canopy, Equation 8.33 [W m^-2].

    The model contains four cover layers:
        + A movable outdoor shading screen
        + A semi-permanent shading screen
        + The greenhouse roof
        + A movable indoor thermal screen
    """
    # TODO: need to re-verify the order of four cover layers and cover-canopy-floor
    shadow = Coefficients.Shadowscreen
    # Vanthoor NIR reflection coefficient of the lumped cover
    rho_cov_nir = double_layer_cover_reflection_coefficient(
        shadow.shScr_NIR_transmission_coefficient,  # line 155 / setGlParams / GreenLight
        shadow.shScr_NIR_reflection_coefficient,  # line 152 / setGlParams / GreenLight
        roof_thermal_screen_NIR_reflection_coefficient(setpoints),
    )
    # Virtual NIR transmission/reflection coefficients of cover, canopy and floor
    rho_can_nir = canopy_virtual_NIR_reflection_coefficient(states)
    rho_flr_nir = Coefficients.Floor.floor_NIR_reflection_coefficient
    tau_cov_nir = lumped_cover_virtual_NIR_transmission_coefficients(rho_cov_nir)
    tau_can_nir = canopy_virtual_NIR_transmission_coefficient(states)
    tau_flr_nir = floor_virtual_NIR_transmission_coefficients()
    # NIR transmission and reflection of the cover+canopy pair
    # (lines 380, 383, 386 / setGlAux / GreenLight)
    tau_cov_can = double_layer_cover_transmission_coefficient(
        tau_cov_nir, tau_can_nir, rho_cov_nir, rho_can_nir
    )
    rho_cov_can = double_layer_cover_reflection_coefficient(
        tau_cov_nir, rho_cov_nir, rho_can_nir
    )
    # NIR transmission and reflection of cover+canopy+floor
    # (lines 389, 392 / setGlAux / GreenLight)
    tau_cov_can_flr = double_layer_cover_transmission_coefficient(
        tau_cov_can, tau_flr_nir, rho_cov_can, rho_flr_nir
    )
    rho_cov_can_flr = double_layer_cover_reflection_coefficient(
        tau_cov_can, rho_cov_can, rho_flr_nir
    )
    # NIR absorption coefficient of the canopy (page 213)
    a_can_nir = 1 - tau_cov_can_flr - rho_cov_can_flr
    return (
        (1 - Coefficients.Construction.ratio_GlobAir)
        * a_can_nir
        * Constants.ratio_GlobNIR
        * weather.outdoor_global_rad
    )
def FIR_from_pipe_to_canopy(states: States):
    """Net FIR exchange from the heating pipes to the canopy [W m^-2]."""
    heating = Coefficients.Heating
    # Pipe surface per m^2 greenhouse soil
    pipe_area = math.pi * heating.pipe_length * heating.phi_external_pipe
    # View factor pipe -> canopy: 0.49 scaled by canopy closure (1 - e^(-k*LAI))
    view_factor = 0.49 * (
        1 - math.exp(-Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index)
    )
    return net_far_infrared_radiation_fluxes(
        pipe_area,
        heating.pipe_FIR_emission_coefficient,
        Constants.can_FIR_emission_coefficient,
        view_factor,
        Constants.sigma,
        states.pipe_t,
        states.can_t,
    )
def FIR_from_canopy_to_internal_cover(states: States, setpoints: Setpoints):
    """Net FIR exchange from the canopy to the internal cover layer [W m^-2]."""
    shadow = Coefficients.Shadowscreen
    roof = Coefficients.Roof
    # Lumped-cover FIR optics (lines 255, 260 / setGlAux / GreenLight)
    tau_cov = double_layer_cover_transmission_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        roof.roof_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    rho_cov = double_layer_cover_reflection_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    epsilon_cov = 1 - tau_cov - rho_cov  # = a_CovFIR, line 271 / setGlAux
    # Canopy surface per m^2 soil follows canopy closure
    canopy_area = 1 - math.exp(
        -Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index
    )
    # View factor canopy -> internal cover is the thermal screen FIR transmission
    view_factor = thermal_screen_FIR_transmission_coefficient(setpoints)
    return net_far_infrared_radiation_fluxes(
        canopy_area,
        Constants.can_FIR_emission_coefficient,
        epsilon_cov,
        view_factor,
        Constants.sigma,
        states.can_t,
        states.internal_cov_t,
    )
def FIR_from_canopy_to_floor(states: States):
    """Net FIR exchange from the canopy to the floor [W m^-2]."""
    heating = Coefficients.Heating
    canopy_area = 1 - math.exp(
        -Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index
    )
    # View factor canopy -> floor: the floor fraction not shaded by the pipes
    view_factor = 1 - 0.49 * math.pi * heating.pipe_length * heating.phi_external_pipe
    return net_far_infrared_radiation_fluxes(
        canopy_area,
        Constants.can_FIR_emission_coefficient,
        Coefficients.Floor.floor_FIR_emission_coefficient,
        view_factor,
        Constants.sigma,
        states.can_t,
        states.floor_t,
    )
def FIR_from_canopy_to_sky(states: States, setpoints: Setpoints, weather: Weather):
    """Net FIR exchange from the canopy to the sky [W m^-2]."""
    shadow = Coefficients.Shadowscreen
    roof = Coefficients.Roof
    # FIR transmission of the lumped cover (line 255 / setGlAux / GreenLight)
    tau_cov = double_layer_cover_transmission_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        roof.roof_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    canopy_area = 1 - math.exp(
        -Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index
    )
    # The canopy only sees the sky through cover and thermal screen
    view_factor = tau_cov * thermal_screen_FIR_transmission_coefficient(setpoints)
    return net_far_infrared_radiation_fluxes(
        canopy_area,
        Constants.can_FIR_emission_coefficient,
        Constants.sky_FIR_emission_coefficient,
        view_factor,
        Constants.sigma,
        states.can_t,
        weather.sky_t,
    )
def FIR_from_canopy_to_thermal_screen(states: States, setpoints: Setpoints):
    """Net FIR exchange from the canopy to the thermal screen [W m^-2]."""
    canopy_area = 1 - math.exp(
        -Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index
    )
    # View factor equals the thermal-screen closure setpoint
    return net_far_infrared_radiation_fluxes(
        canopy_area,
        Constants.can_FIR_emission_coefficient,
        Coefficients.Thermalscreen.thScr_FIR_emission_coefficient,
        setpoints.U_ThScr,
        Constants.sigma,
        states.can_t,
        states.thermal_screen_t,
    )
def FIR_from_heating_pipe_to_floor(states: States):
    """Net FIR exchange from the heating pipes to the floor [W m^-2]."""
    heating = Coefficients.Heating
    pipe_area = math.pi * heating.pipe_length * heating.phi_external_pipe
    # Constant view factor pipe -> floor
    view_factor = 0.49
    return net_far_infrared_radiation_fluxes(
        pipe_area,
        heating.pipe_FIR_emission_coefficient,
        Coefficients.Floor.floor_FIR_emission_coefficient,
        view_factor,
        Constants.sigma,
        states.pipe_t,
        states.floor_t,
    )
def FIR_from_floor_to_internal_cover(states: States, setpoints: Setpoints):
    """Net FIR exchange from the floor to the internal cover layer [W m^-2]."""
    heating = Coefficients.Heating
    shadow = Coefficients.Shadowscreen
    roof = Coefficients.Roof
    # Lumped-cover FIR optics (lines 255, 260 / setGlAux / GreenLight)
    tau_cov = double_layer_cover_transmission_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        roof.roof_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    rho_cov = double_layer_cover_reflection_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    epsilon_cov = 1 - tau_cov - rho_cov  # = a_CovFIR, line 271 / setGlAux
    # Floor sees the cover through the thermal screen, past the pipes
    # and attenuated by the canopy
    view_factor = (
        thermal_screen_FIR_transmission_coefficient(setpoints)
        * (1 - 0.49 * math.pi * heating.pipe_length * heating.phi_external_pipe)
        * math.exp(-Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index)
    )
    floor_area = 1
    return net_far_infrared_radiation_fluxes(
        floor_area,
        Coefficients.Floor.floor_FIR_emission_coefficient,
        epsilon_cov,
        view_factor,
        Constants.sigma,
        states.floor_t,
        states.internal_cov_t,
    )
def FIR_from_floor_to_sky(states: States, setpoints: Setpoints, weather: Weather):
    """Net FIR exchange from the floor to the sky [W m^-2]."""
    heating = Coefficients.Heating
    shadow = Coefficients.Shadowscreen
    roof = Coefficients.Roof
    # FIR transmission of the lumped cover (line 255 / setGlAux / GreenLight)
    tau_cov = double_layer_cover_transmission_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        roof.roof_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    # Floor sees the sky through cover and thermal screen, past the pipes
    # and attenuated by the canopy
    view_factor = (
        tau_cov
        * thermal_screen_FIR_transmission_coefficient(setpoints)
        * (1 - 0.49 * math.pi * heating.pipe_length * heating.phi_external_pipe)
        * math.exp(-Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index)
    )
    floor_area = 1
    return net_far_infrared_radiation_fluxes(
        floor_area,
        Coefficients.Floor.floor_FIR_emission_coefficient,
        Constants.sky_FIR_emission_coefficient,
        view_factor,
        Constants.sigma,
        states.floor_t,
        weather.sky_t,
    )
def FIR_from_floor_to_thermal_screen(states: States, setpoints: Setpoints):
    """Net FIR exchange from the floor to the thermal screen [W m^-2]."""
    heating = Coefficients.Heating
    # Screen closure, reduced by pipe shading and canopy attenuation
    view_factor = (
        setpoints.U_ThScr
        * (1 - 0.49 * math.pi * heating.pipe_length * heating.phi_external_pipe)
        * math.exp(-Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index)
    )
    floor_area = 1
    return net_far_infrared_radiation_fluxes(
        floor_area,
        Coefficients.Floor.floor_FIR_emission_coefficient,
        Coefficients.Thermalscreen.thScr_FIR_emission_coefficient,
        view_factor,
        Constants.sigma,
        states.floor_t,
        states.thermal_screen_t,
    )
def FIR_from_heating_pipe_to_thermal_screen(states: States, setpoints: Setpoints):
    """Net FIR exchange from the heating pipes to the thermal screen [W m^-2]."""
    heating = Coefficients.Heating
    pipe_area = math.pi * heating.pipe_length * heating.phi_external_pipe
    # Screen closure times the pipe constant, attenuated by the canopy
    view_factor = setpoints.U_ThScr * 0.49 * math.exp(
        -Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index
    )
    return net_far_infrared_radiation_fluxes(
        pipe_area,
        heating.pipe_FIR_emission_coefficient,
        Coefficients.Thermalscreen.thScr_FIR_emission_coefficient,
        view_factor,
        Constants.sigma,
        states.pipe_t,
        states.thermal_screen_t,
    )
def FIR_from_thermal_screen_to_internal_cover(states: States, setpoints: Setpoints):
    """Net FIR exchange from the thermal screen to the internal cover [W m^-2]."""
    shadow = Coefficients.Shadowscreen
    roof = Coefficients.Roof
    # Lumped-cover FIR optics (lines 255, 260 / setGlAux / GreenLight)
    tau_cov = double_layer_cover_transmission_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        roof.roof_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    rho_cov = double_layer_cover_reflection_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    epsilon_cov = 1 - tau_cov - rho_cov  # = a_CovFIR, line 271 / setGlAux
    screen_area = 1
    # View factor equals the thermal-screen closure setpoint
    return net_far_infrared_radiation_fluxes(
        screen_area,
        Coefficients.Thermalscreen.thScr_FIR_emission_coefficient,
        epsilon_cov,
        setpoints.U_ThScr,
        Constants.sigma,
        states.thermal_screen_t,
        states.internal_cov_t,
    )
def FIR_from_thermal_screen_to_sky(states: States, setpoints: Setpoints, weather: Weather):
    """Net FIR exchange from the thermal screen to the sky [W m^-2]."""
    shadow = Coefficients.Shadowscreen
    roof = Coefficients.Roof
    # FIR transmission of the lumped cover (line 255 / setGlAux / GreenLight)
    tau_cov = double_layer_cover_transmission_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        roof.roof_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    screen_area = 1
    # Screen sees the sky through the cover, scaled by its closure
    view_factor = tau_cov * setpoints.U_ThScr
    return net_far_infrared_radiation_fluxes(
        screen_area,
        Coefficients.Thermalscreen.thScr_FIR_emission_coefficient,
        Constants.sky_FIR_emission_coefficient,
        view_factor,
        Constants.sigma,
        states.thermal_screen_t,
        weather.sky_t,
    )
def FIR_from_heating_pipe_to_internal_cover(states: States, setpoints: Setpoints):
    """Net FIR exchange from the heating pipes to the internal cover [W m^-2]."""
    heating = Coefficients.Heating
    shadow = Coefficients.Shadowscreen
    roof = Coefficients.Roof
    # Lumped-cover FIR optics (lines 255, 260 / setGlAux / GreenLight)
    tau_cov = double_layer_cover_transmission_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        roof.roof_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    rho_cov = double_layer_cover_reflection_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    epsilon_cov = 1 - tau_cov - rho_cov  # = a_CovFIR, line 271 / setGlAux
    pipe_area = math.pi * heating.pipe_length * heating.phi_external_pipe
    # Pipe sees the cover through the thermal screen, attenuated by the canopy
    view_factor = (
        thermal_screen_FIR_transmission_coefficient(setpoints)
        * 0.49
        * math.exp(-Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index)
    )
    return net_far_infrared_radiation_fluxes(
        pipe_area,
        heating.pipe_FIR_emission_coefficient,
        epsilon_cov,
        view_factor,
        Constants.sigma,
        states.pipe_t,
        states.internal_cov_t,
    )
def cover_global_radiation(states: States, setpoints: Setpoints, weather: Weather):
    """Global radiation absorbed by the lumped cover, Equation 8.37 [W m^-2].

    Combines the PAR and NIR absorption coefficients of the lumped cover,
    weighted by the PAR/NIR fractions of the outdoor global radiation.
    """
    # TODO: need to re-verify the order of four cover layers and cover-canopy-floor
    shadow = Coefficients.Shadowscreen
    # Vanthoor PAR transmission/reflection of the lumped cover:
    # shading screen combined with the roof+thermal-screen double layer
    tau_cov_par = double_layer_cover_transmission_coefficient(
        shadow.shScr_PAR_transmission_coefficient,  # line 156 / setGlParams / GreenLight
        roof_thermal_screen_PAR_transmission_coefficient(setpoints),
        shadow.shScr_PAR_reflection_coefficient,  # line 153 / setGlParams / GreenLight
        roof_thermal_screen_PAR_reflection_coefficient(setpoints),
    )
    rho_cov_par = double_layer_cover_reflection_coefficient(
        shadow.shScr_PAR_transmission_coefficient,
        shadow.shScr_PAR_reflection_coefficient,
        roof_thermal_screen_PAR_reflection_coefficient(setpoints),
    )
    # Vanthoor NIR transmission/reflection of the lumped cover
    tau_cov_nir = double_layer_cover_transmission_coefficient(
        shadow.shScr_NIR_transmission_coefficient,  # line 155 / setGlParams / GreenLight
        roof_thermal_screen_NIR_transmission_coefficient(setpoints),
        shadow.shScr_NIR_reflection_coefficient,  # line 152 / setGlParams / GreenLight
        roof_thermal_screen_NIR_reflection_coefficient(setpoints),
    )
    rho_cov_nir = double_layer_cover_reflection_coefficient(
        shadow.shScr_NIR_transmission_coefficient,
        shadow.shScr_NIR_reflection_coefficient,
        roof_thermal_screen_NIR_reflection_coefficient(setpoints),
    )
    a_cov_par = absorption_coefficient(tau_cov_par, rho_cov_par)
    a_cov_nir = absorption_coefficient(tau_cov_nir, rho_cov_nir)
    return (
        a_cov_par * Constants.ratio_GlobPAR + a_cov_nir * Constants.ratio_GlobNIR
    ) * weather.outdoor_global_rad
def FIR_from_external_cover_to_sky(states: States, weather: Weather):
    """Net FIR exchange from the external cover layer to the sky [W m^-2]."""
    shadow = Coefficients.Shadowscreen
    roof = Coefficients.Roof
    # Lumped-cover FIR optics (lines 255, 260 / setGlAux / GreenLight)
    tau_cov = double_layer_cover_transmission_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        roof.roof_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    rho_cov = double_layer_cover_reflection_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    epsilon_cov = 1 - tau_cov - rho_cov  # = a_CovFIR, line 271 / setGlAux
    cover_area = 1
    view_factor = 1  # the external cover fully faces the sky
    return net_far_infrared_radiation_fluxes(
        cover_area,
        epsilon_cov,
        Constants.sky_FIR_emission_coefficient,
        view_factor,
        Constants.sigma,
        states.external_cov_t,
        weather.sky_t,
    )
def FIR_from_heating_pipe_to_sky(states: States, setpoints: Setpoints, weather: Weather):
    """Net FIR exchange from the heating pipes to the sky [W m^-2]."""
    heating = Coefficients.Heating
    shadow = Coefficients.Shadowscreen
    roof = Coefficients.Roof
    pipe_area = math.pi * heating.pipe_length * heating.phi_external_pipe
    # FIR transmission of the lumped cover (line 255 / setGlAux / GreenLight)
    tau_cov = double_layer_cover_transmission_coefficient(
        shadow.shScr_FIR_transmission_coefficient,
        roof.roof_FIR_transmission_coefficient,
        shadow.shScr_FIR_reflection_coefficient,
        roof.roof_FIR_reflection_coefficient,
    )
    # Pipe sees the sky through cover and thermal screen, attenuated by the canopy
    view_factor = (
        tau_cov
        * thermal_screen_FIR_transmission_coefficient(setpoints)
        * 0.49
        * math.exp(-Constants.canopy_FIR_extinction_coefficient * states.leaf_area_index)
    )
    return net_far_infrared_radiation_fluxes(
        pipe_area,
        heating.pipe_FIR_emission_coefficient,
        Constants.sky_FIR_emission_coefficient,
        view_factor,
        Constants.sigma,
        states.pipe_t,
        weather.sky_t,
    )
def net_far_infrared_radiation_fluxes(A_i, ep_i, ep_j, F_ij, sigma, object_i_t, object_j_t) -> float:
    """The net far infrared radiation fluxes from surface 'i' to 'j'.

    Equation 8.38 (Stefan-Boltzmann exchange between two grey surfaces).

    :param float A_i: surface of object 'i' per square meter greenhouse soil
    :param float ep_i: thermal infrared emission coefficient of object 'i'
    :param float ep_j: thermal infrared emission coefficient of object 'j'
    :param float F_ij: view factor from object 'i' to 'j'
    :param float sigma: the Stefan-Boltzmann constant
    :param float object_i_t: temperature of object 'i' [degrees C]
    :param float object_j_t: temperature of object 'j' [degrees C]
    :return: net far infrared radiation flux from surface 'i' to 'j' [W m^-2]
    """
    # Convert Celsius to Kelvin before applying the fourth-power law
    t_i_kelvin = object_i_t + 273.15
    t_j_kelvin = object_j_t + 273.15
    exchange_factor = A_i * ep_i * ep_j * F_ij * sigma
    return exchange_factor * (t_i_kelvin ** 4 - t_j_kelvin ** 4)
| 72.447368
| 279
| 0.840023
| 4,176
| 33,036
| 6.144875
| 0.042146
| 0.140758
| 0.08207
| 0.03858
| 0.937999
| 0.922801
| 0.900978
| 0.87331
| 0.827092
| 0.795137
| 0
| 0.007545
| 0.113331
| 33,036
| 455
| 280
| 72.606593
| 0.868497
| 0.135277
| 0
| 0.748387
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006593
| 0
| 1
| 0.077419
| false
| 0
| 0.006452
| 0
| 0.16129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d8db6fde9196dd03aea23673710673cb09813a7
| 8,425
|
py
|
Python
|
tests/integration/test_tox_no_deps_int.py
|
stanislavlevin/tox-no-deps
|
5d5973d320a00c50cefff3b620e61598edaf6393
|
[
"MIT"
] | null | null | null |
tests/integration/test_tox_no_deps_int.py
|
stanislavlevin/tox-no-deps
|
5d5973d320a00c50cefff3b620e61598edaf6393
|
[
"MIT"
] | 2
|
2021-08-24T07:13:16.000Z
|
2021-08-24T07:23:32.000Z
|
tests/integration/test_tox_no_deps_int.py
|
stanislavlevin/tox-no-deps
|
5d5973d320a00c50cefff3b620e61598edaf6393
|
[
"MIT"
] | null | null | null |
import os
import sys
import subprocess
import pytest
# Log-line templates the no-deps plugin prints (with -v) when it strips the
# 'deps' / 'extras' settings from an env; {} is filled with the skipped list.
NO_DEPS_SKIP_TEMPLATE = "no-deps plugin: deps: '{}' will be skipped for 'python'"
NO_EXTRAS_SKIP_TEMPLATE = "no-deps plugin: extras: '{}' will be skipped for 'python'"
def test_no_plugin_usage(initproj, cmd):
    """Plugin doesn't break regular tox"""
    tox_ini = """
[tox]
skipsdist = True
[testenv]
commands=python -c "print('test')"
"""
    initproj("pkg123", filedefs={"tox.ini": tox_ini})
    outcome = cmd()
    outcome.assert_success()
def test_no_plugin_usage_deps(initproj, cmd):
    """Without --no-deps an unresolvable dep makes tox fail."""
    tox_ini = """
[tox]
skipsdist = True
[testenv]
deps =
    somenotexisted_package1 == 9.9.9
commands=python -c "print('test')"
"""
    initproj("pkg123", filedefs={"tox.ini": tox_ini})
    outcome = cmd()
    outcome.assert_fail()
    expected = "No matching distribution found for somenotexisted_package1==9.9.9"
    assert expected in outcome.out
def test_no_plugin_usage_extras(initproj, cmd):
    """Without --no-deps an unresolvable extra makes tox fail."""
    setup_cfg = """\
[metadata]
name = pkg123
description = pkg123 project
version = 0.0.1
license = MIT
platforms = unix
[options]
packages = find:
[options.packages.find]
where = .
[options.extras_require]
tests =
    somenotexisted_package1 == 9.9.9
"""
    setup_py = """\
from setuptools import setup

if __name__ == "__main__":
    setup()
"""
    tox_ini = """
[tox]
[testenv]
extras =
    tests
usedevelop=true
commands=python -c "print('test')"
"""
    initproj(
        "pkg123",
        filedefs={"setup.cfg": setup_cfg, "setup.py": setup_py, "tox.ini": tox_ini},
    )
    outcome = cmd()
    outcome.assert_fail()
    expected = "No matching distribution found for somenotexisted_package1==9.9.9"
    assert expected in outcome.out
def test_deps_skipped(initproj, cmd):
    """With --no-deps the env's deps are skipped and logged."""
    tox_ini = """
[tox]
skipsdist = True
[testenv]
deps =
    somenotexisted_package1 == 9.9.9
    somenotexisted_package2
commands=python -c "print('test')"
"""
    initproj("pkg123", filedefs={"tox.ini": tox_ini})
    outcome = cmd("--no-deps", "-v")
    outcome.assert_success()
    skipped = "[somenotexisted_package1 == 9.9.9, somenotexisted_package2]"
    assert NO_DEPS_SKIP_TEMPLATE.format(skipped) in outcome.outlines
def test_extras_skipped(initproj, cmd):
    """With --no-deps the env's extras are skipped and logged."""
    setup_cfg = """\
[metadata]
name = pkg123
description = pkg123 project
version = 0.0.1
license = MIT
platforms = unix
[options]
packages = find:
[options.packages.find]
where = .
[options.extras_require]
tests =
    somenotexisted_package1 == 9.9.9
    somenotexisted_package2
"""
    setup_py = """\
from setuptools import setup

if __name__ == "__main__":
    setup()
"""
    tox_ini = """
[tox]
[testenv]
extras =
    tests
usedevelop=true
commands=python -c "print('test')"
"""
    initproj(
        "pkg123",
        filedefs={"setup.cfg": setup_cfg, "setup.py": setup_py, "tox.ini": tox_ini},
    )
    outcome = cmd("--no-deps", "-v")
    outcome.assert_success()
    assert NO_EXTRAS_SKIP_TEMPLATE.format("['tests']") in outcome.outlines
def test_deps_extras_skipped(initproj, cmd):
    """With --no-deps both deps and extras are skipped and logged."""
    setup_cfg = """\
[metadata]
name = pkg123
description = pkg123 project
version = 0.0.1
license = MIT
platforms = unix
[options]
packages = find:
[options.packages.find]
where = .
[options.extras_require]
tests =
    somenotexisted_package1 == 9.9.9
    somenotexisted_package2
"""
    setup_py = """\
from setuptools import setup

if __name__ == "__main__":
    setup()
"""
    tox_ini = """
[tox]
[testenv]
deps =
    somenotexisted_package1 == 9.9.9
    somenotexisted_package2
extras =
    tests
usedevelop=true
commands=python -c "print('test')"
"""
    initproj(
        "pkg123",
        filedefs={"setup.cfg": setup_cfg, "setup.py": setup_py, "tox.ini": tox_ini},
    )
    outcome = cmd("--no-deps", "-v")
    outcome.assert_success()
    skipped_deps = "[somenotexisted_package1 == 9.9.9, somenotexisted_package2]"
    assert NO_DEPS_SKIP_TEMPLATE.format(skipped_deps) in outcome.outlines
    assert NO_EXTRAS_SKIP_TEMPLATE.format("['tests']") in outcome.outlines
def test_deps_from_file_skipped(initproj, cmd):
    """Requirements-file deps (-r...) are skipped too."""
    requirements = """\
somenotexisted_package1 == 9.9.9
"""
    tox_ini = """
[tox]
[testenv]
deps =
    -rrequirements.txt
    somenotexisted_package2
usedevelop=true
commands=python -c "print('test')"
"""
    initproj(
        "pkg123",
        filedefs={"requirements.txt": requirements, "tox.ini": tox_ini},
    )
    outcome = cmd("--no-deps", "-v")
    outcome.assert_success()
    skipped = "[-rrequirements.txt, somenotexisted_package2]"
    assert NO_DEPS_SKIP_TEMPLATE.format(skipped) in outcome.outlines
def test_deps_from_env_skipped(initproj, cmd):
    """Deps pulled in via section substitution ({[base]deps}) are skipped too."""
    requirements = """\
somenotexisted_package1 == 9.9.9
"""
    requirements1 = """\
somenotexisted_package2 == 9.9.9
"""
    tox_ini = """
[tox]
[base]
deps =
    -rrequirements.txt
    somenotexisted_package3
[testenv]
deps =
    {[base]deps}
    -rrequirements1.txt
    somenotexisted_package4
usedevelop=true
commands=python -c "print('test')"
"""
    initproj(
        "pkg123",
        filedefs={
            "requirements.txt": requirements,
            "requirements1.txt": requirements1,
            "tox.ini": tox_ini,
        },
    )
    outcome = cmd("--no-deps", "-v")
    outcome.assert_success()
    skipped = (
        "[-rrequirements.txt, somenotexisted_package3, -rrequirements1.txt,"
        " somenotexisted_package4]"
    )
    assert NO_DEPS_SKIP_TEMPLATE.format(skipped) in outcome.outlines
def test_package_deps_required(initproj, cmd):
    """Plugin doesn't mangle package deps.

    ``install_requires`` from setup.cfg must still be resolved even under
    --no-deps; since the dependency does not exist, the run must fail with
    pip's resolution error.
    """
    initproj(
        "pkg123",
        filedefs={
            "setup.cfg": """\
                [metadata]
                name = pkg123
                description = pkg123 project
                version = 0.0.1
                license = MIT
                platforms = unix
                [options]
                packages = find:
                install_requires =
                    somenotexisted_package == 9.9.9
                [options.packages.find]
                where = .
            """,
            "setup.py": """\
                from setuptools import setup
                if __name__ == "__main__":
                    setup()
            """,
            "tox.ini": """
                [tox]
                [testenv]
                usedevelop=true
                commands=python -c "print('test')"
            """,
        },
    )
    result = cmd("--no-deps")
    result.assert_fail()
    assert (
        "No matching distribution found for somenotexisted_package==9.9.9" in result.out
    )
| 26.083591
| 88
| 0.436558
| 660
| 8,425
| 5.383333
| 0.137879
| 0.016887
| 0.012665
| 0.081058
| 0.883479
| 0.811427
| 0.799606
| 0.785533
| 0.785533
| 0.737968
| 0
| 0.029753
| 0.457448
| 8,425
| 322
| 89
| 26.164596
| 0.747539
| 0.007953
| 0
| 0.795053
| 0
| 0
| 0.672337
| 0.097161
| 0
| 0
| 0
| 0
| 0.063604
| 1
| 0.031802
| false
| 0
| 0.028269
| 0
| 0.060071
| 0.031802
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5713ccbcc09db4a968b95ec08d505a559bedcbc8
| 219
|
py
|
Python
|
pymc/run_test.py
|
nikicc/anaconda-recipes
|
9c611a5854bf41bbc5e7ed9853dc71c0851a62ef
|
[
"BSD-3-Clause"
] | 302
|
2015-01-04T18:21:56.000Z
|
2021-11-16T12:14:37.000Z
|
pymc/run_test.py
|
nikicc/anaconda-recipes
|
9c611a5854bf41bbc5e7ed9853dc71c0851a62ef
|
[
"BSD-3-Clause"
] | 393
|
2015-01-03T14:35:48.000Z
|
2019-12-09T15:09:07.000Z
|
pymc/run_test.py
|
nikicc/anaconda-recipes
|
9c611a5854bf41bbc5e7ed9853dc71c0851a62ef
|
[
"BSD-3-Clause"
] | 325
|
2015-01-04T17:26:39.000Z
|
2021-11-04T16:25:54.000Z
|
import pymc
import pymc.Container_values
import pymc.LazyFunction
import pymc.flib
import pymc.gp.cov_funs.distances
import pymc.gp.cov_funs.isotropic_cov_funs
import pymc.gp.incomplete_chol
import pymc.gp.linalg_utils
| 24.333333
| 42
| 0.86758
| 36
| 219
| 5.083333
| 0.416667
| 0.437158
| 0.262295
| 0.163934
| 0.20765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073059
| 219
| 8
| 43
| 27.375
| 0.901478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5726d1d103622a9dc4884d10cf9c913230cea76d
| 1,600
|
py
|
Python
|
src/utils/fixtures/fouac__find_one.py
|
ozacas/spider-pi4-kafka-cluster
|
3d960b042763ffed17f2c39044cf6f0c22617004
|
[
"BSD-2-Clause"
] | 2
|
2020-05-27T19:09:35.000Z
|
2022-02-06T02:36:03.000Z
|
src/utils/fixtures/fouac__find_one.py
|
ozacas/spider-pi4-kafka-cluster
|
3d960b042763ffed17f2c39044cf6f0c22617004
|
[
"BSD-2-Clause"
] | null | null | null |
src/utils/fixtures/fouac__find_one.py
|
ozacas/spider-pi4-kafka-cluster
|
3d960b042763ffed17f2c39044cf6f0c22617004
|
[
"BSD-2-Clause"
] | null | null | null |
from bson import Binary
from bson.objectid import ObjectId
def fouac_find_one_expected_results():
    """Fixture: the document expected back from a Mongo ``find_one`` call.

    ``_id`` is a deterministic 24-hex-char ObjectId ('fafafa' repeated four
    times); ``analysis_bytes`` is a UTF-8-encoded JSON blob of AST-node,
    call and literal statistics for a minified json2.js artefact (see the
    ``id`` URL at the end of the blob).
    """
    # NOTE(review): the blob is a raw-string literal encoded to bytes; keep it
    # byte-identical — tests compare it verbatim against the stored document.
    return { '_id': ObjectId('fafafa' * 4), 'analysis_bytes': r'{"statements_by_count":{"FunctionNode":11,"StringLiteral":90,"VariableInitializer":23,"Scope":1,"KeywordLiteral":9,"AstRoot":1,"RegExpLiteral":6,"Assignment":40,"IfStatement":7,"ConditionalExpression":13,"ThrowStatement":2,"Block":11,"SwitchStatement":1,"ObjectLiteral":4,"ObjectProperty":9,"InfixExpression":117,"ExpressionStatement":10,"PropertyGet":62,"ReturnStatement":15,"ForLoop":3,"SwitchCase":5,"UnaryExpression":23,"ForInLoop":2,"Name":305,"NumberLiteral":19,"ArrayLiteral":1,"VariableDeclaration":7,"NewExpression":2,"FunctionCall":55,"ElementGet":8,"ParenthesizedExpression":16},"calls_by_count":{"f":5,"charCodeAt":2,"toString":2,"test":3,"getUTCMinutes":1,"apply":1,"valueOf":2,"getUTCDate":1,"walk":2,"String":3,"getUTCMonth":1,"join":4,"isFinite":2,"getUTCFullYear":1,"push":2,"eval":1,"toJSON":1,"replace":5,"quote":3,"call":4,"getUTCSeconds":1,"slice":2,"str":4,"getUTCHours":1},"literals_by_count":{"\b":1,"\t":1,"\n":3,"\f":1,"\r":1," ":1,"\"":5,"(":1,")":1,",":2,"-":2,"0":11,"1":4,"4":2,":":4,"@":1,"T":1,"Z":1,"[":1,"\\":1,"]":3,"{":1,"}":2,",\n":2,"null":4,"10":1,"16":2,": ":2,"[object Array]":1,"[\n":1,"\\\"":1,"[]":1,"\\\\":1,"\\b":1,"\\f":1,"\\n":1,"\\r":1,"\\t":1,"\\u":2,"0000":2,"{\n":1,"{}":1,"JSON.stringify":1,"number":3,"object":6,"function":7,"use strict":1,"string":4,"JSON.parse":1,"boolean":1},"id":"https://cdn.jsdelivr.net/gh/WordPress/WordPress@5.2.5//wp-includes/js/json2.min.js"}'.encode() }
| 228.571429
| 1,499
| 0.64625
| 246
| 1,600
| 4.154472
| 0.52439
| 0.015656
| 0.008806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088479
| 0.018125
| 1,600
| 6
| 1,500
| 266.666667
| 0.562062
| 0
| 0
| 0
| 0
| 0.25
| 0.904315
| 0.792996
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 10
|
574de97d40f826b38312cd44a2bfd370be894e53
| 46
|
py
|
Python
|
karr_lab_aws_manager/elasticsearch_kl/__init__.py
|
KarrLab/karr_lab_aws_manager_test_0
|
cd6f202328d93e8a0a91d4210b61bf8b3a9b518f
|
[
"MIT"
] | null | null | null |
karr_lab_aws_manager/elasticsearch_kl/__init__.py
|
KarrLab/karr_lab_aws_manager_test_0
|
cd6f202328d93e8a0a91d4210b61bf8b3a9b518f
|
[
"MIT"
] | 1
|
2020-02-25T03:15:00.000Z
|
2020-02-25T03:15:00.000Z
|
karr_lab_aws_manager/elasticsearch_kl/__init__.py
|
KarrLab/karr_lab_aws_manager
|
cd6f202328d93e8a0a91d4210b61bf8b3a9b518f
|
[
"MIT"
] | null | null | null |
from . import query_builder
from . import util
| 23
| 27
| 0.804348
| 7
| 46
| 5.142857
| 0.714286
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 46
| 2
| 28
| 23
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5759ae55900675df39d0a8d3c7b15d7df85c11ac
| 6,106
|
py
|
Python
|
tests/test_numpy_support.py
|
justengel/serial_json
|
c46f5e1a0a519b1df181c73421b7d89b41bf3c72
|
[
"MIT"
] | 4
|
2020-01-20T18:07:50.000Z
|
2021-11-19T23:32:25.000Z
|
tests/test_numpy_support.py
|
justengel/serial_json
|
c46f5e1a0a519b1df181c73421b7d89b41bf3c72
|
[
"MIT"
] | null | null | null |
tests/test_numpy_support.py
|
justengel/serial_json
|
c46f5e1a0a519b1df181c73421b7d89b41bf3c72
|
[
"MIT"
] | null | null | null |
def test_ndarray():
    """Round-trip plain integer and float ndarrays through serial_json."""
    import numpy as np
    import serial_json
    import serial_json.bytes_support  # Not needed in normal use
    import serial_json.numpy_support  # Not needed in normal use

    cases = (
        ([(1, 2, 3, 4), (5, 6, 7, 8)], '<i4'),
        ([(1.2, 2.3, 3.4, 4.5), (5.6, 6.7, 7.8, 8.9)], '<f4'),
    )
    for rows, typecode in cases:
        original = np.array(rows, dtype=np.dtype(typecode))
        restored = serial_json.loads(serial_json.dumps(original))
        # Type, contents, dtype and shape must all survive the round trip.
        assert type(restored) == type(original)
        assert np.all(restored == original)
        assert restored.dtype == original.dtype
        assert restored.shape == original.shape
def test_np_structured_array():
    """Round-trip a structured (record-dtype) ndarray through serial_json."""
    import numpy as np
    import serial_json
    import serial_json.bytes_support  # Not needed in normal use
    import serial_json.numpy_support  # Not needed in normal use

    record_dtype = np.dtype([('a', '<i4'), ('b', '<f4'), ('c', '<u1'), ('d', '|O')])
    original = np.array(
        [(1, 2.3, True, 'abc'), (5, 6.7, False, 'def')], dtype=record_dtype
    )
    restored = serial_json.loads(serial_json.dumps(original))
    # Type, contents, dtype and shape must all survive the round trip.
    assert type(restored) == type(original)
    assert np.all(restored == original)
    assert restored.dtype == original.dtype
    assert restored.shape == original.shape
def test_np_recarray():
    """Round-trip a numpy recarray through serial_json."""
    import numpy as np
    import serial_json
    import serial_json.bytes_support  # Not needed in normal use
    import serial_json.numpy_support  # Not needed in normal use

    record_dtype = np.dtype([('a', '<i4'), ('b', '<f4'), ('c', '<u1'), ('d', '|O')])
    original = np.recarray((2,), dtype=record_dtype)
    original[0] = (1, 2.3, True, 'abc')
    original[1] = (5, 6.7, False, 'def')
    restored = serial_json.loads(serial_json.dumps(original))
    # Type, contents, dtype and shape must all survive the round trip.
    assert type(restored) == type(original)
    assert np.all(restored == original)
    assert restored.dtype == original.dtype
    assert restored.shape == original.shape
def time_numpy_array(test_runs=1000):
    """Benchmark serial_json dumps/loads of a plain float ndarray against a
    json + ``tolist()`` round trip, for a small and a large array.

    Prints timings to stdout; returns nothing.  Not collected by pytest
    (name lacks the ``test_`` prefix) — run via ``__main__`` below.

    :param test_runs: number of timeit iterations per measurement.
    """
    import timeit
    import numpy as np
    import json
    import serial_json
    import serial_json.bytes_support  # Not needed in normal use
    import serial_json.numpy_support  # Not needed in normal use

    # Small array: 200 float32 values.
    n = np.array([(1.2, 2.3, 3.4, 4.5),
                  (5.6, 6.7, 7.8, 8.9)] * 100, dtype=np.dtype('<f4'))
    # n_list = json.dumps(n.tolist())
    # n_serial = serial_json.dumps(n)

    # NOTE: the closures capture ``n`` by name, so rebinding ``n`` below
    # retargets them; each section therefore redefines its own pair.
    def run_serial_json():
        serial_json.dumps(n)

    def run_list():
        json.dumps(n.tolist())

    t1 = timeit.timeit(run_serial_json, number=test_runs)
    print('Serial JSON DUMPS (size {}): '.format(n.size), t1)
    t2 = timeit.timeit(run_list, number=test_runs)
    print('List JSON DUMPS (size {}): '.format(n.size), t2)

    # ===== Larger size =====
    n = np.array([(1.2, 2.3, 3.4, 4.5),
                  (5.6, 6.7, 7.8, 8.9)] * 10000, dtype=np.dtype('<f4'))
    # n_list = json.dumps(n.tolist())
    # n_serial = serial_json.dumps(n)

    def run_serial_json():
        serial_json.dumps(n)

    def run_list():
        json.dumps(n.tolist())

    t1 = timeit.timeit(run_serial_json, number=test_runs)
    print('Serial JSON DUMPS (size {}): '.format(n.size), t1)
    t2 = timeit.timeit(run_list, number=test_runs)
    print('List JSON DUMPS (size {}): '.format(n.size), t2)

    # ===== str to numpy =====
    # Deserialization direction: time parsing the pre-computed strings back.
    n = np.array([(1.2, 2.3, 3.4, 4.5),
                  (5.6, 6.7, 7.8, 8.9)] * 10000, dtype=np.dtype('<f4'))
    n_list = json.dumps(n.tolist())
    n_serial = serial_json.dumps(n)

    def run_serial_json():
        serial_json.loads(n_serial)

    def run_list():
        np.array(json.loads(n_list))

    t1 = timeit.timeit(run_serial_json, number=test_runs)
    print('Serial JSON LOADS (size {}): '.format(n.size), t1)
    t2 = timeit.timeit(run_list, number=test_runs)
    print('List JSON LOADS (size {}): '.format(n.size), t2)
def time_np_recarray(test_runs=1000):
    """Benchmark serial_json dumps/loads of a recarray against json plus a
    hand-rolled per-field rebuild, for a small and a large array.

    Prints timings to stdout; returns nothing.  Not collected by pytest
    (name lacks the ``test_`` prefix) — run via ``__main__`` below.

    :param test_runs: number of timeit iterations per measurement.
    """
    import timeit
    import numpy as np
    import json
    import serial_json
    import serial_json.bytes_support  # Not needed in normal use
    import serial_json.numpy_support  # Not needed in normal use

    # Small array: 100 uninitialized records.
    dtype = np.dtype([('a', '<i4'), ('b', '<f4'), ('c', '<u1'), ('d', '|O')])
    n = np.recarray((100,), dtype=dtype)
    # n_list = json.dumps(n.tolist())
    # n_serial = serial_json.dumps(n)

    # NOTE: the closures capture ``n``/``n_serial``/``n_list`` by name, so
    # each section redefines its own pair after rebinding them.
    def run_serial_json():
        serial_json.dumps(n)

    def run_list():
        json.dumps(n.tolist())

    t1 = timeit.timeit(run_serial_json, number=test_runs)
    print('Serial JSON DUMPS (size {}): '.format(n.size), t1)
    t2 = timeit.timeit(run_list, number=test_runs)
    print('List JSON DUMPS (size {}): '.format(n.size), t2)

    # Larger array: 10000 records, dumps direction.
    dtype = np.dtype([('a', '<i4'), ('b', '<f4'), ('c', '<u1'), ('d', '|O')])
    n = np.recarray((10000,), dtype=dtype)
    n_list = json.dumps(n.tolist())
    n_serial = serial_json.dumps(n)

    def run_serial_json():
        serial_json.dumps(n)

    def run_list():
        json.dumps(n.tolist())

    t1 = timeit.timeit(run_serial_json, number=test_runs)
    print('Serial JSON DUMPS (size {}): '.format(n.size), t1)
    t2 = timeit.timeit(run_list, number=test_runs)
    print('List JSON DUMPS (size {}): '.format(n.size), t2)

    # ===== str to numpy =====
    # Deserialization direction: serial_json.loads vs rebuilding the
    # recarray field-by-field from the parsed list of rows.
    dtype = np.dtype([('a', '<i4'), ('b', '<f4'), ('c', '<u1'), ('d', '|O')])
    n = np.recarray((10000,), dtype=dtype)
    n_list = json.dumps(n.tolist())
    n_serial = serial_json.dumps(n)

    def run_serial_json():
        serial_json.loads(n_serial)

    def run_list():
        li = json.loads(n_list)
        # (len(li)) is a bare int, not a tuple — recarray accepts that too.
        arr = np.recarray((len(li)), dtype=dtype)
        for i, v in enumerate(li):
            arr[dtype.names[0]][i] = v[0]
            arr[dtype.names[1]][i] = v[1]
            arr[dtype.names[2]][i] = v[2]
            arr[dtype.names[3]][i] = v[3]

    t1 = timeit.timeit(run_serial_json, number=test_runs)
    print('Serial JSON LOADS (size {}): '.format(n.size), t1)
    t2 = timeit.timeit(run_list, number=test_runs)
    print('List JSON LOADS (size {}): '.format(n.size), t2)
# Manual entry point: run the correctness tests, then the (slow) benchmarks.
if __name__ == '__main__':
    test_ndarray()
    test_np_structured_array()
    test_np_recarray()
    time_numpy_array()
    time_np_recarray()
    print('All tests finished successfully!')
| 31.968586
| 98
| 0.587783
| 944
| 6,106
| 3.654661
| 0.085805
| 0.153623
| 0.069565
| 0.064928
| 0.884928
| 0.873623
| 0.873623
| 0.873623
| 0.873623
| 0.873623
| 0
| 0.036942
| 0.233049
| 6,106
| 190
| 99
| 32.136842
| 0.699765
| 0.084343
| 0
| 0.775362
| 0
| 0
| 0.085832
| 0
| 0
| 0
| 0
| 0
| 0.115942
| 1
| 0.123188
| false
| 0
| 0.173913
| 0
| 0.297101
| 0.094203
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
93b655483683f9dd11ce8459cc4a1f492bfd9887
| 11,436
|
py
|
Python
|
tests/test_server_20g_cookie_handler.py
|
IdentityPython/idpy-oidc
|
44f78f5f70d0c5ddc0108fa9a241c460179b53a8
|
[
"Apache-2.0"
] | 1
|
2022-03-24T23:39:22.000Z
|
2022-03-24T23:39:22.000Z
|
tests/test_server_20g_cookie_handler.py
|
IdentityPython/idpy-oidc
|
44f78f5f70d0c5ddc0108fa9a241c460179b53a8
|
[
"Apache-2.0"
] | null | null | null |
tests/test_server_20g_cookie_handler.py
|
IdentityPython/idpy-oidc
|
44f78f5f70d0c5ddc0108fa9a241c460179b53a8
|
[
"Apache-2.0"
] | null | null | null |
import pytest
from cryptojwt.jwk.hmac import SYMKey
from idpyoidc.server.cookie_handler import CookieHandler
from idpyoidc.server.cookie_handler import compute_session_state
from tests import CRYPT_CONFIG
# Key definitions for a generated JWKS: one symmetric (OCT) key for signing
# and one for encryption.
KEYDEFS = [
    {"type": "OCT", "kid": "sig", "use": ["sig"]},
    {"type": "OCT", "kid": "enc", "use": ["enc"]},
]
class TestCookieSign(object):
    """CookieHandler configured with a sign key only (no encryption).

    The tests below assert that signed-only cookie values consist of three
    "|"-separated parts and that parse_cookie round-trips the content.
    """

    @pytest.fixture(autouse=True)
    def make_cookie_content_handler(self):
        # Sign-only configuration: no "enc_key" supplied.
        cookie_conf = {
            "sign_key": SYMKey(k="ghsNKDDLshZTPn974nOsIGhedULrsqnsGoBFBLwUKuJhE2ch"),
        }
        self.cookie_handler = CookieHandler(**cookie_conf)

    def test_init(self):
        assert self.cookie_handler

    def test_make_cookie_content(self):
        _cookie_info = self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso")
        assert _cookie_info
        assert set(_cookie_info.keys()) == {"name", "value", "samesite", "httponly", "secure"}
        # Signed-only value: three "|"-separated parts.
        assert len(_cookie_info["value"].split("|")) == 3

    def test_make_cookie_content_max_age(self):
        # Passing max_age adds a "max-age" entry to the cookie content.
        _cookie_info = self.cookie_handler.make_cookie_content(
            "idpyoidc.server", "value", "sso", max_age=3600
        )
        assert _cookie_info
        assert set(_cookie_info.keys()) == {
            "name",
            "value",
            "max-age",
            "samesite",
            "httponly",
            "secure",
        }
        assert len(_cookie_info["value"].split("|")) == 3

    def test_read_cookie_info(self):
        # parse_cookie must recover value and type from what was produced.
        _cookie_info = [self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso")]
        returned = [{"name": c["name"], "value": c["value"]} for c in _cookie_info]
        _info = self.cookie_handler.parse_cookie("idpyoidc.server", returned)
        assert len(_info) == 1
        assert set(_info[0].keys()) == {"value", "type", "timestamp"}
        assert _info[0]["value"] == "value"
        assert _info[0]["type"] == "sso"

    def test_mult_cookie(self):
        # Multiple cookies parse back in order with their values and types.
        _cookie = [
            self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso"),
            self.cookie_handler.make_cookie_content("idpyoidc.server", "session_state", "session"),
        ]
        assert len(_cookie) == 2
        _c_info = self.cookie_handler.parse_cookie("idpyoidc.server", _cookie)
        assert len(_c_info) == 2
        assert _c_info[0]["value"] == "value"
        assert _c_info[0]["type"] == "sso"
        assert _c_info[1]["value"] == "session_state"
        assert _c_info[1]["type"] == "session"
class TestCookieHandlerSignEnc(object):
    """CookieHandler configured with both a sign key and an enc key.

    With both keys present the cookie value consists of four "|"-separated
    parts (asserted below) rather than the three of the sign-only case.
    """

    @pytest.fixture(autouse=True)
    def make_cookie_handler(self):
        cookie_conf = {
            "sign_key": SYMKey(k="ghsNKDDLshZTPn974nOsIGhedULrsqnsGoBFBLwUKuJhE2ch"),
            "enc_key": SYMKey(k="NXi6HD473d_YS4exVRn7z9z23mGmvU641MuvKqH0o7Y"),
        }
        self.cookie_handler = CookieHandler(**cookie_conf)

    def test_make_cookie_content(self):
        _cookie_info = self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso")
        assert _cookie_info
        assert set(_cookie_info.keys()) == {"name", "value", "samesite", "httponly", "secure"}
        # Signed + encrypted value: four "|"-separated parts.
        assert len(_cookie_info["value"].split("|")) == 4

    def test_make_cookie_content_max_age(self):
        # Passing max_age adds a "max-age" entry to the cookie content.
        _cookie_info = self.cookie_handler.make_cookie_content(
            "idpyoidc.server", "value", "sso", max_age=3600
        )
        assert _cookie_info
        assert set(_cookie_info.keys()) == {
            "name",
            "value",
            "max-age",
            "samesite",
            "httponly",
            "secure",
        }
        assert len(_cookie_info["value"].split("|")) == 4

    def test_read_cookie_info(self):
        # parse_cookie must recover value and type from what was produced.
        _cookie_info = [self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso")]
        returned = [{"name": c["name"], "value": c["value"]} for c in _cookie_info]
        _info = self.cookie_handler.parse_cookie("idpyoidc.server", returned)
        assert len(_info) == 1
        assert set(_info[0].keys()) == {"value", "type", "timestamp"}
        assert _info[0]["value"] == "value"
        assert _info[0]["type"] == "sso"

    def test_mult_cookie(self):
        # Multiple cookies parse back in order with their values and types.
        _cookie = [
            self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso"),
            self.cookie_handler.make_cookie_content("idpyoidc.server", "session_state", "session"),
        ]
        assert len(_cookie) == 2
        _c_info = self.cookie_handler.parse_cookie("idpyoidc.server", _cookie)
        assert len(_c_info) == 2
        assert _c_info[0]["value"] == "value"
        assert _c_info[0]["type"] == "sso"
        assert _c_info[1]["value"] == "session_state"
        assert _c_info[1]["type"] == "session"
class TestCookieHandlerEnc(object):
    """CookieHandler configured with an enc key only (no sign key).

    The cookie value still consists of four "|"-separated parts
    (asserted below).
    """

    @pytest.fixture(autouse=True)
    def make_cookie_content_handler(self):
        # Encryption-only configuration: no "sign_key" supplied.
        cookie_conf = {
            "enc_key": SYMKey(k="NXi6HD473d_YS4exVRn7z9z23mGmvU641MuvKqH0o7Y"),
        }
        self.cookie_handler = CookieHandler(**cookie_conf)

    def test_make_cookie_content(self):
        _cookie_info = self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso")
        assert _cookie_info
        assert set(_cookie_info.keys()) == {"name", "value", "samesite", "httponly", "secure"}
        assert len(_cookie_info["value"].split("|")) == 4

    def test_make_cookie_content_max_age(self):
        # Passing max_age adds a "max-age" entry to the cookie content.
        _cookie_info = self.cookie_handler.make_cookie_content(
            "idpyoidc.server", "value", "sso", max_age=3600
        )
        assert _cookie_info
        assert set(_cookie_info.keys()) == {
            "name",
            "value",
            "max-age",
            "samesite",
            "httponly",
            "secure",
        }
        assert len(_cookie_info["value"].split("|")) == 4

    def test_read_cookie_info(self):
        # parse_cookie must recover value and type from what was produced.
        _cookie_info = [self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso")]
        returned = [{"name": c["name"], "value": c["value"]} for c in _cookie_info]
        _info = self.cookie_handler.parse_cookie("idpyoidc.server", returned)
        assert len(_info) == 1
        assert set(_info[0].keys()) == {"value", "type", "timestamp"}
        assert _info[0]["value"] == "value"
        assert _info[0]["type"] == "sso"

    def test_mult_cookie(self):
        # Multiple cookies parse back in order with their values and types.
        _cookie = [
            self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso"),
            self.cookie_handler.make_cookie_content("idpyoidc.server", "session_state", "session"),
        ]
        assert len(_cookie) == 2
        _c_info = self.cookie_handler.parse_cookie("idpyoidc.server", _cookie)
        assert len(_c_info) == 2
        assert _c_info[0]["value"] == "value"
        assert _c_info[0]["type"] == "sso"
        assert _c_info[1]["value"] == "session_state"
        assert _c_info[1]["type"] == "session"
class TestCookieHandlerSignEncKeys(object):
    """CookieHandler whose keys come from a generated JWKS (KEYDEFS above).

    The JWKS is written to private/cookie_jwks.json; the resulting cookie
    value consists of four "|"-separated parts (asserted below).
    """

    @pytest.fixture(autouse=True)
    def make_cookie_handler(self):
        # read_only=False lets the handler create the JWKS file if missing.
        cookie_conf = {
            "keys": {
                "private_path": "private/cookie_jwks.json",
                "key_defs": KEYDEFS,
                "read_only": False,
            }
        }
        self.cookie_handler = CookieHandler(**cookie_conf)

    def test_make_cookie_content(self):
        _cookie_info = self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso")
        assert _cookie_info
        assert set(_cookie_info.keys()) == {"name", "value", "samesite", "httponly", "secure"}
        assert len(_cookie_info["value"].split("|")) == 4

    def test_make_cookie_content_max_age(self):
        # Passing max_age adds a "max-age" entry to the cookie content.
        _cookie_info = self.cookie_handler.make_cookie_content(
            "idpyoidc.server", "value", "sso", max_age=3600
        )
        assert _cookie_info
        assert set(_cookie_info.keys()) == {
            "name",
            "value",
            "max-age",
            "samesite",
            "httponly",
            "secure",
        }
        assert len(_cookie_info["value"].split("|")) == 4

    def test_read_cookie_info(self):
        # parse_cookie must recover value and type from what was produced.
        _cookie_info = [self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso")]
        returned = [{"name": c["name"], "value": c["value"]} for c in _cookie_info]
        _info = self.cookie_handler.parse_cookie("idpyoidc.server", returned)
        assert len(_info) == 1
        assert set(_info[0].keys()) == {"value", "type", "timestamp"}
        assert _info[0]["value"] == "value"
        assert _info[0]["type"] == "sso"

    def test_mult_cookie(self):
        # Multiple cookies parse back in order with their values and types.
        _cookie = [
            self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso"),
            self.cookie_handler.make_cookie_content("idpyoidc.server", "session_state", "session"),
        ]
        assert len(_cookie) == 2
        _c_info = self.cookie_handler.parse_cookie("idpyoidc.server", _cookie)
        assert len(_c_info) == 2
        assert _c_info[0]["value"] == "value"
        assert _c_info[0]["type"] == "sso"
        assert _c_info[1]["value"] == "session_state"
        assert _c_info[1]["type"] == "session"
def test_compute_session_state():
    """Regression pin: compute_session_state is deterministic and returns a
    '<hex-digest>.<salt>'-shaped value for fixed inputs."""
    hv = compute_session_state("state", "salt", "client_id", "https://example.com/redirect")
    assert hv == "d21113fbe4b54661ae45f3a3233b0f865ccc646af248274b6fa5664267540e29.salt"
class TestCookieHandlerFernetEnc(object):
    """CookieHandler configured via crypt_config (CRYPT_CONFIG fixture).

    With this configuration the cookie value consists of only two
    "|"-separated parts (asserted below).
    """

    @pytest.fixture(autouse=True)
    def make_cookie_content_handler(self):
        cookie_conf = {
            "crypt_config": CRYPT_CONFIG,
        }
        self.cookie_handler = CookieHandler(**cookie_conf)

    def test_make_cookie_content(self):
        _cookie_info = self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso")
        assert _cookie_info
        assert set(_cookie_info.keys()) == {"name", "value", "samesite", "httponly", "secure"}
        assert len(_cookie_info["value"].split("|")) == 2

    def test_make_cookie_content_max_age(self):
        # Passing max_age adds a "max-age" entry to the cookie content.
        _cookie_info = self.cookie_handler.make_cookie_content(
            "idpyoidc.server", "value", "sso", max_age=3600
        )
        assert _cookie_info
        assert set(_cookie_info.keys()) == {
            "name",
            "value",
            "max-age",
            "samesite",
            "httponly",
            "secure",
        }
        assert len(_cookie_info["value"].split("|")) == 2

    def test_read_cookie_info(self):
        # parse_cookie must recover value and type from what was produced.
        _cookie_info = [self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso")]
        returned = [{"name": c["name"], "value": c["value"]} for c in _cookie_info]
        _info = self.cookie_handler.parse_cookie("idpyoidc.server", returned)
        assert len(_info) == 1
        assert set(_info[0].keys()) == {"value", "type", "timestamp"}
        assert _info[0]["value"] == "value"
        assert _info[0]["type"] == "sso"

    def test_mult_cookie(self):
        # Multiple cookies parse back in order with their values and types.
        _cookie = [
            self.cookie_handler.make_cookie_content("idpyoidc.server", "value", "sso"),
            self.cookie_handler.make_cookie_content("idpyoidc.server", "session_state", "session"),
        ]
        assert len(_cookie) == 2
        _c_info = self.cookie_handler.parse_cookie("idpyoidc.server", _cookie)
        assert len(_c_info) == 2
        assert _c_info[0]["value"] == "value"
        assert _c_info[0]["type"] == "sso"
        assert _c_info[1]["value"] == "session_state"
        assert _c_info[1]["type"] == "session"
| 39.434483
| 99
| 0.605282
| 1,288
| 11,436
| 5.041925
| 0.071429
| 0.101632
| 0.10733
| 0.080844
| 0.915614
| 0.915614
| 0.904219
| 0.904219
| 0.896982
| 0.877887
| 0
| 0.018493
| 0.243442
| 11,436
| 289
| 100
| 39.570934
| 0.732085
| 0
| 0
| 0.819277
| 0
| 0
| 0.185292
| 0.024047
| 0
| 0
| 0
| 0
| 0.329317
| 1
| 0.108434
| false
| 0
| 0.02008
| 0
| 0.148594
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
93ba2eeed18bf53d908434cb9ca9de5270c28b2f
| 1,151
|
py
|
Python
|
problem_5/test_smallest_multiple.py
|
plilja/project-euler
|
646d1989cf15e903ef7e3c6e487284847d522ec9
|
[
"Apache-2.0"
] | null | null | null |
problem_5/test_smallest_multiple.py
|
plilja/project-euler
|
646d1989cf15e903ef7e3c6e487284847d522ec9
|
[
"Apache-2.0"
] | null | null | null |
problem_5/test_smallest_multiple.py
|
plilja/project-euler
|
646d1989cf15e903ef7e3c6e487284847d522ec9
|
[
"Apache-2.0"
] | null | null | null |
from unittest import TestCase
from smallest_multiple import *
class TestSmallestMultiple(TestCase):
    """Unit tests for ``smallest_multiple``: the smallest number evenly
    divisible by every element of the given numbers (their lcm)."""

    def test_smallest_multiple_that_is_divisible_by_1_is_1(self):
        self.assertEqual(smallest_multiple([1]), 1)

    def test_smallest_multiple_that_is_divisible_by_1_and_2_is_2(self):
        self.assertEqual(smallest_multiple([1, 2]), 2)

    def test_smallest_multiple_that_is_divisible_by_1_through_3_is_6(self):
        # Fixed: previously duplicated the [1, 2] case instead of testing 1..3.
        self.assertEqual(smallest_multiple([1, 2, 3]), 6)

    def test_smallest_multiple_that_is_divisible_by_1_through_6_is_60(self):
        self.assertEqual(smallest_multiple(range(1, 7)), 60)

    def test_smallest_multiple_that_is_divisible_by_4_6_and_8_is_24(self):
        self.assertEqual(smallest_multiple([4, 6, 8]), 24)

    def test_smallest_multiple_that_is_divisible_by_3_4_6_and_8_is_24(self):
        # Fixed method name: lcm(3, 4, 6, 8) is 24, not 60 as previously named.
        self.assertEqual(smallest_multiple([3, 4, 6, 8]), 24)

    def test_smallest_multiple_that_is_divisible_by_1_through_10_is_2520(self):
        self.assertEqual(smallest_multiple(range(1, 11)), 2520)

    def test_project_euler_input(self):
        # Problem 5 answer for 1..20.
        self.assertEqual(smallest_multiple(range(1, 21)), 232792560)
| 37.129032
| 79
| 0.773241
| 177
| 1,151
| 4.519774
| 0.20339
| 0.32
| 0.19
| 0.27
| 0.80375
| 0.7425
| 0.6975
| 0.49
| 0.44
| 0.3375
| 0
| 0.072581
| 0.138141
| 1,151
| 30
| 80
| 38.366667
| 0.733871
| 0
| 0
| 0.105263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.421053
| 1
| 0.421053
| false
| 0
| 0.105263
| 0
| 0.578947
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
f5619ddf164c9d0e947379ec5d82683d1aa547f0
| 71,649
|
py
|
Python
|
Modules/Image/gen_irtkForEachNaryVoxelFunction.py
|
kevin-keraudren/IRTK
|
ce329b7f58270b6c34665dcfe9a6e941649f3b94
|
[
"Apache-2.0"
] | 3
|
2018-10-04T19:32:36.000Z
|
2021-09-02T07:37:30.000Z
|
Modules/Image/gen_irtkForEachNaryVoxelFunction.py
|
kevin-keraudren/IRTK
|
ce329b7f58270b6c34665dcfe9a6e941649f3b94
|
[
"Apache-2.0"
] | null | null | null |
Modules/Image/gen_irtkForEachNaryVoxelFunction.py
|
kevin-keraudren/IRTK
|
ce329b7f58270b6c34665dcfe9a6e941649f3b94
|
[
"Apache-2.0"
] | 4
|
2016-03-17T02:55:00.000Z
|
2018-02-03T05:40:05.000Z
|
#! /usr/bin/env python
#
# The Image Registration Toolkit (IRTK)
#
# Copyright 2008-2015 Imperial College London
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
# ------------------------------------------------------------------------------
def to_nary_string(arity):
    """Return the generic '<N>-ary' spelling for *arity*, e.g. 3 -> '3-ary'."""
    return '{0}-ary'.format(arity)
# ------------------------------------------------------------------------------
def string_to_arity(arity):
    """Map an English arity name (case-insensitive, 'unary'..'nonary') to
    its number 1..9; return -1 for any unknown name."""
    names = {
        'unary': 1, 'binary': 2, 'ternary': 3,
        'quaternary': 4, 'quinary': 5, 'senary': 6,
        'septenary': 7, 'octary': 8, 'nonary': 9,
    }
    return names.get(arity.lower(), -1)
# ------------------------------------------------------------------------------
def arity_to_string(arity):
    """Map arity 1..9 to its English name; fall back to '<N>-ary' otherwise."""
    names = {
        1: 'unary', 2: 'binary', 3: 'ternary',
        4: 'quaternary', 5: 'quinary', 6: 'senary',
        7: 'septenary', 8: 'octary', 9: 'nonary',
    }
    if arity in names:
        return names[arity]
    return to_nary_string(arity)
# ------------------------------------------------------------------------------
def to_valid_symbol_name(s):
    """Strip '-' characters so *s* can be embedded in a C++ identifier."""
    return ''.join(s.split('-'))
# ------------------------------------------------------------------------------
def get_source_name(arity):
    """Build the generated header's base name, e.g. 1 -> 'irtkForEachUnaryVoxelFunction'."""
    middle = to_valid_symbol_name(arity_to_string(arity)).title()
    return 'irtkForEach%sVoxelFunction' % middle
# ------------------------------------------------------------------------------
# parse command-line arguments
# NOTE(review): this generator is Python 2 (print statement below); it is run
# by CMake as a standalone script, not imported as a module.
if len(sys.argv) != 3:
    print "usage: " + sys.argv[0] + ' <arity> <file>'
    sys.exit(1)
# Accept the arity either as a number or as an English name ("unary", ...).
try:
    arity = int(sys.argv[1])
except ValueError:
    arity = string_to_arity(sys.argv[1])
if arity < 1:
    sys.stderr.write('Input argument must be either arity as positive number or a string such as "unary" or "binary"!\n')
    sys.exit(1)
f = open(sys.argv[2], 'w')
# NOTE(review): open() raises IOError on failure instead of returning a falsy
# value, so this check can never fire; there is also no sys.exit afterwards.
if not f:
    sys.stderr.write('Failed to open file ' + sys.argv[2] + '!\n')
# ------------------------------------------------------------------------------
# source file header
source_name = get_source_name(arity)
f.write("""/* The Image Registration Toolkit (IRTK)
 *
 * Copyright 2008-2015 Imperial College London
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License. */

/* ATTENTION: This source file has been automatically generated using the code
 *            generator gen_irtkForEachNaryVoxelFunction.py! This generator is
 *            invoked during CMake configuration of the build system when this
 *            source file is missing from the project.
 *
 *            DO NOT modify this file manually. Instead, modify the code
 *            generator, remove any existing irtkForEach*VoxelFunction.h
 *            header file from the image++/include/ directory and then re-run
 *            CMake. This will invoke the code generator to re-generate the now
 *            missing source files. */
""")
# include guard
f.write('#ifndef _' + source_name + '_H\n')
f.write('#define _' + source_name + '_H\n')
# include statements
f.write("""
#include <irtkVoxelFunction.h>
""")
# ------------------------------------------------------------------------------
# generate ForEach function for each combination of const and non-const image arguments
def gencode(arity, num_const):
arity_string = arity_to_string(arity).title()
cfg = {}
# use last image as reference for image size and inside check as it is usually
# the output image
cfg['refim'] = 'im' + str(arity)
cfg['refp'] = 'p' + str(arity)
# other settings
if num_const == 0: cfg['num_const_comment'] = str(arity) + ' non-const'
elif num_const == arity: cfg['num_const_comment'] = str(arity) + ' const'
else: cfg['num_const_comment'] = str(num_const) + ' const, ' + str(arity - num_const) + ' non-const'
if arity > 1: cfg['num_const_comment'] += ' images'
else: cfg['num_const_comment'] += ' image'
cfg['class_name1'] = 'irtk' + arity_string + 'ForEachVoxelBody'
cfg['class_name2'] = 'irtk' + arity_string + 'ForEachVoxelIfBody'
if num_const > 0:
cfg['class_name1'] += '_'
cfg['class_name2'] += '_'
if num_const < arity:
cfg['class_name1'] += str(num_const)
cfg['class_name2'] += str(num_const)
cfg['class_name1'] += 'Const'
cfg['class_name2'] += 'Const'
cfg['member_declaration'] = ''
cfg['class_T'] = ''
cfg['T'] = ''
cfg['constructor_args1'] = ''
cfg['constructor_args2'] = ''
cfg['init_list'] = ''
cfg['copy_list'] = ''
cfg['init_pointers'] = ''
cfg['init_pointers_1D'] = ''
cfg['init_pointers_2D'] = ''
cfg['init_pointers_3D'] = ''
cfg['preincrement_pointers'] = ''
cfg['inc_pointers_col'] = ''
cfg['inc_pointers_row'] = ''
cfg['inc_pointers_page'] = ''
cfg['pargs'] = ''
cfg['imparams_by_reference'] = ''
cfg['imargs'] = ''
cfg['impargs'] = ''
for i in range(1, arity+1):
n = str(i)
if i > 1:
cfg['member_declaration'] += '\n' + (' ' * 2)
cfg['class_T'] += ', '
cfg['T'] += ', '
cfg['constructor_args1'] += ',\n' + (' ' * (2 + len(cfg['class_name1']) + 1))
cfg['constructor_args2'] += ',\n' + (' ' * (2 + len(cfg['class_name2']) + 1))
cfg['init_list'] += ', '
cfg['copy_list'] += ', '
cfg['init_pointers'] += '\n' + (' ' * 4)
cfg['init_pointers_1D'] += '\n' + (' ' * 4)
cfg['init_pointers_2D'] += '\n' + (' ' * 4)
cfg['init_pointers_3D'] += '\n' + (' ' * 4)
cfg['preincrement_pointers'] += ', '
cfg['inc_pointers_col'] += ', '
cfg['inc_pointers_row'] += ', '
cfg['inc_pointers_page'] += ', '
cfg['pargs'] += ', '
cfg['imparams_by_reference'] += ', '
cfg['imargs'] += ', '
cfg['impargs'] += ', '
if num_const > 0:
if i <= num_const:
cfg['member_declaration'] += 'const '
cfg['constructor_args1'] += 'const '
cfg['constructor_args2'] += 'const '
cfg['init_pointers'] += 'const '
cfg['init_pointers_1D'] += 'const '
cfg['init_pointers_2D'] += 'const '
cfg['init_pointers_3D'] += 'const '
cfg['imparams_by_reference'] += 'const '
else:
cfg['member_declaration'] += ' '
cfg['constructor_args1'] += ' '
cfg['constructor_args2'] += ' '
cfg['init_pointers'] += ' '
cfg['init_pointers_1D'] += ' '
cfg['init_pointers_2D'] += ' '
cfg['init_pointers_3D'] += ' '
cfg['member_declaration'] += 'irtkGenericImage<T' + n + '> &im' + n + ';'
cfg['imparams_by_reference'] += 'irtkGenericImage<T' + n + '> &im' + n
cfg['class_T'] += 'class T' + n
cfg['T'] += 'T' + n
cfg['constructor_args1'] += 'irtkGenericImage<T' + n + '> &im' + n
cfg['constructor_args2'] += 'irtkGenericImage<T' + n + '> &im' + n
cfg['init_list'] += 'im' + n + '(im' + n + ')'
cfg['copy_list'] += 'im' + n + '(o.im' + n + ')'
cfg['init_pointers'] += 'T' + n + ' *p' + n + ' = im' + n + '.IsEmpty() ? NULL : im' + n + '.GetPointerToVoxels();'
cfg['init_pointers_1D'] += 'T' + n + ' *p' + n + ' = im' + n + '.IsEmpty() ? NULL : im' + n + '.GetPointerToVoxels() + re.begin();'
cfg['init_pointers_2D'] += 'T' + n + ' *p' + n + ' = im' + n + '.IsEmpty() ? NULL : im' + n + '.GetPointerToVoxels(bi, bj, this->_k, this->_l);'
cfg['init_pointers_3D'] += 'T' + n + ' *p' + n + ' = im' + n + '.IsEmpty() ? NULL : im' + n + '.GetPointerToVoxels(bi, bj, bk, this->_l);'
cfg['preincrement_pointers'] += '++p' + n
cfg['inc_pointers_col'] += 'p' + n + ' += 1'
cfg['inc_pointers_row'] += 'p' + n + ' += s1'
cfg['inc_pointers_page'] += 'p' + n + ' += s2'
cfg['pargs'] += 'p' + n
cfg['imargs'] += 'im' + n
cfg['impargs'] += '*im' + n
cfg['constructor_args1'] += ',\n' + (' ' * (2 + len(cfg['class_name1']) + 1))
cfg['constructor_args2'] += ',\n' + (' ' * (2 + len(cfg['class_name2']) + 1))
cfg['constructor_args1'] += 'VoxelFunc &vf'
cfg['constructor_args2'] += 'VoxelFunc &vf, OutsideFunc &of'
cfg['imparams_by_pointer'] = cfg['imparams_by_reference'].replace('&', '*')
cfg['assert_is_not_reduction'] = "if (VoxelFunc::IsReduction()) _irtkforeach%svoxelfunction_must_not_be_reduction();" % arity_string.lower()
cfg['assert_neither_is_not_reduction'] = "if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) _irtkforeach%svoxelfunction_must_not_be_reduction();" % arity_string.lower()
f.write("""
// =============================================================================
// %(num_const_comment)s
// =============================================================================
// -----------------------------------------------------------------------------
/**
* ForEachVoxel body for voxel function of %(num_const_comment)s
*/
template <%(class_T)s, class VoxelFunc>
struct %(class_name1)s : public irtkForEachVoxelBody<VoxelFunc>
{
%(member_declaration)s
/// Constructor
%(class_name1)s(%(constructor_args1)s)
:
irtkForEachVoxelBody<VoxelFunc>(vf), %(init_list)s
{}
/// Copy constructor
%(class_name1)s(const %(class_name1)s &o)
:
irtkForEachVoxelBody<VoxelFunc>(o), %(copy_list)s
{}
/// Split constructor
%(class_name1)s(%(class_name1)s &o, split s)
:
irtkForEachVoxelBody<VoxelFunc>(o, s), %(copy_list)s
{}
/// Process entire image
void operator ()(const irtkImageAttributes &attr) const
{
%(init_pointers)s
const int T = (attr._dt ? attr._t : 1);
for (int l = 0; l < T; ++l)
for (int k = 0; k < attr._z; ++k)
for (int j = 0; j < attr._y; ++j)
for (int i = 0; i < attr._x; ++i, %(preincrement_pointers)s) {
// const_cast such that voxel functions need only implement
// non-const operator() which is required for parallel_reduce
const_cast<%(class_name1)s *>(this)->_VoxelFunc(i, j, k, l, %(pargs)s);
}
}
/// Process 1D image region
void operator ()(const blocked_range<int> &re) const
{
%(init_pointers_1D)s
for (int idx = re.begin(); idx < re.end(); ++idx, %(inc_pointers_col)s) {
// const_cast such that voxel functions need only implement
// non-const operator() which is required for parallel_reduce
const_cast<%(class_name1)s *>(this)->_VoxelFunc(%(refim)s, idx, %(pargs)s);
}
}
/// Process 2D image region
void operator ()(const blocked_range2d<int> &re) const
{
const int bi = re.cols().begin();
const int bj = re.rows().begin();
const int ei = re.cols().end();
const int ej = re.rows().end();
const int s1 = %(refim)s.GetX() - (ei - bi);
%(init_pointers_2D)s
for (int j = bj; j < ej; ++j, %(inc_pointers_row)s)
for (int i = bi; i < ei; ++i, %(inc_pointers_col)s) {
// const_cast such that voxel functions need only implement
// non-const operator() which is required for parallel_reduce
const_cast<%(class_name1)s *>(this)->_VoxelFunc(i, j, this->_k, this->_l, %(pargs)s);
}
}
/// Process 3D image region
void operator ()(const blocked_range3d<int> &re) const
{
const int bi = re.cols ().begin();
const int bj = re.rows ().begin();
const int bk = re.pages().begin();
const int ei = re.cols ().end();
const int ej = re.rows ().end();
const int ek = re.pages().end();
const int s1 = %(refim)s.GetX() - (ei - bi);
const int s2 = (%(refim)s.GetY() - (ej - bj)) * %(refim)s.GetX();
%(init_pointers_3D)s
for (int k = bk; k < ek; ++k, %(inc_pointers_page)s)
for (int j = bj; j < ej; ++j, %(inc_pointers_row)s)
for (int i = bi; i < ei; ++i, %(inc_pointers_col)s) {
// const_cast such that voxel functions need only implement
// non-const operator() which is required for parallel_reduce
const_cast<%(class_name1)s *>(this)->_VoxelFunc(i, j, k, this->_l, %(pargs)s);
}
}
};
// -----------------------------------------------------------------------------
/**
* ForEachVoxel body for inside and outside unary voxel function of %(num_const_comment)s
*/
template <%(class_T)s,
class VoxelFunc, class OutsideFunc = irtkNaryVoxelFunction::NOP,
class Domain = irtkImageDomain::Foreground>
struct %(class_name2)s : public irtkForEachVoxelIfBody<VoxelFunc, OutsideFunc>
{
%(member_declaration)s
/// Constructor
%(class_name2)s(%(constructor_args2)s)
:
irtkForEachVoxelIfBody<VoxelFunc, OutsideFunc>(vf, of), %(init_list)s
{}
/// Copy constructor
%(class_name2)s(const %(class_name2)s &o)
:
irtkForEachVoxelIfBody<VoxelFunc, OutsideFunc>(o), %(copy_list)s
{}
/// Split constructor
%(class_name2)s(%(class_name2)s &o, split s)
:
irtkForEachVoxelIfBody<VoxelFunc, OutsideFunc>(o, s), %(copy_list)s
{}
/// Process entire image
void operator ()(const irtkImageAttributes &attr) const
{
%(init_pointers)s
const int T = (attr._dt ? attr._t : 1);
for (int l = 0; l < T; ++l)
for (int k = 0; k < attr._z; ++k)
for (int j = 0; j < attr._y; ++j)
for (int i = 0; i < attr._x; ++i, %(preincrement_pointers)s) {
if (Domain::IsInside(%(refim)s, i, j, k, l, %(refp)s)) {
// const_cast such that voxel functions need only implement
// non-const operator() which is required for parallel_reduce
const_cast<%(class_name2)s *>(this)->_VoxelFunc (i, j, k, l, %(pargs)s);
} else const_cast<%(class_name2)s *>(this)->_OutsideFunc(i, j, k, l, %(pargs)s);
}
}
/// Process 1D image region
void operator ()(const blocked_range<int> &re) const
{
%(init_pointers_1D)s
for (int idx = re.begin(); idx < re.end(); ++idx, %(inc_pointers_col)s) {
if (Domain::IsInside(%(refim)s, idx, %(refp)s)) {
// const_cast such that voxel functions need only implement
// non-const operator() which is required for parallel_reduce
const_cast<%(class_name2)s *>(this)->_VoxelFunc (%(refim)s, idx, %(pargs)s);
} else const_cast<%(class_name2)s *>(this)->_OutsideFunc(%(refim)s, idx, %(pargs)s);
}
}
/// Process 2D image region
void operator ()(const blocked_range2d<int> &re) const
{
const int bi = re.cols().begin();
const int bj = re.rows().begin();
const int ei = re.cols().end();
const int ej = re.rows().end();
const int s1 = %(refim)s.GetX() - (ei - bi);
%(init_pointers_2D)s
for (int j = bj; j < ej; ++j, %(inc_pointers_row)s)
for (int i = bi; i < ei; ++i, %(inc_pointers_col)s) {
if (Domain::IsInside(%(refim)s, i, j, this->_k, this->_l, %(refp)s)) {
// const_cast such that voxel functions need only implement
// non-const operator() which is required for parallel_reduce
const_cast<%(class_name2)s *>(this)->_VoxelFunc (i, j, this->_k, this->_l, %(pargs)s);
} else const_cast<%(class_name2)s *>(this)->_OutsideFunc(i, j, this->_k, this->_l, %(pargs)s);
}
}
/// Process 3D image region
void operator ()(const blocked_range3d<int> &re) const
{
const int bi = re.cols ().begin();
const int bj = re.rows ().begin();
const int bk = re.pages().begin();
const int ei = re.cols ().end();
const int ej = re.rows ().end();
const int ek = re.pages().end();
const int s1 = %(refim)s.GetX() - (ei - bi);
const int s2 = (%(refim)s.GetY() - (ej - bj)) * %(refim)s.GetX();
%(init_pointers_3D)s
for (int k = bk; k < ek; ++k, %(inc_pointers_page)s)
for (int j = bj; j < ej; ++j, %(inc_pointers_row)s)
for (int i = bi; i < ei; ++i, %(inc_pointers_col)s) {
if (Domain::IsInside(%(refim)s, i, j, k, this->_l, %(refp)s)) {
// const_cast such that voxel functions need only implement
// non-const operator() which is required for parallel_reduce
const_cast<%(class_name2)s *>(this)->_VoxelFunc (i, j, k, this->_l, %(pargs)s);
} else const_cast<%(class_name2)s *>(this)->_OutsideFunc(i, j, k, this->_l, %(pargs)s);
}
}
};
// -----------------------------------------------------------------------------
// ForEachVoxel
// -----------------------------------------------------------------------------
//
// Image arguments by pointer
//
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachScalar(%(imparams_by_pointer)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
blocked_range<int> re(0, %(refim)s->GetNumberOfVoxels());
body(re);
vf.join(body._VoxelFunc);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachScalar(VoxelFunc vf, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachScalar(%(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(%(imparams_by_pointer)s, VoxelFunc &vf)
{
if (%(refim)s->GetTSize()) {
ForEachScalar(%(impargs)s, vf);
} else {
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
blocked_range<int> re(0, %(refim)s->GetNumberOfVoxels() / %(refim)s->GetT());
body(re);
vf.join(body._VoxelFunc);
}
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(VoxelFunc vf, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachVoxel(%(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(const irtkImageAttributes &attr, %(imparams_by_pointer)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
body(attr);
vf.join(body._VoxelFunc);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(VoxelFunc vf, const irtkImageAttributes &attr, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachVoxel(attr, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(const blocked_range<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
body(re);
vf.join(body._VoxelFunc);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(VoxelFunc vf, const blocked_range<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachVoxel(re, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(const blocked_range2d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
body(re);
vf.join(body._VoxelFunc);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(VoxelFunc vf, const blocked_range2d<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachVoxel(re, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(const blocked_range3d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
body(re);
vf.join(body._VoxelFunc);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(VoxelFunc vf, const blocked_range3d<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachVoxel(re, %(impargs)s, vf);
}
//
// Image arguments by reference
//
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachScalar(%(imparams_by_reference)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
blocked_range<int> re(0, %(refim)s.GetNumberOfVoxels());
body(re);
vf.join(body._VoxelFunc);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachScalar(VoxelFunc vf, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachScalar(%(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(%(imparams_by_reference)s, VoxelFunc &vf)
{
if (%(refim)s.GetTSize()) {
ForEachScalar(%(imargs)s, vf);
} else {
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
blocked_range<int> re(0, %(refim)s.GetNumberOfVoxels() / %(refim)s.GetT());
body(re);
vf.join(body._VoxelFunc);
}
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(VoxelFunc vf, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachVoxel(%(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(const irtkImageAttributes &attr, %(imparams_by_reference)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
body(attr);
vf.join(body._VoxelFunc);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(VoxelFunc vf, const irtkImageAttributes &attr, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachVoxel(attr, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(const blocked_range<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
body(re);
vf.join(body._VoxelFunc);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(VoxelFunc vf, const blocked_range<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachVoxel(re, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(const blocked_range2d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
body(re);
vf.join(body._VoxelFunc);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(VoxelFunc vf, const blocked_range2d<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachVoxel(re, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(const blocked_range3d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
body(re);
vf.join(body._VoxelFunc);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ForEachVoxel(VoxelFunc vf, const blocked_range3d<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachVoxel(re, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
// ForEachVoxelIf
// -----------------------------------------------------------------------------
//
// Image arguments by pointer
//
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachScalarIf(%(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
blocked_range<int> re(0, %(refim)s->GetNumberOfVoxels());
body(re);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachScalarIf(VoxelFunc vf, OutsideFunc of, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ForEachScalarIf<Domain>(%(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachScalarIf(%(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ForEachScalarIf<Domain>(%(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachScalarIf(VoxelFunc vf, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachScalarIf<Domain>(%(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(%(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
if (%(refim)s->GetTSize()) {
ForEachScalarIf<Domain>(%(impargs)s, vf, of);
} else {
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
blocked_range<int> re(0, %(refim)s->GetNumberOfVoxels() / %(refim)s->GetT());
body(re);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(VoxelFunc vf, OutsideFunc of, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ForEachVoxelIf<Domain>(%(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(%(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ForEachVoxelIf<Domain>(%(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(VoxelFunc vf, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachVoxelIf<Domain>(%(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(const irtkImageAttributes &attr, %(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
body(attr);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const irtkImageAttributes &attr, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ForEachVoxelIf<Domain>(attr, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(const irtkImageAttributes &attr, %(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ForEachVoxelIf<Domain>(attr, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(VoxelFunc vf, const irtkImageAttributes &attr, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachVoxelIf<Domain>(attr, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(const blocked_range<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
body(re);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range<int> &re, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ForEachVoxelIf<Domain>(re, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(const blocked_range2d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
body(re);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range2d<int> &re, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ForEachVoxelIf<Domain>(re, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(const blocked_range2d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ForEachVoxelIf<Domain>(re, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(VoxelFunc vf, const blocked_range2d<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachVoxelIf<Domain>(re, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(const blocked_range3d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
body(re);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range3d<int> &re, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ForEachVoxelIf<Domain>(%(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(const blocked_range3d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ForEachVoxelIf<Domain>(re, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(VoxelFunc vf, const blocked_range3d<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ForEachVoxelIf<Domain>(re, %(impargs)s, vf);
}
//
// Image arguments by reference
//
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachScalarIf(%(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
blocked_range<int> re(0, %(refim)s.GetNumberOfVoxels());
body(re);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachScalarIf(VoxelFunc vf, OutsideFunc of, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ForEachScalarIf<Domain>(%(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachScalarIf(%(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ForEachScalarIf<Domain>(%(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachScalarIf(VoxelFunc vf, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachScalarIf<Domain>(%(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(%(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
if (%(refim)s.GetTSize()) {
ForEachVoxelIf<Domain>(%(imargs)s, vf, of);
} else {
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
blocked_range<int> re(0, %(refim)s.GetNumberOfVoxels() / %(refim)s.GetT());
body(re);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(VoxelFunc vf, OutsideFunc of, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ForEachVoxelIf<Domain>(%(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(%(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ForEachVoxelIf<Domain>(%(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(VoxelFunc vf, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachVoxelIf<Domain>(%(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(const irtkImageAttributes &attr, %(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
body(attr);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const irtkImageAttributes &attr, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ForEachVoxelIf<Domain>(attr, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(const irtkImageAttributes &attr, %(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ForEachVoxelIf<Domain>(attr, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(VoxelFunc vf, const irtkImageAttributes &attr, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachVoxelIf<Domain>(attr, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(const blocked_range<int> &re, %(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
body(re);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range<int> &re, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(const blocked_range2d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
body(re);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range2d<int> &re, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(const blocked_range2d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(VoxelFunc vf, const blocked_range2d<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachVoxelIf<Domain>(re, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(const blocked_range3d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
body(re);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range3d<int> &re, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(const blocked_range3d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ForEachVoxelIf(VoxelFunc vf, const blocked_range3d<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ForEachVoxelIf<Domain>(re, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
// ParallelForEachVoxel
// -----------------------------------------------------------------------------
//
// Image arguments by pointer
//
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachScalar(%(imparams_by_pointer)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
blocked_range<int> re(0, %(refim)s->GetNumberOfVoxels());
if (VoxelFunc::IsReduction()) { parallel_reduce(re, body); vf.join(body._VoxelFunc); }
else parallel_for (re, body);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachScalar(VoxelFunc vf, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachScalar(%(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(%(imparams_by_pointer)s, VoxelFunc &vf)
{
if (%(refim)s->GetTSize()) {
ParallelForEachScalar(%(impargs)s, vf);
} else {
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
blocked_range<int> re(0, %(refim)s->GetNumberOfVoxels() / %(refim)s->GetT());
if (VoxelFunc::IsReduction()) { parallel_reduce(re, body); vf.join(body._VoxelFunc); }
else parallel_for (re, body);
}
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(VoxelFunc vf, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxel(%(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(const irtkImageAttributes &attr, %(imparams_by_pointer)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
blocked_range3d<int> re(0, attr._z, 0, attr._y, 0, attr._x);
if (VoxelFunc::IsReduction()) {
if (attr._dt) {
for (body._l = 0; body._l < attr._t; ++body._l) parallel_reduce(re, body);
} else {
parallel_reduce(re, body);
}
vf.join(body._VoxelFunc);
} else {
if (attr._dt) {
for (body._l = 0; body._l < attr._t; ++body._l) parallel_for(re, body);
} else {
parallel_for(re, body);
}
}
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(VoxelFunc vf, const irtkImageAttributes &attr, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxel(attr, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(const blocked_range<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
if (VoxelFunc::IsReduction()) { parallel_reduce(re, body); vf.join(body._VoxelFunc); }
else parallel_for (re, body);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(VoxelFunc vf, const blocked_range<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxel(re, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(const blocked_range2d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
if (VoxelFunc::IsReduction()) { parallel_reduce(re, body); vf.join(body._VoxelFunc); }
else parallel_for (re, body);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(VoxelFunc vf, const blocked_range2d<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxel(re, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(const blocked_range3d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(impargs)s, vf);
if (VoxelFunc::IsReduction()) { parallel_reduce(re, body); vf.join(body._VoxelFunc); }
else parallel_for (re, body);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(VoxelFunc vf, const blocked_range3d<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxel(re, %(impargs)s, vf);
}
//
// Image arguments by reference
//
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachScalar(%(imparams_by_reference)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
blocked_range<int> re(0, %(refim)s.GetNumberOfVoxels());
if (VoxelFunc::IsReduction()) { parallel_reduce(re, body); vf.join(body._VoxelFunc); }
else parallel_for (re, body);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachScalar(VoxelFunc vf, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachScalar(%(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(%(imparams_by_reference)s, VoxelFunc &vf)
{
if (%(refim)s.GetTSize()) {
ParallelForEachScalar(%(imargs)s, vf);
} else {
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
blocked_range<int> re(0, %(refim)s.GetNumberOfVoxels() / %(refim)s.GetT());
if (VoxelFunc::IsReduction()) { parallel_reduce(re, body); vf.join(body._VoxelFunc); }
else parallel_for (re, body);
}
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(VoxelFunc vf, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxel(%(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(const irtkImageAttributes &attr, %(imparams_by_reference)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
blocked_range3d<int> re(0, attr._z, 0, attr._y, 0, attr._x);
if (VoxelFunc::IsReduction()) {
if (attr._dt) {
for (body._l = 0; body._l < attr._t; ++body._l) parallel_reduce(re, body);
} else {
parallel_reduce(re, body);
}
vf.join(body._VoxelFunc);
} else {
if (attr._dt) {
for (body._l = 0; body._l < attr._t; ++body._l) parallel_for(re, body);
} else {
parallel_for(re, body);
}
}
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(VoxelFunc vf, const irtkImageAttributes &attr, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxel(attr, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(const blocked_range<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
if (VoxelFunc::IsReduction()) { parallel_reduce(re, body); vf.join(body._VoxelFunc); }
else parallel_for (re, body);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(VoxelFunc vf, const blocked_range<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxel(re, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(const blocked_range2d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
if (VoxelFunc::IsReduction()) { parallel_reduce(re, body); vf.join(body._VoxelFunc); }
else parallel_for (re, body);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(VoxelFunc vf, const blocked_range2d<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxel(re, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(const blocked_range3d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
%(class_name1)s<%(T)s, VoxelFunc> body(%(imargs)s, vf);
if (VoxelFunc::IsReduction()) { parallel_reduce(re, body); vf.join(body._VoxelFunc); }
else parallel_for (re, body);
}
// -----------------------------------------------------------------------------
template <%(class_T)s, class VoxelFunc>
void ParallelForEachVoxel(VoxelFunc vf, const blocked_range3d<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxel(re, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
// ParallelForEachVoxelIf
// -----------------------------------------------------------------------------
//
// Image arguments by pointer
//
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachScalarIf(%(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
blocked_range<int> re(0, %(refim)s->GetNumberOfVoxels());
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
parallel_reduce(re, body);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
parallel_for(re, body);
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachScalarIf(VoxelFunc vf, OutsideFunc of, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachScalarIf<Domain>(%(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachScalarIf(%(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachScalarIf<Domain>(%(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachScalarIf(VoxelFunc vf, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachScalarIf<Domain>(%(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(%(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
if (%(refim)s->GetTSize()) {
ParallelForEachVoxelIf<Domain>(%(impargs)s, vf, of);
} else {
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
blocked_range<int> re(0, %(refim)s->GetNumberOfVoxels() / %(refim)s->GetT());
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
parallel_reduce(re, body);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
parallel_for(re, body);
}
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, OutsideFunc of, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(%(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(%(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachVoxelIf<Domain>(%(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(%(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(const irtkImageAttributes &attr, %(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
blocked_range3d<int> re(0, attr._z, 0, attr._y, 0, attr._x);
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
if (attr._dt) {
for (body._l = 0; body._l < attr._t; ++body._l) parallel_reduce(re, body);
} else {
parallel_reduce(re, body);
}
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
if (attr._dt) {
for (body._l = 0; body._l < attr._t; ++body._l) parallel_for(re, body);
} else {
parallel_for(re, body);
}
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const irtkImageAttributes &attr, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(attr, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(const irtkImageAttributes &attr, %(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachVoxelIf<Domain>(attr, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, const irtkImageAttributes &attr, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(attr, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(const blocked_range<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
parallel_reduce(re, body);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
parallel_for(re, body);
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range<int> &re, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(const blocked_range<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachVoxelIf<Domain>(re, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, const blocked_range<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(const blocked_range2d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
parallel_reduce(re, body);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
parallel_for(re, body);
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range2d<int> &re, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(const blocked_range2d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachVoxelIf<Domain>(re, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, const blocked_range2d<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(impargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(const blocked_range3d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(impargs)s, vf, of);
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
parallel_reduce(re, body);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
parallel_for(re, body);
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range3d<int> &re, %(imparams_by_pointer)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(const blocked_range3d<int> &re, %(imparams_by_pointer)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachVoxelIf<Domain>(re, %(impargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, const blocked_range3d<int> &re, %(imparams_by_pointer)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(impargs)s, vf);
}
//
// Image arguments by reference
//
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachScalarIf(%(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
blocked_range<int> re(0, %(refim)s.GetNumberOfVoxels());
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
parallel_reduce(re, body);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
parallel_for(re, body);
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachScalarIf(VoxelFunc vf, OutsideFunc of, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachScalarIf<Domain>(%(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachScalarIf(%(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachScalarIf<Domain>(%(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachScalarIf(VoxelFunc vf, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachScalarIf<Domain>(%(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(%(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
if (%(refim)s.GetTSize()) {
ParallelForEachVoxelIf<Domain>(%(imargs)s, vf, of);
} else {
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
blocked_range<int> re(0, %(refim)s.GetNumberOfVoxels() / %(refim)s.GetT());
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
parallel_reduce(re, body);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
parallel_for(re, body);
}
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, OutsideFunc of, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(%(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(%(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachVoxelIf<Domain>(%(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(%(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(const irtkImageAttributes &attr, %(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
blocked_range3d<int> re(0, attr._z, 0, attr._y, 0, attr._x);
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
if (attr._dt) {
for (body._l = 0; body._l < attr._t; ++body._l) parallel_reduce(re, body);
} else {
parallel_reduce(re, body);
}
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
if (attr._dt) {
for (body._l = 0; body._l < attr._t; ++body._l) parallel_for(re, body);
} else {
parallel_for(re, body);
}
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const irtkImageAttributes &attr, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(attr, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(const irtkImageAttributes &attr, %(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachVoxelIf<Domain>(attr, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, const irtkImageAttributes &attr, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(attr, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(const blocked_range<int> &re, %(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
parallel_reduce(re, body);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
parallel_for(re, body);
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range<int> &re, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(const blocked_range<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, const blocked_range<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(const blocked_range2d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
parallel_reduce(re, body);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
parallel_for(re, body);
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range2d<int> &re, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(const blocked_range2d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, const blocked_range2d<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(imargs)s, vf);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(const blocked_range3d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf, OutsideFunc &of)
{
%(class_name2)s<%(T)s, VoxelFunc, OutsideFunc, Domain> body(%(imargs)s, vf, of);
if (VoxelFunc::IsReduction() || OutsideFunc::IsReduction()) {
parallel_reduce(re, body);
vf.join(body._VoxelFunc);
of.join(body._OutsideFunc);
} else {
parallel_for(re, body);
}
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc, class OutsideFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, OutsideFunc of, const blocked_range3d<int> &re, %(imparams_by_reference)s)
{
%(assert_neither_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(const blocked_range3d<int> &re, %(imparams_by_reference)s, VoxelFunc &vf)
{
irtkNaryVoxelFunction::NOP of;
ParallelForEachVoxelIf<Domain>(re, %(imargs)s, vf, of);
}
// -----------------------------------------------------------------------------
template <class Domain, %(class_T)s, class VoxelFunc>
void ParallelForEachVoxelIf(VoxelFunc vf, const blocked_range3d<int> &re, %(imparams_by_reference)s)
{
%(assert_is_not_reduction)s
ParallelForEachVoxelIf<Domain>(re, %(imargs)s, vf);
}
""" % cfg)
# ------------------------------------------------------------------------------
# main
f.write("""
inline void _irtkforeach%svoxelfunction_must_not_be_reduction()
{
cerr << "(Parallel)ForEachVoxel(If): Voxel reductions must be passed by reference!"
" Pass voxel functor object(s) as last argument(s) instead of first." << endl;
exit(1);
}
""" % arity_to_string(arity))
num_const = arity
while num_const >= 0:
gencode(arity, num_const)
num_const -= 1
# ------------------------------------------------------------------------------
# footer - end of include guard
f.write("""
#endif
""")
f.close()
| 38.792095
| 180
| 0.556546
| 7,662
| 71,649
| 5.032237
| 0.043461
| 0.009856
| 0.02578
| 0.044194
| 0.902612
| 0.894857
| 0.885961
| 0.882174
| 0.876624
| 0.86721
| 0
| 0.005068
| 0.137978
| 71,649
| 1,846
| 181
| 38.813109
| 0.619206
| 0.02409
| 0
| 0.548849
| 0
| 0.065339
| 0.928383
| 0.4384
| 0
| 0
| 0
| 0
| 0.044804
| 0
| null | null | 0.001245
| 0.000622
| null | null | 0.000622
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f57ba4e1eddd0e08acac2172b03750dcedb5f04b
| 4,519
|
py
|
Python
|
tests/unit/lib/utils/test_lock_distributor.py
|
zhuhaow/aws-sam-cli
|
59d82ec6848b5a0cdd544d8ada838d4d34052971
|
[
"Apache-2.0"
] | 2,959
|
2018-05-08T21:48:56.000Z
|
2020-08-24T14:35:39.000Z
|
tests/unit/lib/utils/test_lock_distributor.py
|
zhuhaow/aws-sam-cli
|
59d82ec6848b5a0cdd544d8ada838d4d34052971
|
[
"Apache-2.0"
] | 1,469
|
2018-05-08T22:44:28.000Z
|
2020-08-24T20:19:24.000Z
|
tests/unit/lib/utils/test_lock_distributor.py
|
zhuhaow/aws-sam-cli
|
59d82ec6848b5a0cdd544d8ada838d4d34052971
|
[
"Apache-2.0"
] | 642
|
2018-05-08T22:09:19.000Z
|
2020-08-17T09:04:37.000Z
|
from unittest import TestCase
from unittest.mock import MagicMock, call, patch
from samcli.lib.utils.lock_distributor import LockChain, LockDistributor, LockDistributorType
class TestLockChain(TestCase):
def test_aquire_order(self):
locks = {"A": MagicMock(), "B": MagicMock(), "C": MagicMock()}
call_mock = MagicMock()
call_mock.a = locks["A"]
call_mock.b = locks["B"]
call_mock.c = locks["C"]
lock_chain = LockChain(locks)
lock_chain.acquire()
call_mock.assert_has_calls([call.a.acquire(), call.b.acquire(), call.c.acquire()])
def test_aquire_order_shuffled(self):
locks = {"A": MagicMock(), "C": MagicMock(), "B": MagicMock()}
call_mock = MagicMock()
call_mock.a = locks["A"]
call_mock.b = locks["B"]
call_mock.c = locks["C"]
lock_chain = LockChain(locks)
lock_chain.acquire()
call_mock.assert_has_calls([call.a.acquire(), call.b.acquire(), call.c.acquire()])
def test_release_order(self):
locks = {"A": MagicMock(), "B": MagicMock(), "C": MagicMock()}
call_mock = MagicMock()
call_mock.a = locks["A"]
call_mock.b = locks["B"]
call_mock.c = locks["C"]
lock_chain = LockChain(locks)
lock_chain.release()
call_mock.assert_has_calls([call.a.release(), call.b.release(), call.c.release()])
def test_release_order_shuffled(self):
locks = {"A": MagicMock(), "C": MagicMock(), "B": MagicMock()}
call_mock = MagicMock()
call_mock.a = locks["A"]
call_mock.b = locks["B"]
call_mock.c = locks["C"]
lock_chain = LockChain(locks)
lock_chain.release()
call_mock.assert_has_calls([call.a.release(), call.b.release(), call.c.release()])
def test_with(self):
locks = {"A": MagicMock(), "C": MagicMock(), "B": MagicMock()}
call_mock = MagicMock()
call_mock.a = locks["A"]
call_mock.b = locks["B"]
call_mock.c = locks["C"]
with LockChain(locks) as _:
call_mock.assert_has_calls([call.a.acquire(), call.b.acquire(), call.c.acquire()])
call_mock.assert_has_calls(
[call.a.acquire(), call.b.acquire(), call.c.acquire(), call.a.release(), call.b.release(), call.c.release()]
)
class TestLockDistributor(TestCase):
    """Tests for LockDistributor lock creation and per-key caching."""

    @patch("samcli.lib.utils.lock_distributor.threading.Lock")
    @patch("samcli.lib.utils.lock_distributor.multiprocessing.Lock")
    def test_thread_get_locks(self, process_lock_mock, thread_lock_mock):
        """THREAD distributor hands out threading locks, one per key, cached."""
        locks = [MagicMock() for _ in range(4)]
        thread_lock_mock.side_effect = locks
        distributor = LockDistributor(LockDistributorType.THREAD, None)
        keys = ["A", "B", "C"]
        result = distributor.get_locks(keys)
        # Per-key locks start at index 1 — the first mock is consumed earlier
        # (presumably by the distributor's own internal lock; verify in source).
        for position, key in enumerate(keys, start=1):
            self.assertEqual(result[key], locks[position])
        # Requesting the same keys again returns the cached lock objects.
        self.assertEqual(distributor.get_locks(keys)["A"], locks[1])

    @patch("samcli.lib.utils.lock_distributor.threading.Lock")
    @patch("samcli.lib.utils.lock_distributor.multiprocessing.Lock")
    def test_process_get_locks(self, process_lock_mock, thread_lock_mock):
        """PROCESS distributor without a manager uses multiprocessing locks."""
        locks = [MagicMock() for _ in range(4)]
        process_lock_mock.side_effect = locks
        distributor = LockDistributor(LockDistributorType.PROCESS, None)
        keys = ["A", "B", "C"]
        result = distributor.get_locks(keys)
        for position, key in enumerate(keys, start=1):
            self.assertEqual(result[key], locks[position])
        self.assertEqual(distributor.get_locks(keys)["A"], locks[1])

    @patch("samcli.lib.utils.lock_distributor.threading.Lock")
    @patch("samcli.lib.utils.lock_distributor.multiprocessing.Lock")
    def test_process_manager_get_locks(self, process_lock_mock, thread_lock_mock):
        """PROCESS distributor with a manager sources locks from manager.Lock()."""
        manager_mock = MagicMock()
        locks = [MagicMock() for _ in range(4)]
        manager_mock.dict.return_value = dict()
        manager_mock.Lock.side_effect = locks
        distributor = LockDistributor(LockDistributorType.PROCESS, manager_mock)
        keys = ["A", "B", "C"]
        result = distributor.get_locks(keys)
        for position, key in enumerate(keys, start=1):
            self.assertEqual(result[key], locks[position])
        self.assertEqual(distributor.get_locks(keys)["A"], locks[1])
| 43.451923
| 120
| 0.637088
| 554
| 4,519
| 5.00722
| 0.099278
| 0.074982
| 0.061283
| 0.045422
| 0.876712
| 0.851478
| 0.851478
| 0.824802
| 0.775775
| 0.749459
| 0
| 0.003343
| 0.205576
| 4,519
| 103
| 121
| 43.873786
| 0.769359
| 0
| 0
| 0.722222
| 0
| 0
| 0.079
| 0.067714
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.088889
| false
| 0
| 0.033333
| 0
| 0.144444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
192e85736942bc2980e9ea930a34e182888b7255
| 7,195
|
py
|
Python
|
Discord-Token-Checker.py
|
CLB-09/zero-tool.py
|
23af67ce62242aca7bab2defced79e643485e053
|
[
"MIT"
] | 2
|
2021-11-08T15:14:01.000Z
|
2022-02-28T00:58:25.000Z
|
Discord-Token-Checker.py
|
CLB-09/zero-tool.py
|
23af67ce62242aca7bab2defced79e643485e053
|
[
"MIT"
] | 1
|
2021-10-09T06:09:21.000Z
|
2021-10-11T08:22:06.000Z
|
Discord-Token-Checker.py
|
Zero-Tool/Zero-attacker
|
62d6c8c11c686afe338e67c05d7753f396cdb360
|
[
"MIT"
] | 1
|
2021-11-08T15:45:05.000Z
|
2021-11-08T15:45:05.000Z
|
from pytransform import pyarmor_runtime
pyarmor_runtime()
__pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x09\x00\x61\x0d\x0d\x0a\x08\x2d\xa0\x01\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\xae\x06\x00\x00\x00\x00\x00\x18\x9f\x6d\x34\xb9\xe3\xbe\xb7\xb0\x0e\xe2\xca\x4e\x88\x2b\x4e\xd3\x00\x00\x00\x00\x00\x00\x00\x00\x67\x0d\x53\x5e\x4a\xad\xf5\x66\x9f\xdf\x5f\x2c\x12\xe1\xa1\xd8\x32\x70\xf3\x5c\x4a\xbe\x79\xa3\x64\x8d\x23\xb6\x0c\x73\x6f\x08\x5f\xea\x4b\xf9\x61\x15\xf4\x26\x21\xf8\x75\xf4\xa1\xd9\xb9\x9c\xf8\x83\x16\x1f\xfb\x52\xb7\xd6\xdd\x80\xe3\x18\xf3\xb9\x66\x39\x16\x99\x75\x5f\x1c\xe0\xb9\x5d\x26\x71\xbf\xa5\x79\x7c\xc8\x41\x80\x18\xbd\x39\x10\x09\x69\xe1\x46\x57\x71\xea\x10\x52\x8d\x74\xbe\x24\x38\x5b\xd8\x4d\xaa\x29\x4a\xc1\x22\xdc\x9e\x89\x9a\x83\x77\xb9\xb0\xcc\xa1\x72\x76\x56\xaa\x36\x0e\x44\x49\x0b\x5f\x12\x5f\xe7\x4e\xf8\xe8\x5a\x6c\x0b\x16\x77\xc5\x2a\xd7\x71\x2d\x00\xb8\xd8\x68\x7c\x1b\xd3\x73\x88\xb8\xbf\xb7\xb7\x8f\x20\x12\x66\x5b\x0f\x4d\x42\x38\x19\x7a\xb5\xe6\xb5\x86\x73\xaf\xa7\x62\x0d\xcd\xf9\xa5\xe0\x1b\xf5\x1b\x64\xc3\x9b\x67\x88\x21\xf6\x0b\x6c\xae\x26\x65\x6b\x71\xce\x92\x42\xc4\x96\x8b\xae\x0c\xc0\xc4\xf6\x44\xf8\x7a\x1d\xdb\xbd\x31\x5e\x1a\xe1\x59\xe1\xc9\x6a\x48\xa9\x3d\x2b\x95\x07\xce\x99\x91\xd9\x4c\x9e\xc9\xa9\xb3\x49\x74\x4f\x67\xef\xb3\x72\x0d\x47\x10\x92\xa3\xc6\x0c\xed\x96\xe1\xba\xf2\x6f\x7c\xa9\x9d\xf9\x6e\x82\x7c\xd7\x10\x36\x35\x53\x3c\xde\x17\xb3\x5b\x08\x91\x5f\x99\x32\xd3\x7e\xed\x00\xdd\x5a\x5d\x18\xbd\x6f\x83\x82\xa2\x58\xa1\xe0\xbf\x63\x9b\x52\x2b\x7e\x01\xc4\xa3\x69\xf3\x7f\x0c\xa8\xe8\x41\x03\x0f\x54\xe1\xf7\x4d\xe7\x57\xec\x43\x27\xc6\x20\x30\x46\xc6\x26\x9c\x67\x1d\xf1\x61\xbe\x57\x05\x6a\xc4\x20\xd7\x68\x05\x2a\x17\xaa\x80\x43\xaf\xe4\xaa\x5f\xe1\xa8\x19\x63\xf1\x97\x48\xa3\x39\xb9\x81\x0b\xa2\x19\xf1\x6d\x74\x22\x0d\xc7\x8f\xab\xd1\x2d\xdd\xae\x9f\x91\x97\x37\x64\xdd\xa0\x49\x4f\xc2\x82\x11\x54\x94\x35\xc5\xef\x09\x55\x1e\x8e\x67\x74\x2c\x24\x33\x40\xe9\x7a\xb2\x53\x6f\x07\xf5\x3c\x81\x16\x96\x90\x41\x76\x5c\x4a\x23\x70\x9c\xeb\x0c\x83\x12\x
72\xb4\xb1\x1a\x1c\x99\x33\x3e\xc3\x08\x05\x8e\x37\xf6\xb4\xeb\x98\x0f\x83\x31\x4c\xdf\x87\xf9\x22\x16\x81\xbb\x91\x3f\x3d\x3f\x2e\x40\x5f\x40\x85\xf5\x8c\x4e\xf4\xa8\x29\x52\xd0\x53\x28\xbc\x5b\x64\x10\xe8\x29\x7f\x90\xef\xa8\xe6\x98\x1a\xaa\xa5\xd6\xa8\xf9\xbb\xef\x8f\xce\x87\xd9\x91\xb4\xc5\xa5\x6a\x89\xa5\x7d\x16\x50\xe2\xea\x49\x35\x36\x1d\xaf\x77\x28\xa7\x89\xc2\x5f\xcd\x0d\x87\x65\x99\x29\xf5\xee\x9e\x0d\x54\x81\x50\x6d\xf4\xb4\xbb\x46\x86\xe7\x7b\xbc\x61\xe1\x4c\x62\xb8\x69\x27\xcb\x48\x07\x6b\xff\x1c\x5a\x0f\x4d\x4a\xcb\xa9\x16\xe5\xd7\xe6\x75\x20\x39\xc1\xff\xdb\x6e\xca\xfb\x79\x9d\x01\xf8\xb9\xae\x86\x41\xdb\x4f\xcd\x29\xd7\xb2\x4d\x93\x26\x96\xc2\x07\x37\x56\x8c\x05\xcd\x03\x99\xb7\x32\x0c\xe5\xa8\x08\x95\x65\xe3\xc0\xcb\x61\x18\x09\xfc\xf6\x33\xc8\x4d\x33\x89\x8c\x5f\xb7\x02\xe9\x38\x11\xe2\x22\xa7\x3d\xb8\x4c\x55\x0c\x0a\xde\x0b\xa2\x8f\x65\x2d\x6b\xfb\xc1\x7a\x8e\x99\xf1\xf3\x2a\x18\x43\x2c\xe0\x43\x72\x81\x0c\x67\x33\xf6\x55\x2b\x86\x23\x9a\x4b\x15\x82\x32\x53\x20\xe1\x5a\xdf\x76\xe3\x01\x31\xa2\x5c\x5a\x19\x2e\x70\x41\xd6\x53\xea\x1c\x0e\xe1\x6f\xe3\x7a\x54\x95\x68\x2a\x38\xed\xcc\x57\xbe\xc7\x36\x1f\xe4\xc7\x72\xcc\x26\xbc\xfb\xc5\x31\xda\xd8\x0c\x91\xfe\x9e\xe4\x4f\x14\x09\xde\x5e\xaa\xd2\x27\xfe\x88\xb8\x86\x42\xf4\x45\xd3\xbb\x7c\x34\x60\x75\x43\x06\xac\x85\x28\x20\x32\x7b\x65\xfb\x90\x7e\xe3\x26\xf4\x17\x92\x2b\xa7\x19\x8a\x96\x69\xa6\x47\x34\xfa\xd3\x05\x31\x3d\xe5\x48\x17\x7b\x7f\x1e\x75\x69\x9f\x5f\x20\x98\x3d\x47\x8b\x77\x0b\x1c\x20\x3b\x1b\x81\x32\x8c\xd3\xf5\x92\x21\xcd\xc4\xf3\x56\x09\x0f\x77\x67\x48\x6d\xff\x07\x94\x15\x65\x97\xed\x18\xb6\x74\x6a\x0e\x30\x5f\x2f\x85\xed\x26\xf2\x7c\x69\x64\xa9\x7d\xd5\x86\x0e\x67\xcb\x52\x7d\x3a\x50\x6c\x0a\x48\x3d\xf2\xd0\xe5\x0c\xcf\xa0\xd3\xb5\xfa\xaf\xd5\x06\x76\x6a\x12\xfb\x40\xfa\xc0\xaa\x2f\x6b\x2b\xbb\x70\xdf\x94\xc1\x70\x42\xd4\x50\x7f\x78\xd9\x13\x3b\x68\x97\x0e\x44\x8d\xf3\xff\x73\xc2\xa7\x48\xae\x26\x8b\x76\x82\x08\xd1\x2c\xa6\xd7\x42\x28\xd6\x94\x6d\x29\x86\x01\xa9\xf1\xcf\x86\x37\x07\x31\x
a0\x38\xa7\x59\x1a\x03\x57\xde\xb4\x43\x5e\xf1\xd2\xb0\x7f\x9a\x6e\xa2\x45\xc5\xa8\x93\x98\x6f\x06\xc6\x77\x09\xcc\x35\x9c\x28\xa0\xd1\x83\x46\xa6\xcb\x35\x68\xdb\x6d\xde\xe5\x27\x7d\x66\xbb\x9b\x8a\x5a\x4d\xf9\x32\xe5\xba\x07\xc5\xe1\x4e\xa4\x65\x0e\x3a\x02\x8e\x91\x0d\x6b\xd1\x68\x57\xa6\x49\x51\x46\x40\xcf\xcb\xf5\x1c\xf7\x23\x7c\x1b\xa4\xf9\xaa\xd9\xfc\x07\x9c\x61\xd3\xe3\x74\x65\x26\xa1\xb9\x5e\xc6\x6b\x60\x14\x5a\x05\xe1\xc0\x51\x6c\x29\x73\x52\xcc\xe0\xd8\x6e\x40\x45\x5f\x11\x81\x79\x95\x58\xa0\xd3\x87\x3d\xd7\xd5\x32\x6c\x7c\xf4\xd7\xca\x7a\xbf\x8c\x5b\x27\xeb\x99\xab\x0a\x49\x5d\x22\x1f\xcf\xe1\x69\xd5\x72\xaf\xed\x39\x2a\x8b\x9b\x39\x5f\x50\xc4\x84\xcd\xd1\xa2\x5a\xa0\xdf\x0b\x67\xc9\x12\x0c\x30\x5a\x7b\x13\xb1\x00\x97\xf0\x96\x6d\x63\x59\x2d\xa6\x03\xde\x32\xdf\x95\xe7\xd5\x26\x98\xd3\x76\x2e\xae\x61\x2d\x44\x12\xe3\x14\x70\xaa\x47\x43\xc1\x84\x80\x25\x9d\xdb\xb8\xa7\x8d\x87\x97\xd4\xf5\x00\xca\x80\xa8\x60\xad\x23\xbb\x61\xdc\xf6\xbe\x08\x60\xe8\xb4\xe2\xe6\xbe\x00\x4a\x7d\x9d\xa2\xa2\x83\x3b\xa0\xf3\xc2\x0a\xe1\x31\xee\x51\xb6\xcc\xd0\xf1\xbb\xac\xe2\xaf\x23\xcb\x76\x52\x1d\x9f\x18\x8e\xe0\xa1\x73\x7b\xb0\x48\xfd\x8c\xe9\x58\x4e\xb5\xa5\x78\xb5\x9d\xd3\xb5\xf8\x19\x6c\x64\x18\xaa\x94\xc6\xf2\x37\x36\x25\x59\xa9\x31\x6f\x3e\xfd\xd3\x56\xca\xdd\x5d\x4d\x23\x97\x6e\xa9\xcf\x76\x23\xdb\x21\x50\x9c\x44\xf0\xf1\xe9\xdc\x02\x57\xad\xb1\xfe\x37\xb1\x20\x07\x73\xff\xd0\xdb\x8f\x6e\x4e\xb7\xb5\x71\x5b\x5b\x07\x4a\xea\xf5\xe9\x1b\xb5\x1f\xa4\x64\x7e\x26\x69\xc5\xef\x07\x65\xb7\x35\x8c\x6a\x4d\x6c\xc1\x73\xb5\x81\x66\x3b\x06\xfd\x20\x25\x21\x61\xe4\x2b\x15\x1a\x62\xc8\x72\xae\x56\x10\x0f\x55\x69\x0b\x6b\x97\x82\xf0\xc7\x8e\x03\xe6\x06\x22\xde\x30\x4c\x9e\xab\x93\x97\x9f\xeb\x68\x17\xc2\xe1\xeb\xa4\x13\xaf\xfd\xdc\xfa\xea\x5a\x3d\x3b\xd9\x29\xc4\x2a\x2c\x5c\x11\x47\x47\x99\x3a\xc2\x43\x03\x70\x8b\xec\x0c\x78\x9a\xa2\x94\x4b\xa8\xe0\x64\xa7\x11\x3c\xf9\x37\xd2\xa1\x7d\xd8\x1a\xe8\xea\x61\x6f\x46\x2d\xb8\x92\x22\x4a\xa1\x83\xf1\x86\x75\x1a\x94\xe6\x31\x8a\x9f\x5b\xac\x
64\xbb\x6c\xfb\x67\x79\x6f\xeb\x68\x79\x86\xcb\xe1\x95\x32\xc6\x06\x20\x1f\x53\x51\x5c\x5b\x98\x07\xf0\x72\xf1\xec\x9f\x22\x26\x83\xa5\x41\x26\x91\xf3\x59\xdb\xa3\xf9\x09\x4e\x04\xe4\x9c\x4a\x24\x35\x9b\x18\xbf\x7f\x52\xa2\xa5\x54\xa6\x27\x72\x19\x9e\x42\x50\xef\x97\x9d\x08\x8d\x97\x7e\x4f\xb0\xa3\xd9\x57\xbe\xd9\x5a\xd2\xf0\x5e\x27\xb6\x01\x86\x12\x0a\xd8\x38\xad\x98\x02\x44\x21\x16\xe3\x4a\x8f\x8a\x8a\xab\xc2\x70\x53\x51\x3f\x63\x6d\x78\x05\xd8\x73\xd3\xa6\xd0\x5a\x39\x14\xe8\xdd\x27\x75\x7a\xe2\x7a\x63\xa9\x43\xae\x04\x1d\x32\x05\x85\x9e\x13\x91\xf5\x18\x5e\x86\xb3\xc6\xf9\xaa\xbd\x26\x4b\xe9\xcc\x95\xa9\x2b\x79\xf1\x4e\x4b\xea\xec\x9f\x2c\xc5\x31\xd5\x21\x28\x22\x6d\x98\x7d\x50\x5d\x86\x55\xba\x9d\x0d\x84\xf1\x82\x7d\x27\xcb\xd2\x60\x8a\x86\x87\xb7\x0b\xf7\x0e\xe2\xd8\x5f\xc8\xff\xe3\xd9\x46\x03\xf5\xed\x6c\x8e\x48\x99\xfb\x82\x22\x9f\xfe\x96\x57\xfa\xae\xd3\xb6\xe2\x3a\xe0\x4a\x14\x0a\x15\x1b\xf0\xee\xe9\xe8\x42\x0e\x04\x5c\x19\x04\x44\xfb\xe5\x43\xfe\x10\xfb\x8b\x6a\xb7\x0e\x14\xb9\xbe\xfe\xa1\xa6\x59\x62\xaf\xbe\xf9\x2d\x9a\xdf\xfd\x3e\xb7\x43\xf4\x24\xfd\xe1\xe0\x7c\xa2\x35\x0d\x9c\x74\xea\x10\x8f\x52\x4b', 2)
| 2,398.333333
| 7,135
| 0.750799
| 1,786
| 7,195
| 3.016797
| 0.148376
| 0.021158
| 0.021715
| 0.017817
| 0.007238
| 0.004454
| 0.004454
| 0
| 0
| 0
| 0
| 0.318859
| 0.00139
| 7,195
| 3
| 7,135
| 2,398.333333
| 0.431037
| 0
| 0
| 0
| 0
| 0.333333
| 0.986378
| 0.986378
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 11
|
1960723187a6473239d1c5730b2d6ce5fd01bd01
| 22,027
|
py
|
Python
|
tg_bot.py
|
SpereShelde/Stock_news
|
a9e8a5f94321214037a7f1183dcdb644c200d034
|
[
"MIT"
] | 1
|
2020-12-19T13:40:35.000Z
|
2020-12-19T13:40:35.000Z
|
tg_bot.py
|
SpereShelde/Stock_news
|
a9e8a5f94321214037a7f1183dcdb644c200d034
|
[
"MIT"
] | null | null | null |
tg_bot.py
|
SpereShelde/Stock_news
|
a9e8a5f94321214037a7f1183dcdb644c200d034
|
[
"MIT"
] | 3
|
2020-04-21T06:34:57.000Z
|
2020-12-19T13:40:36.000Z
|
import logging
from telegram.ext import Updater, CallbackContext, CommandHandler
import sqlite3
from eastmoney import east_money
from sina_bulletin import sina_bulletin
from sina_news import sina_news
# Truncate the shared log file so each run starts with an empty log.
open("news.log", "w").close()
# Start the background news sources (each module exposes a start() entry
# point; presumably these launch scraper threads — behavior defined in the
# respective project modules, not visible here).
east_money.start()
sina_news.start()
sina_bulletin.start()
# Root-logger configuration: everything at DEBUG and above goes to news.log
# with a "timestamp - level - module@line : message" layout.
logging.basicConfig(level=logging.DEBUG,
                    filename="news.log",
                    datefmt='%Y/%m/%d %H:%M:%S',
                    format='%(asctime)s - %(levelname)s - %(module)s@%(lineno)d : %(message)s')
# Module-wide logger used by every handler below.
logger = logging.getLogger()
def menu(update, context):
    """Handle /m, /menu and /start: send the command menu to the user.

    Replies in English or Chinese based on the stored language preference
    (``users.language == 1`` means English) and appends the current push
    subscription state. Unregistered users get the English menu without a
    language switch.

    Fixes: the sqlite connection is now always closed (the unregistered-user
    path used to return before closing it); the Chinese menu reports the push
    state in Chinese; missing newlines after the /vc and /ca entries added.
    """
    chat_id = update.message.from_user['id']
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    try:
        result = None
        try:
            cursor.execute("SELECT state, language FROM users WHERE chat_id = ?", (chat_id,))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query user.")
        if result:
            if result[1] == 1:  # English
                text = ("Hi,%s. I'm here.\n"
                        "What can I do for you today?\n"
                        "/m View main menu\n"
                        "/a Add stock code, like \"sh600519\"\n"
                        "/rm Remove stock code, like \"sh600519\"\n"
                        "/vc View all stock codes\n"
                        "/sk Add skip word\n"
                        "/ca Remove skip word\n"
                        "/vw View all skip words\n"
                        "/st Start pushing news\n"
                        "/sp Stop pushing news\n"
                        "/cn 中文\n") % update.message.from_user['first_name']
                if result[0] == 1:
                    text += "You are receiving news push"
                else:
                    text += "You are not receiving news push"
            else:  # Chinese
                text = ("您好,%s. 很高兴为您服务.\n"
                        "今天想做些什么呢\n"
                        "/m 主菜单\n"
                        "/a 添加股票代码, 格式如 \"sh600519\"\n"
                        "/rm 移除股票代码, 格式如 \"sh600519\"\n"
                        "/vc 查看所有股票代码\n"
                        "/sk 添加屏蔽词\n"
                        "/ca 移除屏蔽词\n"
                        "/vw 查看所有屏蔽词\n"
                        "/st 开始接受新闻推送\n"
                        "/sp 停止接受新闻推送\n"
                        "/en English\n") % update.message.from_user['first_name']
                if result[0] == 1:
                    text += "您正在接收新闻推送"
                else:
                    text += "您目前没有接收新闻推送"
            context.bot.send_message(chat_id=chat_id, text=text)
        else:
            # Unknown user: English-only menu, no language toggle.
            text = ("Hi,%s. I'm here.\n"
                    "What can I do for you today?\n"
                    "/m View main menu\n"
                    "/a Add stock code, like \"sh600519\"\n"
                    "/rm Remove stock code, like \"sh600519\"\n"
                    "/vc View all stock codes\n"
                    "/sk Add skip word\n"
                    "/ca Remove skip word\n"
                    "/vw View all skip words\n"
                    "/st Start pushing news\n"
                    "/sp Stop pushing news\n") % update.message.from_user['first_name']
            context.bot.send_message(chat_id=chat_id, text=text)
    finally:
        cursor.close()
        connection.close()
def add(update, context):
code = update.message.text.rstrip().lstrip()[3:]
id = update.message.from_user['id']
connection = sqlite3.connect('news.db')
cursor = connection.cursor()
result = None
try:
cursor.execute("SELECT language FROM users WHERE chat_id = ?", (id,))
result = cursor.fetchone()
except Exception as e:
logger.error(e)
logger.error("Cannot query user.")
if not result:
context.bot.send_message(chat_id=id, text="You are not registered.\nPlease type /st to register.\n你还没有注册,输入/st 即可注册")
return
lan = result[0]
if not code:
if lan == 1:
context.bot.send_message(chat_id=id, text="Please append stock code after /a\nlike \"/add sh600519\"")
else:
context.bot.send_message(chat_id=id, text="请在 /a 命令后输入股票代码\n比如 \"/a sh600519\"")
return
result = None
try:
cursor.execute("SELECT id FROM codes WHERE code = ? and chat_id = ?", (code, id))
result = cursor.fetchone()
except Exception as e:
logger.error(e)
logger.error("Cannot query stock code.")
if result:
try:
cursor.execute("UPDATE codes set state=? WHERE code = ? and chat_id = ?", (1, code, id))
connection.commit()
except Exception as e:
logger.error(e)
logger.error("Cannot update stock code")
connection.rollback()
else:
try:
cursor.execute("INSERT INTO codes (code, state, chat_id, first) VALUES (?,?,?,?)", (code, 1, id, 1))
connection.commit()
except Exception as e:
logger.error(e)
logger.error("Cannot insert stock code")
connection.rollback()
result = None
try:
cursor.execute("SELECT code FROM codes WHERE state = ? and chat_id = ?", (1, id))
result = cursor.fetchall()
except Exception as e:
logger.error(e)
logger.error("Cannot query stock code")
cursor.close()
connection.close()
if lan == 1:
words = "Successfully add new stock code %s\nNow I push news of these codes to you:" % code
else:
words = "成功添加股票代码 %s\n现在我推送这些代码的公司新闻:" % code
if result:
for re in result:
words += "\n%s" % re[0]
context.bot.send_message(chat_id=id, text=words)
def remove(update, context):
    """Handle /rm: unsubscribe the user from news for a stock code.

    Marks the code row inactive (state=-1) when present and replies with
    the remaining active codes in the user's language.

    Fix: the sqlite connection is now always closed — both early returns
    used to leak it.
    """
    code = update.message.text.rstrip().lstrip()[4:]  # strip the "/rm " prefix
    chat_id = update.message.from_user['id']
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    try:
        result = None
        try:
            cursor.execute("SELECT language FROM users WHERE chat_id = ?", (chat_id,))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query user.")
        if not result:
            context.bot.send_message(chat_id=chat_id, text="You are not registered.\nPlease type /st to register.\n你还没有注册,输入/st 即可注册")
            return
        lan = result[0]
        if not code:
            if lan == 1:
                context.bot.send_message(chat_id=chat_id, text="Please append stock code after /rm\nlike \"/rm sh600519\"")
            else:
                context.bot.send_message(chat_id=chat_id, text="请在 /rm 命令后输入股票代码\n比如 \"/rm sh600519\"")
            return
        result = None
        try:
            cursor.execute("SELECT id FROM codes WHERE code = ? and chat_id = ?", (code, chat_id))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query code")
        if result:
            # Deactivate rather than delete so /a can cheaply re-enable it.
            try:
                cursor.execute("UPDATE codes set state=? WHERE code = ? and chat_id = ?", (-1, code, chat_id))
                connection.commit()
            except Exception as e:
                logger.error(e)
                logger.error("Cannot update code")
                connection.rollback()
            if lan == 1:
                words = "Successfully remove stock code %s\nNow I push news of those codes:" % code
            else:
                words = "成功删除股票代码 %s\n现在我推送这些代码的公司:" % code
        else:
            words = "我现在并不推送 \"%s\"\n我只推送这些代码的公司:" % code
        result = None
        try:
            cursor.execute("SELECT code FROM codes WHERE state = ? and chat_id = ?", (1, chat_id))
            result = cursor.fetchall()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query code")
        if result:
            for row in result:
                words += "\n%s" % row[0]
        context.bot.send_message(chat_id=chat_id, text=words)
    finally:
        cursor.close()
        connection.close()
def skip(update, context):
    """Handle /sk: add a skip word so matching news items are not pushed.

    Re-activates the word if it already exists for this chat, otherwise
    inserts it, then replies with all active skip words.

    Fixes: the registration query now selects ``language`` (it used to select
    ``id``, so ``lan`` held the row id and Chinese users got English replies);
    the sqlite connection is always closed (both early returns leaked it);
    the Chinese prompt no longer contains the stray English word "like".
    """
    word = update.message.text.rstrip().lstrip()[4:]  # strip the "/sk " prefix
    chat_id = update.message.from_user['id']
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    try:
        result = None
        try:
            cursor.execute("SELECT language FROM users WHERE chat_id = ?", (chat_id,))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query user.")
        if not result:
            context.bot.send_message(chat_id=chat_id, text="You are not registered.\nPlease type /st to register.\n你还没有注册,输入/st 即可注册")
            return
        lan = result[0]
        if not word:
            if lan == 1:
                context.bot.send_message(chat_id=chat_id, text="Please append the word after /sk\nlike \"/sk 提问\"")
            else:
                context.bot.send_message(chat_id=chat_id, text="请在 /sk 命令后输入关键词\n比如 \"/sk 提问\"")
            return
        result = None
        try:
            cursor.execute("SELECT id FROM skip_words WHERE word = ? and chat_id = ?", (word, chat_id))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query skip word")
        if result:
            # Word already known — re-activate it.
            try:
                cursor.execute("UPDATE skip_words set state=? WHERE word = ? and chat_id = ?", (1, word, chat_id))
                connection.commit()
            except Exception as e:
                logger.error(e)
                logger.error("Cannot update skip word")
                connection.rollback()
        else:
            try:
                cursor.execute("INSERT INTO skip_words (word, state, chat_id) VALUES (?,?,?)", (word, 1, chat_id))
                connection.commit()
            except Exception as e:
                logger.error(e)
                logger.error("Cannot insert skip word")
                connection.rollback()
        result = None
        try:
            cursor.execute("SELECT word FROM skip_words WHERE state = ? and chat_id = ?", (1, chat_id))
            result = cursor.fetchall()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query skip word")
        if lan == 1:
            words = "Successfully add new skip word %s\nNow I skip those key words:" % word
        else:
            words = "成功添加筛选关键词 %s\n我现在不推送包含以下关键词的新闻:" % word
        if result:
            for row in result:
                words += "\n%s" % row[0]
        context.bot.send_message(chat_id=chat_id, text=words)
    finally:
        cursor.close()
        connection.close()
def cancel(update, context):
    """Handle /ca: remove a skip word so matching news is pushed again.

    Marks the word inactive (state=-1) when present and replies with the
    remaining active skip words.

    Fix: the sqlite connection is now always closed — both early returns
    used to leak it.
    """
    word = update.message.text.rstrip().lstrip()[4:]  # strip the "/ca " prefix
    chat_id = update.message.from_user['id']
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    try:
        result = None
        try:
            cursor.execute("SELECT language FROM users WHERE chat_id = ?", (chat_id,))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query user.")
        if not result:
            context.bot.send_message(chat_id=chat_id, text="You are not registered.\nPlease type /st to register.\n你还没有注册,输入/st 即可注册")
            return
        lan = result[0]
        if not word:
            if lan == 1:
                context.bot.send_message(chat_id=chat_id, text="Please append the word after /ca\nlike \"/ca 提问\"")
            else:
                context.bot.send_message(chat_id=chat_id, text="请在 /ca 命令后输入关键词\n比如 \"/ca 提问\"")
            return
        result = None
        try:
            cursor.execute("SELECT id FROM skip_words WHERE word = ? and chat_id = ?", (word, chat_id))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query skip word")
        if result:
            try:
                cursor.execute("UPDATE skip_words set state=? WHERE word = ? and chat_id = ?", (-1, word, chat_id))
                connection.commit()
            except Exception as e:
                logger.error(e)
                logger.error("Cannot update skip word")
                connection.rollback()
            if lan == 1:
                words = "Successfully remove skip word %s\nNow I skip those key words:" % word
            else:
                words = "成功删除筛选关键词 %s\n我现在不推送包含以下关键词的新闻:" % word
        else:
            words = "I currently do not skip the word \"%s\"\nInstead, I skip those key words:" % word
        result = None
        try:
            cursor.execute("SELECT word FROM skip_words WHERE state = ? and chat_id = ?", (1, chat_id))
            result = cursor.fetchall()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query skip word")
        if result:
            for row in result:
                words += "\n%s" % row[0]
        context.bot.send_message(chat_id=chat_id, text=words)
    finally:
        cursor.close()
        connection.close()
def start(update, context):
    """Handle /st: register a new user, or re-enable news push for an
    existing one (users.state = 1). New users default to English (language=1).

    Fix: the sqlite connection is now always closed — the original never
    closed it on either path.
    """
    chat_id = update.message.from_user['id']
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    try:
        result = None
        try:
            cursor.execute("SELECT language FROM users WHERE chat_id = ?", (chat_id,))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query user.")
        if not result:
            # First contact: create the user with push enabled, English default.
            try:
                cursor.execute("INSERT INTO users (chat_id, state, language) VALUES (?,?,?)",
                               (chat_id, 1, 1))
                connection.commit()
            except Exception as e:
                logger.error(e)
                logger.error("Cannot add new user %d" % chat_id)
                connection.rollback()
            context.bot.send_message(chat_id=chat_id, text="Congratulations! You are now registered\nI'm going to push news to you.\nType /sp to stop.")
        else:
            lan = result[0]
            try:
                cursor.execute("UPDATE users set state=? WHERE chat_id = ?", (1, chat_id))
                connection.commit()
            except Exception as e:
                logger.error(e)
                logger.error("Cannot update user state.")
                connection.rollback()
            if lan == 1:
                context.bot.send_message(chat_id=chat_id, text="You already registered.\nType /sp to stop.")
            else:
                context.bot.send_message(chat_id=chat_id, text="您已经注册\n输入 /sp 停止推送")
    finally:
        cursor.close()
        connection.close()
def stop(update, context):
    """Handle /sp: disable news push for a registered user (users.state = -1).

    Fix: the sqlite connection is now always closed — the original never
    closed it on either path.
    """
    chat_id = update.message.from_user['id']
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    try:
        result = None
        try:
            cursor.execute("SELECT language FROM users WHERE chat_id = ?", (chat_id,))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query user.")
        if not result:
            context.bot.send_message(chat_id=chat_id, text="How can I stop if I haven't even started?")
            return
        lan = result[0]
        try:
            cursor.execute("UPDATE users set state=? WHERE chat_id = ?", (-1, chat_id))
            connection.commit()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot update user state.")
            connection.rollback()
        if lan == 1:
            context.bot.send_message(chat_id=chat_id, text="Alright. I'll go offline for a while...\nType /st to start again. Miss you.")
        else:
            context.bot.send_message(chat_id=chat_id,
                                     text="好吧. 我离开一段时间...\n输入 /st 召回我哦,会想你的")
    finally:
        cursor.close()
        connection.close()
def push_news(context: CallbackContext):
    """Job-queue callback: deliver up to 5 unpushed news rows to subscribers.

    For each news row with pushed == -1 (oldest first), marks it pushed,
    finds the subscribed chats (all active subscribers for "General" news,
    otherwise the chats subscribed to that code with first == 0), and sends
    the item to every chat whose skip words do not appear in the content.
    """
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    results = None
    try:
        # Oldest unpushed items first; LIMIT 5 bounds work per 30s tick.
        cursor.execute("SELECT content, link, code from news WHERE pushed = ? ORDER BY time ASC LIMIT 5", (-1,))
        results = cursor.fetchall()
    except Exception as e:
        logger.error(e)
        logger.error("Cannot query unpushed news")
    if results:
        for result in results:
            content = result[0]
            link = result[1]
            code = result[2]
            # Mark as pushed up front so a send failure cannot cause the
            # same item to be re-sent on the next tick.
            try:
                cursor.execute("UPDATE news set pushed=? WHERE link = ?", (1, link))
                connection.commit()
            except Exception as e:
                logger.error(e)
                logger.error("Cannot update unpushed news")
                connection.rollback()
            users = None
            try:
                if code == "General":
                    # General news goes to every chat with at least one
                    # active (state=1, first=0) subscription.
                    cursor.execute("SELECT chat_id from codes WHERE state = ? and first = ? GROUP BY chat_id", (1, 0))
                else:
                    # NOTE(review): this filters on first == 0 but not on
                    # state, so chats that /rm-ed the code may still match —
                    # confirm intended behavior.
                    cursor.execute("SELECT chat_id from codes WHERE code = ? and first = ?", (code, 0))
                users = cursor.fetchall()
            except Exception as e:
                logger.error(e)
                logger.error("Cannot query users")
            if users:
                for user in users:
                    id = user[0]
                    words = None
                    try:
                        cursor.execute("SELECT word FROM skip_words WHERE state = ? and chat_id = ?", (1, id))
                        words = cursor.fetchall()
                    except Exception as e:
                        logger.error(e)
                        logger.error("Cannot query skip words")
                    # Suppress the item if any active skip word is a
                    # substring of the content.
                    skip = False
                    if words:
                        for word in words:
                            if word[0] in content:
                                skip = True
                                break
                    if not skip:
                        context.bot.send_message(chat_id=id, text='%s:\n%s\n%s' % (code, content, link))
    cursor.close()
    connection.close()
    # print("Pushed %d news but skip %d" % (len(results) - skip_items, skip_items))
    # logger.info("Pushed %d news but skip %d" % (len(results) - skip_items, skip_items))
def view_codes(update, context):
    """Handle /vc: list the user's active stock-code subscriptions.

    Fixes: uses fetchall() for the code list (fetchone() made the loop
    iterate a single row tuple, printing only the first character of the
    first code); the registration query now selects ``language`` (it used to
    select ``id``, so the language check compared against the row id); the
    sqlite connection is now always closed.
    """
    chat_id = update.message.from_user['id']
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    try:
        result = None
        try:
            cursor.execute("SELECT language FROM users WHERE chat_id = ?", (chat_id,))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query user.")
        if not result:
            context.bot.send_message(chat_id=chat_id, text="You are not registered.\nPlease type /st to register.\n你还没有注册,输入/st 即可注册")
            return
        lan = result[0]
        rows = None
        try:
            cursor.execute("SELECT code FROM codes WHERE chat_id = ? and state = ?", (chat_id, 1))
            rows = cursor.fetchall()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query stock code.")
        if rows:
            if lan == 1:
                text = "I push news of these codes to you:"
            else:
                text = "我目前推送这些代码的公司新闻:"
            for row in rows:
                text += "\n%s" % row[0]
        else:
            text = "Currently do not push any company news to you."
        context.bot.send_message(chat_id=chat_id, text=text)
    finally:
        cursor.close()
        connection.close()
def view_words(update, context):
    """Handle /vw: list the user's active skip words.

    Fixes: uses fetchall() for the word list (fetchone() made the loop print
    only the first character of the first word); the empty branch now assigns
    ``text`` correctly (it used to assign a dead ``code`` variable and then
    send an unbound name, raising UnboundLocalError); the registration query
    selects ``language`` instead of ``id``; the sqlite connection is now
    always closed.
    """
    chat_id = update.message.from_user['id']
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    try:
        result = None
        try:
            cursor.execute("SELECT language FROM users WHERE chat_id = ?", (chat_id,))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query user.")
        if not result:
            context.bot.send_message(chat_id=chat_id, text="You are not registered.\nPlease type /st to register.\n你还没有注册,输入/st 即可注册")
            return
        lan = result[0]
        rows = None
        try:
            cursor.execute("SELECT word FROM skip_words WHERE chat_id = ? and state = ?", (chat_id, 1))
            rows = cursor.fetchall()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query skip words.")
        if rows:
            if lan == 1:
                text = "I skip these key words for now:"
            else:
                text = "我目前不推送包含以下关键词的新闻:"
            for row in rows:
                text += "\n%s" % row[0]
        else:
            text = "Currently do not skip any key words."
        context.bot.send_message(chat_id=chat_id, text=text)
    finally:
        cursor.close()
        connection.close()
def english(update, context):
    """Handle /en: switch the user's reply language to English (language=1).

    Fixes: the sqlite connection is now always closed (the original never
    closed it), and the failure log message no longer reads "Cannot update
    unpushed news" (a copy-paste from push_news).
    """
    chat_id = update.message.from_user['id']
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    try:
        result = None
        try:
            cursor.execute("SELECT id FROM users WHERE chat_id = ?", (chat_id,))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query user.")
        if not result:
            context.bot.send_message(chat_id=chat_id, text="You are not registered.\nPlease type /st to register.\n你还没有注册,输入/st 即可注册")
            return
        try:
            cursor.execute("UPDATE users set language=? WHERE chat_id = ?", (1, chat_id))
            connection.commit()
            context.bot.send_message(chat_id=chat_id, text="Ok, now we speak English.")
        except Exception as e:
            logger.error(e)
            logger.error("Cannot update user language")
            connection.rollback()
    finally:
        cursor.close()
        connection.close()
def chinese(update, context):
    """Handle /cn: switch the user's reply language to Chinese (language=2).

    Fixes: the sqlite connection is now always closed (the original never
    closed it), and the failure log message no longer reads "Cannot update
    unpushed news" (a copy-paste from push_news).
    """
    chat_id = update.message.from_user['id']
    connection = sqlite3.connect('news.db')
    cursor = connection.cursor()
    try:
        result = None
        try:
            cursor.execute("SELECT id FROM users WHERE chat_id = ?", (chat_id,))
            result = cursor.fetchone()
        except Exception as e:
            logger.error(e)
            logger.error("Cannot query user.")
        if not result:
            context.bot.send_message(chat_id=chat_id, text="You are not registered.\nPlease type /st to register.\n你还没有注册,输入/st 即可注册")
            return
        try:
            cursor.execute("UPDATE users set language=? WHERE chat_id = ?", (2, chat_id))
            connection.commit()
            context.bot.send_message(chat_id=chat_id, text="好的,我们开始讲中文吧")
        except Exception as e:
            logger.error(e)
            logger.error("Cannot update user language")
            connection.rollback()
    finally:
        cursor.close()
        connection.close()
# NOTE(review): the bot token is empty — it must be filled in (ideally loaded
# from an environment variable or config file, not committed) before the bot
# can authenticate with Telegram.
updater = Updater('', use_context=True)
# Run the news-delivery job every 30 seconds, starting immediately.
updater.job_queue.run_repeating(push_news, interval=30, first=0)
# Command -> handler wiring. /m, /menu and /start all show the menu;
# registration itself happens via /st.
updater.dispatcher.add_handler(CommandHandler('m', menu))
updater.dispatcher.add_handler(CommandHandler('menu', menu))
updater.dispatcher.add_handler(CommandHandler('start', menu))
updater.dispatcher.add_handler(CommandHandler('a', add))
updater.dispatcher.add_handler(CommandHandler('rm', remove))
updater.dispatcher.add_handler(CommandHandler('sk', skip))
updater.dispatcher.add_handler(CommandHandler('ca', cancel))
updater.dispatcher.add_handler(CommandHandler('st', start))
updater.dispatcher.add_handler(CommandHandler('sp', stop))
updater.dispatcher.add_handler(CommandHandler('vc', view_codes))
updater.dispatcher.add_handler(CommandHandler('vw', view_words))
updater.dispatcher.add_handler(CommandHandler('en', english))
updater.dispatcher.add_handler(CommandHandler('cn', chinese))
# Long-poll Telegram for updates and block the main thread until shutdown.
updater.start_polling()
updater.idle()
| 38.241319
| 143
| 0.574613
| 2,743
| 22,027
| 4.554502
| 0.091141
| 0.040343
| 0.069159
| 0.051869
| 0.8211
| 0.781237
| 0.764108
| 0.761546
| 0.710398
| 0.699912
| 0
| 0.009333
| 0.304399
| 22,027
| 575
| 144
| 38.307826
| 0.806031
| 0.007672
| 0
| 0.721223
| 0
| 0.005396
| 0.256017
| 0.011302
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021583
| false
| 0
| 0.010791
| 0
| 0.06295
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
197ac78649a96ab8126c3168dce58831acfc07b3
| 190
|
py
|
Python
|
ptstat/__init__.py
|
timmyzhao/ptstat
|
0401203e5b6053df6d62b2af9ab4b831f1b41660
|
[
"MIT"
] | 116
|
2017-06-23T07:21:28.000Z
|
2022-03-11T20:27:22.000Z
|
ptstat/__init__.py
|
afcarl/ptstat
|
0401203e5b6053df6d62b2af9ab4b831f1b41660
|
[
"MIT"
] | 2
|
2017-06-23T22:17:56.000Z
|
2017-06-28T07:31:40.000Z
|
ptstat/__init__.py
|
afcarl/ptstat
|
0401203e5b6053df6d62b2af9ab4b831f1b41660
|
[
"MIT"
] | 12
|
2017-06-25T02:59:59.000Z
|
2020-07-19T21:33:45.000Z
|
from .core import *
from ptstat.dist.normal import Normal
from ptstat.dist.categorical import Categorical
from ptstat.dist.bernoulli import Bernoulli
from ptstat.dist.uniform import Uniform
| 31.666667
| 47
| 0.842105
| 27
| 190
| 5.925926
| 0.333333
| 0.25
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 190
| 5
| 48
| 38
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
19818936bd9b3f7f4b7e5cfa386ae7d0c0308fe9
| 23,930
|
gyp
|
Python
|
starboard_platform.gyp
|
moorthy-bs/rpi-cobalt-wayland
|
8fb89b96ec2ab825dfd7eb4b5135575680085a87
|
[
"Apache-2.0"
] | 2
|
2019-10-02T14:37:47.000Z
|
2019-10-16T15:32:11.000Z
|
starboard_platform.gyp
|
stagingrdkm/rpi-cobalt-wayland
|
b12e884ddf6d05cd9d048fba225b84b59c9d958c
|
[
"Apache-2.0"
] | 1
|
2021-01-27T12:10:21.000Z
|
2021-01-27T12:10:21.000Z
|
starboard_platform.gyp
|
moorthy-bs/rpi-cobalt-wayland
|
8fb89b96ec2ab825dfd7eb4b5135575680085a87
|
[
"Apache-2.0"
] | 2
|
2019-10-16T15:32:12.000Z
|
2020-07-30T15:23:03.000Z
|
# Copyright 2015 The Cobalt Authors. All Rights Reserved.
# Copyright 2019 RDK Management
# Copyright 2019 Liberty Global B.V.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
{
'variables': {
'sb_pedantic_warnings': 1,
},
'targets': [
{
'target_name': 'starboard_base_symbolize',
'type': 'static_library',
'sources': [
'<(DEPTH)/base/third_party/symbolize/demangle.cc',
'<(DEPTH)/base/third_party/symbolize/symbolize.cc',
],
},
{
'target_name': 'starboard_platform',
'type': 'static_library',
'sources': [
'<(DEPTH)/starboard/linux/shared/atomic_public.h',
'<(DEPTH)/starboard/linux/shared/configuration_public.h',
'<(DEPTH)/starboard/linux/shared/system_get_connection_type.cc',
'<(DEPTH)/starboard/linux/shared/system_get_device_type.cc',
'<(DEPTH)/starboard/linux/shared/system_get_path.cc',
'<(DEPTH)/starboard/linux/shared/system_has_capability.cc',
'<(DEPTH)/third_party/starboard/raspi/wayland/main.cc',
        '<(DEPTH)/third_party/starboard/raspi/wayland/system_get_property.cc',
'<(DEPTH)/third_party/starboard/raspi/wayland/thread_create_priority.cc',
'<(DEPTH)/third_party/starboard/raspi/wayland/application_wayland.cc',
'<(DEPTH)/third_party/starboard/raspi/wayland/abstract_decoder.cc',
'<(DEPTH)/third_party/starboard/raspi/wayland/audio_decoder.cc',
'<(DEPTH)/third_party/starboard/raspi/wayland/cobalt_source.cc',
'<(DEPTH)/third_party/starboard/raspi/wayland/video_decoder.cc',
'<(DEPTH)/third_party/starboard/raspi/wayland/player_interface.cc',
'<(DEPTH)/third_party/starboard/raspi/wayland/player_private.cc',
'<(DEPTH)/starboard/shared/starboard/link_receiver.cc',
'<(DEPTH)/starboard/shared/wayland/dev_input.cc',
'<(DEPTH)/starboard/shared/wayland/egl_workaround.cc',
'<(DEPTH)/starboard/shared/wayland/native_display_type.cc',
'<(DEPTH)/starboard/shared/wayland/window_create.cc',
'<(DEPTH)/starboard/shared/wayland/window_destroy.cc',
'<(DEPTH)/starboard/shared/wayland/window_get_platform_handle.cc',
'<(DEPTH)/starboard/shared/wayland/window_get_size.cc',
'<(DEPTH)/starboard/shared/wayland/window_internal.cc',
'<(DEPTH)/starboard/shared/egl/system_egl.cc',
'<(DEPTH)/starboard/raspi/shared/system_gles2.cc',
'<(DEPTH)/starboard/shared/stub/system_sign_with_certification_secret_key.cc',
'<(DEPTH)/starboard/shared/stub/cpu_features_get.cc',
'<(DEPTH)/starboard/shared/starboard/log_mutex.cc',
'<(DEPTH)/starboard/shared/dlmalloc/memory_allocate_aligned_unchecked.cc',
'<(DEPTH)/starboard/shared/dlmalloc/memory_allocate_unchecked.cc',
'<(DEPTH)/starboard/shared/dlmalloc/memory_free.cc',
'<(DEPTH)/starboard/shared/dlmalloc/memory_free_aligned.cc',
'<(DEPTH)/starboard/shared/dlmalloc/memory_map.cc',
'<(DEPTH)/starboard/shared/dlmalloc/memory_protect.cc',
'<(DEPTH)/starboard/shared/dlmalloc/memory_reallocate_unchecked.cc',
'<(DEPTH)/starboard/shared/dlmalloc/memory_unmap.cc',
'<(DEPTH)/starboard/shared/gcc/atomic_gcc.h',
'<(DEPTH)/starboard/shared/iso/character_is_alphanumeric.cc',
'<(DEPTH)/starboard/shared/iso/character_is_digit.cc',
'<(DEPTH)/starboard/shared/iso/character_is_hex_digit.cc',
'<(DEPTH)/starboard/shared/iso/character_is_space.cc',
'<(DEPTH)/starboard/shared/iso/character_is_upper.cc',
'<(DEPTH)/starboard/shared/iso/character_to_lower.cc',
'<(DEPTH)/starboard/shared/iso/character_to_upper.cc',
'<(DEPTH)/starboard/shared/iso/directory_close.cc',
'<(DEPTH)/starboard/shared/iso/directory_get_next.cc',
'<(DEPTH)/starboard/shared/iso/directory_open.cc',
'<(DEPTH)/starboard/shared/iso/double_absolute.cc',
'<(DEPTH)/starboard/shared/iso/double_exponent.cc',
'<(DEPTH)/starboard/shared/iso/double_floor.cc',
'<(DEPTH)/starboard/shared/iso/double_is_finite.cc',
'<(DEPTH)/starboard/shared/iso/double_is_nan.cc',
'<(DEPTH)/starboard/shared/iso/memory_compare.cc',
'<(DEPTH)/starboard/shared/iso/memory_copy.cc',
'<(DEPTH)/starboard/shared/iso/memory_find_byte.cc',
'<(DEPTH)/starboard/shared/iso/memory_move.cc',
'<(DEPTH)/starboard/shared/iso/memory_set.cc',
'<(DEPTH)/starboard/shared/iso/string_compare.cc',
'<(DEPTH)/starboard/shared/iso/string_compare_all.cc',
'<(DEPTH)/starboard/shared/iso/string_find_character.cc',
'<(DEPTH)/starboard/shared/iso/string_find_last_character.cc',
'<(DEPTH)/starboard/shared/iso/string_find_string.cc',
'<(DEPTH)/starboard/shared/iso/string_get_length.cc',
'<(DEPTH)/starboard/shared/iso/string_get_length_wide.cc',
'<(DEPTH)/starboard/shared/iso/string_parse_double.cc',
'<(DEPTH)/starboard/shared/iso/string_parse_signed_integer.cc',
'<(DEPTH)/starboard/shared/iso/string_parse_uint64.cc',
'<(DEPTH)/starboard/shared/iso/string_parse_unsigned_integer.cc',
'<(DEPTH)/starboard/shared/iso/string_scan.cc',
'<(DEPTH)/starboard/shared/iso/system_binary_search.cc',
'<(DEPTH)/starboard/shared/iso/system_sort.cc',
'<(DEPTH)/starboard/shared/libevent/socket_waiter_add.cc',
'<(DEPTH)/starboard/shared/libevent/socket_waiter_create.cc',
'<(DEPTH)/starboard/shared/libevent/socket_waiter_destroy.cc',
'<(DEPTH)/starboard/shared/libevent/socket_waiter_internal.cc',
'<(DEPTH)/starboard/shared/libevent/socket_waiter_remove.cc',
'<(DEPTH)/starboard/shared/libevent/socket_waiter_wait.cc',
'<(DEPTH)/starboard/shared/libevent/socket_waiter_wait_timed.cc',
'<(DEPTH)/starboard/shared/libevent/socket_waiter_wake_up.cc',
'<(DEPTH)/starboard/shared/linux/byte_swap.cc',
'<(DEPTH)/starboard/shared/linux/get_home_directory.cc',
'<(DEPTH)/starboard/shared/linux/memory_get_stack_bounds.cc',
'<(DEPTH)/starboard/shared/linux/page_internal.cc',
'<(DEPTH)/starboard/shared/linux/socket_get_interface_address.cc',
'<(DEPTH)/starboard/shared/linux/system_get_random_data.cc',
'<(DEPTH)/starboard/shared/linux/system_get_stack.cc',
'<(DEPTH)/starboard/shared/linux/system_get_total_cpu_memory.cc',
'<(DEPTH)/starboard/shared/linux/system_get_used_cpu_memory.cc',
'<(DEPTH)/starboard/shared/linux/system_is_debugger_attached.cc',
'<(DEPTH)/starboard/shared/linux/system_symbolize.cc',
'<(DEPTH)/starboard/shared/linux/thread_get_id.cc',
'<(DEPTH)/starboard/shared/linux/thread_get_name.cc',
'<(DEPTH)/starboard/shared/linux/thread_set_name.cc',
'<(DEPTH)/starboard/shared/nouser/user_get_current.cc',
'<(DEPTH)/starboard/shared/nouser/user_get_property.cc',
'<(DEPTH)/starboard/shared/nouser/user_get_signed_in.cc',
'<(DEPTH)/starboard/shared/nouser/user_internal.cc',
'<(DEPTH)/starboard/shared/posix/directory_create.cc',
'<(DEPTH)/starboard/shared/posix/file_can_open.cc',
'<(DEPTH)/starboard/shared/posix/file_close.cc',
'<(DEPTH)/starboard/shared/posix/file_delete.cc',
'<(DEPTH)/starboard/shared/posix/file_exists.cc',
'<(DEPTH)/starboard/shared/posix/file_flush.cc',
'<(DEPTH)/starboard/shared/posix/file_get_info.cc',
'<(DEPTH)/starboard/shared/posix/file_get_path_info.cc',
'<(DEPTH)/starboard/shared/posix/file_open.cc',
'<(DEPTH)/starboard/shared/posix/file_read.cc',
'<(DEPTH)/starboard/shared/posix/file_seek.cc',
'<(DEPTH)/starboard/shared/posix/file_truncate.cc',
'<(DEPTH)/starboard/shared/posix/file_write.cc',
'<(DEPTH)/starboard/shared/posix/log.cc',
'<(DEPTH)/starboard/shared/posix/log_flush.cc',
'<(DEPTH)/starboard/shared/posix/log_format.cc',
'<(DEPTH)/starboard/shared/posix/log_is_tty.cc',
'<(DEPTH)/starboard/shared/posix/log_raw.cc',
'<(DEPTH)/starboard/shared/posix/memory_flush.cc',
'<(DEPTH)/starboard/shared/posix/set_non_blocking_internal.cc',
'<(DEPTH)/starboard/shared/posix/socket_accept.cc',
'<(DEPTH)/starboard/shared/posix/socket_bind.cc',
'<(DEPTH)/starboard/shared/posix/socket_clear_last_error.cc',
'<(DEPTH)/starboard/shared/posix/socket_connect.cc',
'<(DEPTH)/starboard/shared/posix/socket_create.cc',
'<(DEPTH)/starboard/shared/posix/socket_destroy.cc',
'<(DEPTH)/starboard/shared/posix/socket_free_resolution.cc',
'<(DEPTH)/starboard/shared/posix/socket_get_last_error.cc',
'<(DEPTH)/starboard/shared/posix/socket_get_local_address.cc',
'<(DEPTH)/starboard/shared/posix/socket_internal.cc',
'<(DEPTH)/starboard/shared/posix/socket_is_connected.cc',
'<(DEPTH)/starboard/shared/posix/socket_is_connected_and_idle.cc',
'<(DEPTH)/starboard/shared/posix/socket_join_multicast_group.cc',
'<(DEPTH)/starboard/shared/posix/socket_listen.cc',
'<(DEPTH)/starboard/shared/posix/socket_receive_from.cc',
'<(DEPTH)/starboard/shared/posix/socket_resolve.cc',
'<(DEPTH)/starboard/shared/posix/socket_send_to.cc',
'<(DEPTH)/starboard/shared/posix/socket_set_broadcast.cc',
'<(DEPTH)/starboard/shared/posix/socket_set_receive_buffer_size.cc',
'<(DEPTH)/starboard/shared/posix/socket_set_reuse_address.cc',
'<(DEPTH)/starboard/shared/posix/socket_set_send_buffer_size.cc',
'<(DEPTH)/starboard/shared/posix/socket_set_tcp_keep_alive.cc',
'<(DEPTH)/starboard/shared/posix/socket_set_tcp_no_delay.cc',
'<(DEPTH)/starboard/shared/posix/socket_set_tcp_window_scaling.cc',
'<(DEPTH)/starboard/shared/posix/storage_write_record.cc',
'<(DEPTH)/starboard/shared/posix/string_compare_no_case.cc',
'<(DEPTH)/starboard/shared/posix/string_compare_no_case_n.cc',
'<(DEPTH)/starboard/shared/posix/string_compare_wide.cc',
'<(DEPTH)/starboard/shared/posix/string_format.cc',
'<(DEPTH)/starboard/shared/posix/string_format_wide.cc',
'<(DEPTH)/starboard/shared/posix/system_break_into_debugger.cc',
'<(DEPTH)/starboard/shared/posix/system_clear_last_error.cc',
'<(DEPTH)/starboard/shared/posix/system_get_error_string.cc',
'<(DEPTH)/starboard/shared/posix/system_get_last_error.cc',
'<(DEPTH)/starboard/shared/posix/system_get_locale_id.cc',
'<(DEPTH)/starboard/shared/posix/system_get_number_of_processors.cc',
'<(DEPTH)/starboard/shared/posix/thread_sleep.cc',
'<(DEPTH)/starboard/shared/posix/time_get_monotonic_now.cc',
'<(DEPTH)/starboard/shared/posix/time_get_monotonic_thread_now.cc',
'<(DEPTH)/starboard/shared/posix/time_get_now.cc',
'<(DEPTH)/starboard/shared/posix/time_zone_get_current.cc',
'<(DEPTH)/starboard/shared/posix/time_zone_get_name.cc',
'<(DEPTH)/starboard/shared/pthread/condition_variable_broadcast.cc',
'<(DEPTH)/starboard/shared/pthread/condition_variable_create.cc',
'<(DEPTH)/starboard/shared/pthread/condition_variable_destroy.cc',
'<(DEPTH)/starboard/shared/pthread/condition_variable_signal.cc',
'<(DEPTH)/starboard/shared/pthread/condition_variable_wait.cc',
'<(DEPTH)/starboard/shared/pthread/condition_variable_wait_timed.cc',
'<(DEPTH)/starboard/shared/pthread/mutex_acquire.cc',
'<(DEPTH)/starboard/shared/pthread/mutex_acquire_try.cc',
'<(DEPTH)/starboard/shared/pthread/mutex_create.cc',
'<(DEPTH)/starboard/shared/pthread/mutex_destroy.cc',
'<(DEPTH)/starboard/shared/pthread/mutex_release.cc',
'<(DEPTH)/starboard/shared/pthread/once.cc',
'<(DEPTH)/starboard/shared/pthread/thread_context_get_pointer.cc',
'<(DEPTH)/starboard/shared/pthread/thread_context_internal.h',
'<(DEPTH)/starboard/shared/pthread/thread_create.cc',
'<(DEPTH)/starboard/shared/pthread/thread_create_local_key.cc',
'<(DEPTH)/starboard/shared/pthread/thread_create_priority.h',
'<(DEPTH)/starboard/shared/pthread/thread_destroy_local_key.cc',
'<(DEPTH)/starboard/shared/pthread/thread_detach.cc',
'<(DEPTH)/starboard/shared/pthread/thread_get_current.cc',
'<(DEPTH)/starboard/shared/pthread/thread_get_local_value.cc',
'<(DEPTH)/starboard/shared/pthread/thread_is_equal.cc',
'<(DEPTH)/starboard/shared/pthread/thread_join.cc',
'<(DEPTH)/starboard/shared/pthread/thread_sampler_create.cc',
'<(DEPTH)/starboard/shared/pthread/thread_sampler_destroy.cc',
'<(DEPTH)/starboard/shared/pthread/thread_sampler_freeze.cc',
'<(DEPTH)/starboard/shared/pthread/thread_sampler_internal.cc',
'<(DEPTH)/starboard/shared/pthread/thread_sampler_internal.h',
'<(DEPTH)/starboard/shared/pthread/thread_sampler_is_supported.cc',
'<(DEPTH)/starboard/shared/pthread/thread_sampler_thaw.cc',
'<(DEPTH)/starboard/shared/pthread/thread_set_local_value.cc',
'<(DEPTH)/starboard/shared/pthread/thread_yield.cc',
'<(DEPTH)/starboard/shared/signal/crash_signals.cc',
'<(DEPTH)/starboard/shared/signal/crash_signals.h',
'<(DEPTH)/starboard/shared/signal/suspend_signals.cc',
'<(DEPTH)/starboard/shared/signal/suspend_signals.h',
'<(DEPTH)/starboard/shared/starboard/application.cc',
'<(DEPTH)/starboard/shared/alsa/alsa_audio_sink_type.cc',
'<(DEPTH)/starboard/shared/alsa/alsa_util.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/audio_sink_create.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/audio_sink_get_min_buffer_size_in_frames.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/audio_sink_destroy.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/audio_sink_get_max_channels_5_1.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/audio_sink_get_nearest_supported_sample_frequency.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/audio_sink_is_audio_frame_storage_type_supported_interleaved_only.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/audio_sink_is_audio_sample_type_supported_float32_only.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/audio_sink_internal.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/audio_sink_internal.h',
'<(DEPTH)/starboard/shared/starboard/audio_sink/audio_sink_is_valid.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/stub_audio_sink_type.cc',
'<(DEPTH)/starboard/shared/starboard/audio_sink/stub_audio_sink_type.h',
'<(DEPTH)/starboard/raspi/shared/audio_sink_type_dispatcher.cc',
'<(DEPTH)/starboard/shared/starboard/command_line.cc',
'<(DEPTH)/starboard/shared/starboard/command_line.h',
'<(DEPTH)/starboard/shared/starboard/directory_can_open.cc',
'<(DEPTH)/starboard/shared/starboard/event_cancel.cc',
'<(DEPTH)/starboard/shared/starboard/event_schedule.cc',
'<(DEPTH)/starboard/shared/starboard/file_mode_string_to_flags.cc',
'<(DEPTH)/starboard/shared/starboard/file_storage/storage_close_record.cc',
'<(DEPTH)/starboard/shared/starboard/file_storage/storage_delete_record.cc',
'<(DEPTH)/starboard/shared/starboard/file_storage/storage_get_record_size.cc',
'<(DEPTH)/starboard/shared/starboard/file_storage/storage_open_record.cc',
'<(DEPTH)/starboard/shared/starboard/file_storage/storage_read_record.cc',
'<(DEPTH)/starboard/shared/starboard/log_message.cc',
'<(DEPTH)/starboard/shared/starboard/log_raw_dump_stack.cc',
'<(DEPTH)/starboard/shared/starboard/log_raw_format.cc',
'<(DEPTH)/starboard/shared/starboard/media/codec_util.cc',
'<(DEPTH)/starboard/shared/starboard/media/codec_util.h',
'<(DEPTH)/starboard/shared/starboard/media/media_can_play_mime_and_key_system.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_audio_buffer_budget.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_audio_configuration_5_1.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_audio_output_count_single_audio_output.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_buffer_alignment.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_buffer_allocation_unit.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_buffer_garbage_collection_duration_threshold.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_buffer_padding.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_buffer_storage_type.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_initial_buffer_capacity.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_max_buffer_capacity.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_progressive_buffer_budget.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_get_video_buffer_budget.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_is_buffer_pool_allocate_on_demand.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_is_buffer_using_memory_pool.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_is_output_protected.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_is_transfer_characteristics_supported.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_set_output_protection.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_util.cc',
'<(DEPTH)/starboard/shared/starboard/media/media_util.h',
'<(DEPTH)/starboard/shared/starboard/media/mime_type.cc',
'<(DEPTH)/starboard/shared/starboard/media/mime_type.h',
'<(DEPTH)/starboard/shared/starboard/new.cc',
'<(DEPTH)/starboard/shared/starboard/player/decoded_audio_internal.cc',
'<(DEPTH)/starboard/shared/starboard/player/decoded_audio_internal.h',
'<(DEPTH)/starboard/shared/starboard/player/input_buffer_internal.cc',
'<(DEPTH)/starboard/shared/starboard/player/input_buffer_internal.h',
'<(DEPTH)/starboard/shared/starboard/player/job_queue.cc',
'<(DEPTH)/starboard/shared/starboard/player/job_queue.h',
'<(DEPTH)/starboard/shared/starboard/player/player_get_info2.cc',
'<(DEPTH)/starboard/shared/starboard/player/player_get_maximum_number_of_samples_per_write.cc',
'<(DEPTH)/starboard/shared/starboard/player/player_internal.h',
'<(DEPTH)/starboard/shared/starboard/player/player_seek2.cc',
'<(DEPTH)/starboard/shared/starboard/player/player_worker.cc',
'<(DEPTH)/starboard/shared/starboard/player/player_worker.h',
'<(DEPTH)/starboard/shared/starboard/player/filter/audio_decoder_internal.h',
'<(DEPTH)/starboard/shared/starboard/player/filter/audio_frame_tracker.cc',
'<(DEPTH)/starboard/shared/starboard/player/filter/audio_frame_tracker.h',
'<(DEPTH)/starboard/shared/starboard/player/filter/audio_time_stretcher.cc',
'<(DEPTH)/starboard/shared/starboard/player/filter/audio_time_stretcher.h',
'<(DEPTH)/starboard/shared/starboard/player/filter/decoded_audio_queue.cc',
'<(DEPTH)/starboard/shared/starboard/player/filter/filter_based_player_worker_handler.cc',
'<(DEPTH)/starboard/shared/starboard/player/filter/cpu_video_frame.cc',
'<(DEPTH)/starboard/shared/starboard/player/filter/wsola_internal.cc',
'<(DEPTH)/starboard/shared/stub/decode_target_get_info.cc',
'<(DEPTH)/starboard/shared/stub/decode_target_release.cc',
'<(DEPTH)/starboard/shared/starboard/queue_application.cc',
'<(DEPTH)/starboard/shared/starboard/string_concat.cc',
'<(DEPTH)/starboard/shared/starboard/string_concat_wide.cc',
'<(DEPTH)/starboard/shared/starboard/string_copy.cc',
'<(DEPTH)/starboard/shared/starboard/string_copy_wide.cc',
'<(DEPTH)/starboard/shared/starboard/string_duplicate.cc',
'<(DEPTH)/starboard/shared/starboard/system_get_random_uint64.cc',
'<(DEPTH)/starboard/shared/starboard/system_request_pause.cc',
'<(DEPTH)/starboard/shared/starboard/system_request_stop.cc',
'<(DEPTH)/starboard/shared/starboard/system_request_suspend.cc',
'<(DEPTH)/starboard/shared/starboard/system_request_unpause.cc',
'<(DEPTH)/starboard/shared/starboard/system_supports_resume.cc',
'<(DEPTH)/starboard/shared/starboard/window_set_default_options.cc',
'<(DEPTH)/starboard/shared/stub/image_is_decode_supported.cc',
'<(DEPTH)/starboard/shared/stub/image_decode.cc',
'<(DEPTH)/starboard/shared/stub/accessibility_get_display_settings.cc',
'<(DEPTH)/starboard/shared/stub/accessibility_get_text_to_speech_settings.cc',
'<(DEPTH)/starboard/shared/stub/cryptography_create_transformer.cc',
'<(DEPTH)/starboard/shared/stub/cryptography_destroy_transformer.cc',
'<(DEPTH)/starboard/shared/stub/cryptography_get_tag.cc',
'<(DEPTH)/starboard/shared/stub/cryptography_set_authenticated_data.cc',
'<(DEPTH)/starboard/shared/stub/cryptography_set_initialization_vector.cc',
'<(DEPTH)/starboard/shared/stub/cryptography_transform.cc',
'<(DEPTH)/starboard/shared/stub/drm_close_session.cc',
'<(DEPTH)/starboard/shared/stub/drm_create_system.cc',
'<(DEPTH)/starboard/shared/stub/drm_destroy_system.cc',
'<(DEPTH)/starboard/shared/stub/drm_generate_session_update_request.cc',
'<(DEPTH)/starboard/shared/stub/drm_is_server_certificate_updatable.cc',
'<(DEPTH)/starboard/shared/stub/drm_system_internal.h',
'<(DEPTH)/starboard/shared/stub/drm_update_server_certificate.cc',
'<(DEPTH)/starboard/shared/stub/drm_update_session.cc',
'<(DEPTH)/starboard/shared/stub/media_is_supported.cc',
'<(DEPTH)/starboard/shared/stub/media_set_audio_write_duration.cc',
'<(DEPTH)/starboard/shared/stub/system_get_extensions.cc',
'<(DEPTH)/starboard/shared/stub/system_get_total_gpu_memory.cc',
'<(DEPTH)/starboard/shared/stub/system_get_used_gpu_memory.cc',
'<(DEPTH)/starboard/shared/stub/system_hide_splash_screen.cc',
'<(DEPTH)/starboard/shared/stub/system_raise_platform_error.cc',
'<(DEPTH)/starboard/shared/stub/ui_nav_get_interface.cc',
'<(DEPTH)/starboard/shared/stub/window_get_diagonal_size_in_inches.cc',
],
'defines': [
# This must be defined when building Starboard, and must not when
# building Starboard client code.
'STARBOARD_IMPLEMENTATION',
],
'dependencies': [
'<(DEPTH)/starboard/common/common.gyp:common',
'<(DEPTH)/third_party/dlmalloc/dlmalloc.gyp:dlmalloc',
'<(DEPTH)/third_party/libevent/libevent.gyp:libevent',
'starboard_base_symbolize',
],
'cflags': [
# Generated by Audio Renderer and Audio Sink implementations.
'-Wno-reorder',
# Generated by many starboard implementation files.
'-Wno-unused-variable',
],
},
],
'target_defaults': {
'include_dirs!': [
'<(DEPTH)/third_party/khronos',
],
},
}
| 63.813333
| 126
| 0.705976
| 2,905
| 23,930
| 5.560069
| 0.146988
| 0.26523
| 0.367756
| 0.377291
| 0.808383
| 0.746347
| 0.564203
| 0.273403
| 0.13181
| 0.045319
| 0
| 0.001452
| 0.136523
| 23,930
| 374
| 127
| 63.983957
| 0.780235
| 0.035437
| 0
| 0.031073
| 0
| 0
| 0.817665
| 0.808516
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5ff617436c48f6593d2a726d107a635920af03be
| 3,173
|
py
|
Python
|
src/SimilarNeuron/utils/container.py
|
luxuncang/similar-neuron
|
b0bab30270e768ec70551d26d709e692d00f62a9
|
[
"MIT"
] | 4
|
2021-12-02T15:54:09.000Z
|
2021-12-09T14:22:05.000Z
|
src/SimilarNeuron/utils/container.py
|
luxuncang/similar-neuron
|
b0bab30270e768ec70551d26d709e692d00f62a9
|
[
"MIT"
] | 13
|
2021-12-03T07:04:39.000Z
|
2022-03-09T08:38:39.000Z
|
src/SimilarNeuron/utils/container.py
|
luxuncang/similar-neuron
|
b0bab30270e768ec70551d26d709e692d00f62a9
|
[
"MIT"
] | 1
|
2021-12-13T14:39:12.000Z
|
2021-12-13T14:39:12.000Z
|
from datetime import datetime, timedelta
class TimeBoundCache():
    '''Key-value cache container whose entries expire after a time limit.

    Each entry stores a key, a value and an absolute expiry instant
    ('puttime'); expired entries are purged lazily by every operation
    via _gcCache().  Multiple entries may share the same key (add() does
    not replace; use updata() for replace semantics).
    '''

    def __init__(self, timeout: int = 1800):
        # List of {'k': key, 'v': value, 'puttime': expiry datetime} entries,
        # kept in insertion order.
        self._container = []
        # Default time-to-live in seconds, used when add()/updata() receive
        # no explicit timeout.
        self._timeout = timeout

    def add(self, k, v, timeout: int = None):
        '''Append a (k, v) entry expiring after `timeout` seconds
        (instance default when None).'''
        # PEP 8: compare against None with `is`, not `==`.
        ttl = self._timeout if timeout is None else timeout
        self._container.append(
            {'k': k, 'v': v, 'puttime': datetime.now() + timedelta(seconds=ttl)})
        self._gcCache()

    def updata(self, k, v, timeout: int = None):
        '''Replace any entries stored under k with a fresh (k, v) entry.

        NOTE(review): the name looks like a typo for "update"; kept
        unchanged for backward compatibility with existing callers.
        '''
        self.put(k)
        self.add(k, v, timeout)
        self._gcCache()

    def put(self, k):
        '''Remove every entry stored under key k.

        NOTE(review): despite its name this deletes rather than inserts.
        '''
        self._container = [item for item in self._container if item['k'] != k]
        self._gcCache()

    def items(self):
        '''Yield (key, value) pairs of the live entries, in insertion order.'''
        self._gcCache()
        for item in self._container:
            yield item['k'], item['v']

    def __len__(self):
        # Number of non-expired entries.
        self._gcCache()
        return len(self._container)

    def __iter__(self):
        # Yields one single-pair dict {key: value} per live entry.
        self._gcCache()
        return ({item['k']: item['v']} for item in self._container)

    def __getitem__(self, index):
        # Slice -> list of {key: value} dicts by position;
        # anything else is treated as a key -> list of matching values.
        self._gcCache()
        if isinstance(index, slice):
            return [{item['k']: item['v']} for item in self._container][index]
        else:
            return [item['v'] for item in self._container if item['k'] == index]

    def __delitem__(self, index):
        # Deletes by KEY (not position), mirroring put().
        self._gcCache()
        self._container = [item for item in self._container if item['k'] != index]

    def __contains__(self, k):
        self._gcCache()
        return k in [item['k'] for item in self._container]

    def __bool__(self):
        # Truthy while any entry is stored; note: does NOT purge expired
        # entries first (original behavior preserved).
        return bool(self._container)

    def _gcCache(self):
        # Drop every entry whose expiry instant has passed.
        self._container = [item for item in self._container
                           if item['puttime'] > datetime.now()]
class TimeList():
    '''List-like cache container whose items expire after a time limit.

    Each item is stored with an absolute expiry instant ('puttime');
    expired items are purged lazily by every operation via _gcCache().
    '''

    def __init__(self, timeout: int = 1800):
        # List of {'v': value, 'puttime': expiry datetime} entries,
        # kept in insertion order.
        self._container = []
        # Default time-to-live in seconds, used when add()/updata() receive
        # no explicit timeout.
        self._timeout = timeout

    def add(self, v, timeout: int = None):
        '''Append value v, expiring after `timeout` seconds
        (instance default when None).'''
        # PEP 8: compare against None with `is`, not `==`.
        ttl = self._timeout if timeout is None else timeout
        self._container.append(
            {'v': v, 'puttime': datetime.now() + timedelta(seconds=ttl)})
        self._gcCache()

    def updata(self, v, timeout: int = None):
        '''Remove existing occurrences of v, then re-add it with a fresh expiry.

        NOTE(review): the name looks like a typo for "update"; kept
        unchanged for backward compatibility with existing callers.
        '''
        self.put(v)
        self.add(v, timeout)
        self._gcCache()

    def put(self, v):
        '''Remove every occurrence of value v.

        NOTE(review): despite its name this deletes rather than inserts.
        '''
        self._container = [item for item in self._container if item['v'] != v]
        self._gcCache()

    def __len__(self):
        # Number of non-expired items.
        self._gcCache()
        return len(self._container)

    def __iter__(self):
        self._gcCache()
        return (item['v'] for item in self._container)

    def __getitem__(self, index):
        self._gcCache()
        if isinstance(index, slice):
            return [item['v'] for item in self._container][index]
        # Bug fix: integer indexing previously fell off the end of the
        # method and silently returned None; now it returns the stored
        # value at that position (consistent with TimeBoundCache, and
        # raises IndexError for out-of-range positions like a list).
        return self._container[index]['v']

    def __delitem__(self, index):
        # Deletes by POSITION, like a plain list.
        self._gcCache()
        del self._container[index]

    def __contains__(self, v):
        self._gcCache()
        return v in [item['v'] for item in self._container]

    def __bool__(self):
        # Truthy while any item is stored; note: does NOT purge expired
        # items first (original behavior preserved).
        return bool(self._container)

    def _gcCache(self):
        # Drop every item whose expiry instant has passed.
        self._container = [item for item in self._container
                           if item['puttime'] > datetime.now()]
| 28.330357
| 99
| 0.534825
| 384
| 3,173
| 4.143229
| 0.119792
| 0.220616
| 0.049026
| 0.08171
| 0.837838
| 0.825896
| 0.796983
| 0.724701
| 0.69956
| 0.69956
| 0
| 0.003774
| 0.331863
| 3,173
| 111
| 100
| 28.585586
| 0.746698
| 0.005673
| 0
| 0.626506
| 0
| 0
| 0.015172
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.277108
| false
| 0
| 0.012048
| 0.024096
| 0.445783
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5ff6e9749291dde8dc42452f9fd12a06e7e1973c
| 4,906
|
py
|
Python
|
apps/genres/tests/test_api/private/test_valid_requests.py
|
GiannisClipper/payments
|
94e08144597b3f4cd0de8485edf3f5535aeb9da6
|
[
"MIT"
] | null | null | null |
apps/genres/tests/test_api/private/test_valid_requests.py
|
GiannisClipper/payments
|
94e08144597b3f4cd0de8485edf3f5535aeb9da6
|
[
"MIT"
] | null | null | null |
apps/genres/tests/test_api/private/test_valid_requests.py
|
GiannisClipper/payments
|
94e08144597b3f4cd0de8485edf3f5535aeb9da6
|
[
"MIT"
] | null | null | null |
from unittest import skip # noqa: F401
from rest_framework import status
from genres.tests.test_api import ROOT_URL, BY_ID_1_URL, LIST_URL
from . import AdminPrivateGenresAPITests, OwnerPrivateGenresAPITests
class AdminRequests(AdminPrivateGenresAPITests):
    '''Test admin's valid requests to genres API.'''

    # Signed admin has id > 1 other than owner's id in samples (funds, genres)

    def test_post(self):
        payload = self.samples['genres'][11]
        resp = self.api_request(ROOT_URL, 'POST', payload=payload, token=self.token)
        self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
        self.assertIn('genre', resp.data)
        genre = resp.data['genre']
        self.assertEqual(genre['user']['id'], payload['user']['id'])
        self.assertEqual(genre['code'], payload['code'])
        self.assertEqual(genre['name'], payload['name'])
        self.assertEqual(genre['is_income'], payload['is_income'])
        self.assertEqual(genre['fund']['id'], payload['fund']['id'])
        self.assertIn(f"/genres/{genre['id']}/", genre['url'])
        self.assertEqual(resp.data['token'], self.token)

    def test_get(self):
        payload = self.samples['genres'][11]
        self.create_genre(**payload)
        resp = self.api_request(BY_ID_1_URL, 'GET', token=self.token)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertIn('genre', resp.data)
        genre = resp.data['genre']
        self.assertEqual(genre['user']['id'], payload['user']['id'])
        self.assertEqual(genre['code'], payload['code'])
        self.assertEqual(genre['name'], payload['name'])
        self.assertEqual(genre['is_income'], payload['is_income'])
        self.assertEqual(genre['fund']['id'], payload['fund']['id'])
        self.assertIn(f"/genres/{genre['id']}/", genre['url'])
        self.assertEqual(resp.data['token'], self.token)

    def test_patch(self):
        # Create with one sample, then patch it with a different one.
        self.create_genre(**self.samples['genres'][11])
        payload = self.samples['genres'][12]
        resp = self.api_request(BY_ID_1_URL, 'PATCH', payload=payload, token=self.token)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertIn('genre', resp.data)
        genre = resp.data['genre']
        self.assertEqual(genre['code'], payload['code'])
        self.assertEqual(genre['name'], payload['name'])
        self.assertEqual(genre['is_income'], payload['is_income'])
        self.assertEqual(genre['fund']['id'], payload['fund']['id'])
        self.assertEqual(resp.data['token'], self.token)

    def test_delete(self):
        self.create_genre(**self.samples['genres'][11])
        resp = self.api_request(BY_ID_1_URL, 'DELETE', token=self.token)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertIn('genre', resp.data)
        # A deleted genre comes back as an empty object.
        self.assertEqual(resp.data['genre'], {})
        self.assertEqual(resp.data['token'], self.token)
class OwnerRequest(OwnerPrivateGenresAPITests, AdminRequests):
    '''Test owner's valid requests to genres API.

    Re-runs every AdminRequests test case against an owner session.
    '''

    # Signed user has id = 1 same with owner's id in samples (funds, genres)
class AdminGetList(AdminPrivateGenresAPITests):
    '''Test admin's list requests to funds API.'''

    def setUp(self):
        super().setUp()
        self.create_genres(self.samples['genres'])

    def test_get_list(self):
        resp = self.api_request(LIST_URL, 'GET', token=self.token)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertIn('genres', resp.data)
        # Admin sees every sample genre, regardless of owner.
        self.assertEqual(len(resp.data['genres']), len(self.samples['genres']))
        self.assertEqual(resp.data['token'], self.token)

    def test_get_list_passing_other_user_id(self):
        resp = self.api_request(LIST_URL + '?user_id=1', 'GET', token=self.token)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertIn('genres', resp.data)
        self.assertEqual(resp.data['token'], self.token)
class OwnerGetList(OwnerPrivateGenresAPITests):
    '''Test owner's list requests to funds API.'''

    def setUp(self):
        super().setUp()
        self.create_genres(self.samples['genres'])

    def test_get_list(self):
        resp = self.api_request(LIST_URL, 'GET', token=self.token)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertIn('genres', resp.data)
        # Owner sees only their own genres, not the full sample set.
        self.assertNotEqual(len(resp.data['genres']), len(self.samples['genres']))
        self.assertEqual(resp.data['token'], self.token)

    def test_get_list_passing_self_user_id(self):
        resp = self.api_request(LIST_URL + '?user_id=1', 'GET', token=self.token)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertIn('genres', resp.data)
        self.assertEqual(resp.data['token'], self.token)
| 40.545455
| 86
| 0.653078
| 646
| 4,906
| 4.826625
| 0.117647
| 0.083066
| 0.178961
| 0.162284
| 0.832585
| 0.832585
| 0.823284
| 0.805324
| 0.782553
| 0.744387
| 0
| 0.011136
| 0.176315
| 4,906
| 120
| 87
| 40.883333
| 0.760455
| 0.065838
| 0
| 0.716049
| 0
| 0
| 0.114254
| 0.014912
| 0
| 0
| 0
| 0
| 0.530864
| 1
| 0.123457
| false
| 0.024691
| 0.049383
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2767de21c4de6fe2bd21ab07c46347c31ed9895b
| 27,554
|
py
|
Python
|
CryoMOSFET/ptm.py
|
SNU-HPCS/CryoModel
|
07a3fbe3f3d44c7960b5aed562a90e204014eea0
|
[
"MIT"
] | 2
|
2021-05-26T12:32:46.000Z
|
2021-12-15T13:10:37.000Z
|
CryoMOSFET/ptm.py
|
SNU-HPCS/CryoModel
|
07a3fbe3f3d44c7960b5aed562a90e204014eea0
|
[
"MIT"
] | 1
|
2022-03-02T01:49:20.000Z
|
2022-03-18T10:37:59.000Z
|
CryoMOSFET/ptm.py
|
SNU-HPCS/CryoModel
|
07a3fbe3f3d44c7960b5aed562a90e204014eea0
|
[
"MIT"
] | null | null | null |
import sys
# Generate MOSFET variables (e.g., Tox, Ndep) from PTM
class ptm:
    """Predictive Technology Model (PTM) parameter set for one MOSFET.

    Given a technology ``node`` in nanometres (16/22/32/45 are HP metal-gate
    high-k strained PTM cards; 65/90/130/180 are bulk CMOS) and a
    ``mos_type`` (1 = NMOS, 2 = PMOS), the constructor copies the matching
    BSIM-style model parameters onto the instance and derives the effective
    channel length ``leff = ldrawn + xl - 2 * lint``.

    An unsupported ``(node, mos_type)`` combination prints a message and
    terminates via ``sys.exit()``.  (The original code only rejected unknown
    nodes; a known node with an invalid mos_type fell through silently and
    crashed later with a TypeError when computing leff — now both cases are
    rejected up front.)
    """

    # User input (class-level defaults; not overwritten by __init__,
    # matching the original behaviour)
    node = None
    mos_type = None
    # MOSFET variables (populated by __init__)
    u0 = ua = ub = toxm = rdsw = pbswgd = nfactor = vsat = toxp = None
    dvt0 = dvt1 = dsub = ndep = eta0 = etab = nsd = vth0 = None
    ldrawn = xl = lint = None
    aigbacc = bigbacc = cigbacc = nigbacc = None
    aigbinv = bigbinv = cigbinv = eigbinv = nigbinv = None
    aigc = bigc = cigc = aigsd = bigsd = cigsd = None
    nigc = poxedge = pigcd = ntox = toxref = vfb = ngate = None
    voff = voffl = kt1 = leff = None

    # Parameters that are identical for every supported (node, mos_type)
    # combination (verified against all twelve original branches).
    _COMMON = {
        'pbswgd': 1, 'etab': 0, 'voffl': 0,
        'aigbacc': 0.012, 'bigbacc': 0.0028, 'cigbacc': 0.002, 'nigbacc': 1,
        'aigbinv': 0.014, 'bigbinv': 0.004, 'cigbinv': 0.004,
        'eigbinv': 1.1, 'nigbinv': 3,
        'nigc': 1, 'poxedge': 1, 'pigcd': 1, 'ntox': 1,
    }

    # Per-(node, mos_type) model parameters; mos_type 1 = NMOS, 2 = PMOS.
    # Values transcribed verbatim from the PTM model cards.
    _PARAMS = {
        # 16nm technology ptm (HP, Metal gate, High-K, Strained)
        (16, 1): dict(
            u0=0.03, ua=6e-10, ub=1.2e-18, toxm=9.5e-10, rdsw=140,
            nfactor=2.3, vsat=2.9e+5, toxp=7e-10,
            dvt0=1, dvt1=2, dsub=0.1, ndep=7e+18, eta0=0.0032, nsd=2e+20,
            vth0=0.47965, ldrawn=16e-9, xl=-6.5e-9, lint=1.45e-9,
            aigc=0.0213, bigc=0.0025889, cigc=0.002,
            aigsd=0.0213, bigsd=0.0025889, cigsd=0.002,
            toxref=9.5e-10, vfb=-0.55, ngate=1e+23, voff=-0.13, kt1=-0.11),
        (16, 2): dict(
            u0=0.006, ua=2e-9, ub=5e-19, toxm=1e-9, rdsw=140,
            nfactor=2.1, vsat=2.5e+5, toxp=7e-10,
            dvt0=1, dvt1=2, dsub=0.1, ndep=5.5e+18, eta0=0.0032, nsd=2e+20,
            vth0=-0.43121, ldrawn=16e-9, xl=-6.5e-9, lint=1.45e-9,
            aigc=0.0213, bigc=0.0025889, cigc=0.002,
            aigsd=0.0213, bigsd=0.0025889, cigsd=0.002,
            toxref=1e-09, vfb=0.55, ngate=1e+23, voff=-0.126, kt1=-0.11),
        # 22nm technology ptm (HP, Metal gate, High-K, Strained)
        (22, 1): dict(
            u0=0.04, ua=6e-10, ub=1.2e-18, toxm=1.05e-9, rdsw=145,
            nfactor=2.3, vsat=2.5e+5, toxp=8e-10,
            dvt0=1, dvt1=2, dsub=0.1, ndep=5.5e+18, eta0=0.004, nsd=2e+20,
            vth0=0.50308, ldrawn=22e-9, xl=-9e-9, lint=2.0e-9,
            aigc=0.0213, bigc=0.0025889, cigc=0.002,
            aigsd=0.0213, bigsd=0.0025889, cigsd=0.002,
            toxref=1.05e-9, vfb=-0.55, ngate=1e+23, voff=-0.13, kt1=-0.11),
        (22, 2): dict(
            u0=0.0095, ua=2e-009, ub=5e-19, toxm=1.1e-9, rdsw=145,
            nfactor=2.3, vsat=2.1e+5, toxp=8e-10,
            dvt0=1, dvt1=2, dsub=0.1, ndep=4.40e+18, eta0=0.0038, nsd=2e+20,
            vth0=-0.4606, ldrawn=22e-9, xl=-9e-9, lint=2.0e-9,
            aigc=0.0213, bigc=0.0025889, cigc=0.002,
            aigsd=0.0213, bigsd=0.0025889, cigsd=0.002,
            toxref=1.1e-9, vfb=0.55, ngate=2e+20, voff=-0.126, kt1=-0.11),
        # 32nm technology ptm (HP, Metal gate, High-K, Strained)
        (32, 1): dict(
            u0=0.05, ua=6e-010, ub=1.2e-018, toxm=1.15e-9, rdsw=150,
            nfactor=2.508, vsat=2.1e+5, toxp=9e-10,
            dvt0=1, dvt1=2, dsub=0.1, ndep=4.12e+18, eta0=0.0048, nsd=2e+20,
            vth0=0.49396, ldrawn=32e-9, xl=-14e-9, lint=2.7e-9,
            aigc=0.020014, bigc=0.0027432, cigc=0.002,
            aigsd=0.020014, bigsd=0.0027432, cigsd=0.002,
            toxref=1.15e-9, vfb=-0.55, ngate=1e+23, voff=-0.13, kt1=-0.11),
        (32, 2): dict(
            u0=0.014, ua=2e-9, ub=0.5e-18, toxm=1.2e-09, rdsw=150,
            nfactor=2.1, vsat=1.8e+5, toxp=0.9e-9,
            dvt0=1, dvt1=2, dsub=0.1, ndep=3.07e+18, eta0=0.0048, nsd=2e+20,
            vth0=-0.49155, ldrawn=32e-9, xl=-14e-9, lint=2.7e-9,
            aigc=0.020014, bigc=0.0027432, cigc=0.002,
            aigsd=0.020014, bigsd=0.0027432, cigsd=0.002,
            toxref=1.2e-9, vfb=0.55, ngate=1e+23, voff=-0.126, kt1=-0.11),
        # 45nm technology ptm (HP, Metal gate, High-K, Strained)
        (45, 1): dict(
            u0=0.054, ua=6e-10, ub=1.2e-18, toxm=1.25e-9, rdsw=155,
            nfactor=2.22, vsat=1.7e+5, toxp=1.0e-09,
            dvt0=1, dvt1=2, dsub=0.1, ndep=3.24e+18, eta0=0.0055, nsd=2e+20,
            vth0=0.46893, ldrawn=45e-9, xl=-20e-9, lint=3.75e-9,
            aigc=0.02, bigc=0.0025, cigc=0.002,
            aigsd=0.02, bigsd=0.0025, cigsd=0.002,
            toxref=1.25e-9, vfb=-0.55, ngate=1e+23, voff=-0.13, kt1=-0.11),
        (45, 2): dict(
            u0=0.02, ua=2e-9, ub=0.5e-18, toxm=1.3e-09, rdsw=155,
            nfactor=2.1, vsat=1.5e+5, toxp=1.0e-09,
            dvt0=1, dvt1=2, dsub=0.1, ndep=2.44e+18, eta0=0.0055, nsd=2e+20,
            vth0=-0.49158, ldrawn=45e-9, xl=-20e-9, lint=3.75e-9,
            aigc=0.010687, bigc=0.0012607, cigc=0.0008,
            aigsd=0.010687, bigsd=0.0012607, cigsd=0.0008,
            toxref=1.3e-9, vfb=0.55, ngate=1e+23, voff=-0.126, kt1=-0.11),
        # 65nm technology ptm (Bulk CMOS)
        (65, 1): dict(
            u0=0.0491, ua=6e-10, ub=1.2e-18, toxm=1.85e-9, rdsw=165,
            nfactor=1.9, vsat=1.24340e+5, toxp=1.2e-9,
            dvt0=1, dvt1=2, dsub=0.1, ndep=2.54e+18, eta0=0.0058, nsd=2e+20,
            vth0=0.423, ldrawn=65e-9, xl=-30e-9, lint=5.25e-9,
            aigc=0.012, bigc=0.0028, cigc=0.002,
            aigsd=0.012, bigsd=0.0028, cigsd=0.002,
            toxref=1.85e-9, vfb=-0.55, ngate=2e+20, voff=-0.13, kt1=-0.11),
        (65, 2): dict(
            u0=0.00574, ua=2e-9, ub=0.5e-18, toxm=1.95e-9, rdsw=165,
            nfactor=1.9, vsat=0.7e+5, toxp=1.2e-9,
            dvt0=1, dvt1=2, dsub=0.1, ndep=1.87e+18, eta0=0.0058, nsd=2e+20,
            vth0=-0.365, ldrawn=65e-9, xl=-30e-9, lint=5.25e-9,
            aigc=0.69, bigc=0.0012, cigc=0.0008,
            aigsd=0.0087, bigsd=0.0012, cigsd=0.0008,
            toxref=1.95e-9, vfb=0.55, ngate=2e+20, voff=-0.126, kt1=-0.11),
        # 90nm technology ptm (Bulk CMOS)
        (90, 1): dict(
            u0=0.0547, ua=6e-10, ub=1.2e-18, toxm=2.05e-9, rdsw=180,
            nfactor=1.7, vsat=1.1376e+5, toxp=1.4e-9,
            dvt0=1, dvt1=2, dsub=0.1, ndep=1.94e+18, eta0=0.0074, nsd=2e+20,
            vth0=0.397, ldrawn=90e-9, xl=-40e-9, lint=7.5e-9,
            aigc=0.012, bigc=0.0028, cigc=0.002,
            aigsd=0.012, bigsd=0.0028, cigsd=0.002,
            toxref=2.05e-9, vfb=-0.55, ngate=2e+20, voff=-0.13, kt1=-0.11),
        (90, 2): dict(
            u0=0.00711, ua=2e-9, ub=0.5e-18, toxm=2.15e-9, rdsw=200,
            nfactor=1.7, vsat=0.7e+5, toxp=1.4e-9,
            dvt0=1, dvt1=2, dsub=0.1, ndep=1.43e+18, eta0=0.0074, nsd=2e+20,
            vth0=-0.339, ldrawn=90e-9, xl=-40e-9, lint=7.5e-9,
            aigc=0.69, bigc=0.0012, cigc=0.0008,
            aigsd=0.0087, bigsd=0.0012, cigsd=0.0008,
            toxref=2.15e-9, vfb=0.55, ngate=2e+20, voff=-0.126, kt1=-0.11),
        # 130nm technology ptm (Bulk CMOS)
        (130, 1): dict(
            u0=0.05928, ua=6e-010, ub=1.2e-018, toxm=2.25e-9, rdsw=200,
            nfactor=1.5, vsat=1.0037e+5, toxp=1.6e-9,
            dvt0=1, dvt1=2, dsub=0.1, ndep=1.54e+18, eta0=0.0092, nsd=2e+20,
            vth0=0.3782, ldrawn=130e-9, xl=-60e-9, lint=10.5e-9,
            aigc=0.012, bigc=0.0028, cigc=0.002,
            aigsd=0.012, bigsd=0.0028, cigsd=0.002,
            toxref=2.25e-9, vfb=-0.55, ngate=2e+20, voff=-0.13, kt1=-0.11),
        (130, 2): dict(
            u0=0.00835, ua=2.0e-9, ub=0.5e-18, toxm=2.35e-9, rdsw=240,
            nfactor=1.5, vsat=0.7e+5, toxp=1.6e-9,
            dvt0=1, dvt1=2, dsub=0.1, ndep=1.14e+18, eta0=0.0092, nsd=2e+20,
            vth0=-0.321, ldrawn=130e-9, xl=-60e-9, lint=10.5e-9,
            aigc=0.69, bigc=0.0012, cigc=0.0008,
            aigsd=0.0087, bigsd=0.0012, cigsd=0.0008,
            toxref=2.35e-9, vfb=0.55, ngate=2e+20, voff=-0.126, kt1=-0.11),
        # 180nm technology ptm (Bulk CMOS)
        (180, 1): dict(
            u0=0.035, ua=-7e-010, ub=4e-018, toxm=2.8e-9, rdsw=240,
            nfactor=0.9, vsat=0.91e5, toxp=2.3e-9,
            dvt0=8, dvt1=0.75, dsub=0.8, ndep=1.21e+18, eta0=0.22, nsd=1e+20,
            vth0=0.3999, ldrawn=180e-9, xl=-6e-8, lint=1e-8,
            aigc=0.012, bigc=0.0028, cigc=0.002,
            aigsd=0.012, bigsd=0.0028, cigsd=0.002,
            toxref=2.8e-9, vfb=-0.55, ngate=2e+20, voff=-0.13, kt1=-0.37),
        # NOTE(review): the original 180nm PMOS branch set vfb=-0.55, unlike
        # every other PMOS branch's +0.55; preserved verbatim — confirm
        # against the PTM 180nm model card.
        (180, 2): dict(
            u0=0.0063, ua=-1.2e-10, ub=1e-18, toxm=2.5e-9, rdsw=240,
            nfactor=1.4, vsat=0.7e+5, toxp=2.1e-9,
            dvt0=11.2, dvt1=0.72, dsub=2.8, ndep=0.87e+18, eta0=8.5, nsd=1e+20,
            vth0=-0.42, ldrawn=180e-9, xl=0, lint=3e-8,
            aigc=0.69, bigc=0.0012, cigc=0.0008,
            aigsd=0.0087, bigsd=0.0012, cigsd=0.0008,
            toxref=2.5e-9, vfb=-0.55, ngate=2e+20, voff=-0.126, kt1=-0.37),
    }

    def __init__(self, node, mos_type):
        """Populate the model parameters for (node, mos_type).

        node     -- technology node in nm (16, 22, 32, 45, 65, 90, 130, 180)
        mos_type -- 1 for NMOS, 2 for PMOS
        Exits the process (as the original did for unknown nodes) when the
        combination is not supported.
        """
        try:
            params = self._PARAMS[(node, mos_type)]
        except KeyError:
            print("pgen doesn't support %dnm technology yet." % (node))
            sys.exit()
        for name, value in self._COMMON.items():
            setattr(self, name, value)
        for name, value in params.items():
            setattr(self, name, value)
        # Effective channel length: drawn length corrected by the mask bias
        # (xl, usually negative) and twice the lateral S/D diffusion (lint).
        self.leff = self.ldrawn + self.xl - 2 * self.lint
'''TEST CODE'''
# Quick smoke test: build a 16nm NMOS card and show its effective length.
if __name__ == "__main__":
    device = ptm(16, 1)
    print(device.leff)
| 32.608284
| 71
| 0.363541
| 3,104
| 27,554
| 3.216817
| 0.07732
| 0.075613
| 0.030446
| 0.024036
| 0.865398
| 0.864597
| 0.844266
| 0.834652
| 0.809114
| 0.783175
| 0
| 0.194241
| 0.548777
| 27,554
| 844
| 72
| 32.646919
| 0.608864
| 0.018328
| 0
| 0.764339
| 1
| 0
| 0.001814
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001247
| false
| 0
| 0.001247
| 0
| 0.063591
| 0.002494
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
fd6655ba0cf48c093408a6863d83672be3b4d4f3
| 193,042
|
py
|
Python
|
api/audit_trail/migrations/0001_squashed_0028_merge_20210224_1641.py
|
uktrade/lite-ap
|
4e1a57956bd921992b4a6e2b8fbacbba5720960d
|
[
"MIT"
] | 3
|
2019-05-15T09:30:39.000Z
|
2020-04-22T16:14:23.000Z
|
api/audit_trail/migrations/0001_squashed_0028_merge_20210224_1641.py
|
uktrade/lite-ap
|
4e1a57956bd921992b4a6e2b8fbacbba5720960d
|
[
"MIT"
] | 85
|
2019-04-24T10:39:35.000Z
|
2022-03-21T14:52:12.000Z
|
api/audit_trail/migrations/0001_squashed_0028_merge_20210224_1641.py
|
uktrade/lite-ap
|
4e1a57956bd921992b4a6e2b8fbacbba5720960d
|
[
"MIT"
] | 1
|
2021-01-17T11:12:19.000Z
|
2021-01-17T11:12:19.000Z
|
# Generated by Django 3.1.8 on 2021-04-26 06:06
import api.audit_trail.enums
from django.db import migrations, models
import django.db.migrations.operations.special
import django.db.models.deletion
import django.utils.timezone
import jsonfield.fields
import model_utils.fields
import uuid
from api.audit_trail.enums import AuditType
from api.audit_trail.payload import audit_type_format
from api.cases.enums import CaseTypeEnum
from api.core.constants import Roles
def create_missing_application_audit(apps, schema_editor):
    """Rewrite legacy UPDATED_STATUS payloads into {"old": ..., "new": ...} form.

    Older audits stored the bare status string in payload["status"].  For each
    case, walk its status-change audits in creation order and rewrite them as
    old/new transitions; the first change is assumed to come from "draft".
    Stops early for a case once an already-migrated payload is seen.
    """
    if schema_editor.connection.alias != "default":
        return
    ContentType = apps.get_model("contenttypes", "ContentType")
    Case = apps.get_model("cases", "Case")
    Audit = apps.get_model("audit_trail", "Audit")
    for case in Case.objects.all():
        print("Running for case {id}".format(id=case.id))
        content_type = ContentType.objects.get_for_model(case)
        activities = Audit.objects.filter(
            target_object_id=case.id, target_content_type=content_type, verb=AuditType.UPDATED_STATUS
        ).order_by("created_at")
        last_status = None
        for activity in activities:
            if "old" in activity.payload["status"]:
                # Payload already in new form — this case was migrated.
                break
            if last_status is None:  # idiom fix: was `== None`
                # First status change: assume the case came from "draft".
                last_status = activity.payload["status"]
                activity.payload = {"status": {"old": "draft", "new": last_status}}
                activity.save()
                continue
            activity.payload = {"status": {"old": last_status, "new": activity.payload["status"]}}
            print("Updating activity: {id}".format(id=activity.id))
            activity.save()
            last_status = activity.payload["status"]["new"]
def _backfill_case_status_audits(Audit, content_type, case, initial_status):
    """Rewrite "draft" old-statuses to initial_status and add a missing CREATED audit for one case."""
    audits = Audit.objects.filter(verb=AuditType.UPDATED_STATUS, target_object_id=case.id).order_by("created_at")
    for audit in audits:
        if audit and audit.payload["status"]["old"] == "draft":
            print("Updating draft payload")
            audit.payload["status"]["old"] = initial_status
            audit.save()
    if not Audit.objects.filter(verb=AuditType.CREATED, action_object_object_id=case.id).exists():
        print("Creating original audit")
        Audit.objects.create(
            created_at=case.created_at,
            verb=AuditType.CREATED,
            action_object_object_id=case.id,
            action_object_content_type=content_type,
            payload={"status": {"new": initial_status}},
        )


def create_missing_create_audits(apps, schema_editor):
    """Backfill audits for goods (clc_review) and EUA (submitted) cases.

    Goods and EUA cases never start in "draft": rewrite any draft old-status
    to the correct initial status, and create the missing CREATED audit where
    none exists.  (The two loops were copy-pasted in the original and differed
    only in queryset and initial status; factored into one helper.)
    """
    if schema_editor.connection.alias != "default":
        return
    ContentType = apps.get_model("contenttypes", "ContentType")
    Case = apps.get_model("cases", "Case")
    Audit = apps.get_model("audit_trail", "Audit")
    for case in Case.objects.filter(case_type__id=CaseTypeEnum.GOODS.id):
        print("Running for goods case {id}".format(id=case.id))
        _backfill_case_status_audits(Audit, ContentType.objects.get_for_model(case), case, "clc_review")
    for case in Case.objects.filter(case_type__id=CaseTypeEnum.EUA.id):
        print("Running for eua case {id}".format(id=case.id))
        _backfill_case_status_audits(Audit, ContentType.objects.get_for_model(case), case, "submitted")
def create_missing_case_create_audits(apps, schema_editor):
    """For cases other than goods/EUA, fix the earliest status audit.

    If a case's first UPDATED_STATUS audit jumps straight from "draft" to
    something other than "submitted", rewrite its old-status to "submitted"
    and insert the missing draft -> submitted transition audit.
    """
    if schema_editor.connection.alias != "default":
        return
    ContentType = apps.get_model("contenttypes", "ContentType")
    Case = apps.get_model("cases", "Case")
    Audit = apps.get_model("audit_trail", "Audit")
    other_cases = Case.objects.exclude(
        case_type_id__in=[CaseTypeEnum.GOODS.id, CaseTypeEnum.EUA.id]
    ).order_by("created_at")
    for case in other_cases:
        print("Running for audit update for case {id}".format(id=case.id))
        content_type = ContentType.objects.get_for_model(case)
        status_audits = Audit.objects.filter(
            verb=AuditType.UPDATED_STATUS, target_object_id=case.id
        ).order_by("created_at")
        earliest = status_audits.first()
        if not earliest:
            continue
        transition = earliest.payload["status"]
        if transition["old"] != "draft" or transition["new"] == "submitted":
            continue
        print(earliest.payload)
        earliest.payload["status"]["old"] = "submitted"
        earliest.save()
        Audit.objects.create(
            created_at=case.created_at,
            verb=AuditType.UPDATED_STATUS,
            target_object_id=case.id,
            target_content_type=content_type,
            payload={"status": {"new": "submitted", "old": "draft"}},
        )
def fill_in_missing_actor(apps, schema_editor):
    """Attach an exporter super-user as actor to CREATED / UPDATED_STATUS
    audits that were recorded without an actor content type.
    """
    if schema_editor.connection.alias != "default":
        return
    ContentType = apps.get_model("contenttypes", "ContentType")
    Audit = apps.get_model("audit_trail", "Audit")
    Case = apps.get_model("cases", "Case")
    UserOrganisationRelationship = apps.get_model("users", "UserOrganisationRelationship")
    for audit in Audit.objects.filter(
        actor_content_type__isnull=True, verb__in=[AuditType.CREATED, AuditType.UPDATED_STATUS]
    ):
        print("Updating audit for:", audit.id)
        # Status-update audits reference the case via target_object_id;
        # CREATED audits via action_object_object_id.
        case_id = audit.target_object_id or audit.action_object_object_id
        case = Case.objects.get(id=case_id)
        organisation = case.organisation
        # Pick a named super-user of the case's organisation as the actor.
        admin_relationships = UserOrganisationRelationship.objects.filter(
            organisation=organisation, role=Roles.EXPORTER_SUPER_USER_ROLE_ID,
        ).exclude(user__first_name="")
        # NOTE(review): .first() returns None when the organisation has no
        # named super-user, which would raise AttributeError here — confirm
        # the data guarantees this cannot happen before running.
        user = admin_relationships.first().user
        print("Actor: ", user)
        content_type = ContentType.objects.get(model="exporteruser")
        audit.actor_content_type = content_type
        audit.actor_object_id = user.pk
        print(audit, content_type)
        audit.save()
# Verbs that have been changed: maps each AuditType to the legacy free-text
# verb string previously stored on Audit rows, so migrate_audit_verbs can
# match and convert records written before the format changed.
DELTA_VERBS = {
    AuditType.MOVE_CASE: "moved the case to: {queues}",
    AuditType.GOOD_REVIEWED: 'good was reviewed: {good_name} control code changed from "{old_control_code}" to "{new_control_code}"',
    AuditType.GRANTED_APPLICATION: "granted licence for {licence_duration} months",
    AuditType.UPDATE_APPLICATION_LETTER_REFERENCE: "updated the letter reference from {old_ref_number} to {new_ref_number}",
}
# Verbs that remain unchanged (skipped entirely by migrate_audit_verbs).
EXCLUDED = [AuditType.CREATED]
def migrate_audit_verbs(apps, schema_editor):
    """
    Convert old AuditType.verb with format to new AuditType.verb as enum value.

    For every AuditType (except EXCLUDED), rewrites audits stored under the
    current formatted verb string, and — for types in DELTA_VERBS — audits
    stored under the older legacy verb string as well.
    """
    if schema_editor.connection.alias != "default":
        return
    Audit = apps.get_model("audit_trail", "Audit")
    total_updates = 0
    for audit_type in AuditType:
        if audit_type in EXCLUDED:
            continue
        old_verb = audit_type_format[audit_type]
        audit_qs = Audit.objects.filter(verb=old_verb)
        count = audit_qs.count()
        if count:
            print({"audit": audit_type.value, "count": count})
            total_updates += count
            audit_qs.update(verb=audit_type)
        # Membership test instead of `.get(audit_type, False)`: clearer, and
        # would not mis-skip a key that happened to map to a falsy value.
        if audit_type in DELTA_VERBS:
            old_audit_qs = Audit.objects.filter(verb=DELTA_VERBS[audit_type])
            count = old_audit_qs.count()
            if count:
                print({"old_audit": audit_type.value, "count": count})
                total_updates += count
                old_audit_qs.update(verb=audit_type)
    if total_updates:
        print({"total_updates": total_updates, "total_audit_count": Audit.objects.exclude(verb__in=EXCLUDED).count()})
def update_granted_application_payload_type(apps, schema_editor):
    """Backfill a "start_date" key into GRANTED_APPLICATION audit payloads.

    Uses the linked licence's start date when one exists; otherwise falls
    back to the audit's own creation timestamp.  (The original docstring was
    copy-pasted from migrate_audit_verbs.)
    """
    if schema_editor.connection.alias != "default":
        return
    Audit = apps.get_model("audit_trail", "Audit")
    Licence = apps.get_model("licences", "Licence")
    for audit in Audit.objects.filter(verb=AuditType.GRANTED_APPLICATION):
        if "start_date" not in audit.payload:
            print("Updating GRANTED_APPLICATION audit payload:", audit.id)
            try:
                start_date = Licence.objects.get(application__id=audit.target_object_id).start_date
            except Licence.DoesNotExist:
                start_date = audit.created_at
            # Stored as a "YYYY-MM-DD" string for JSON payload compatibility.
            audit.payload["start_date"] = start_date.date().strftime("%Y-%m-%d")
            audit.save()
def update_good_review_payload(apps, schema_editor):
    """Rename GOOD_REVIEWED payload keys from *_control_code to *_control_list_entry.

    Only audits still carrying the legacy "new_control_code" key are
    rewritten; prints a summary when anything changed.
    """
    if schema_editor.connection.alias != "default":
        return
    Audit = apps.get_model("audit_trail", "Audit")
    count = 0
    for audit in Audit.objects.filter(verb=AuditType.GOOD_REVIEWED):
        if "new_control_code" in audit.payload:
            # Fixed typo in the original log message ("UPDAING FOR").
            print("UPDATING FOR", audit.id)
            new_payload = {
                "good_name": audit.payload["good_name"],
                "old_control_list_entry": audit.payload["old_control_code"],
                "new_control_list_entry": audit.payload["new_control_code"],
            }
            audit.payload = new_payload
            count += 1
            audit.save()
    if count:
        print({"updated": count, "existing": Audit.objects.filter(verb=AuditType.GOOD_REVIEWED).count()})
class Migration(migrations.Migration):
replaces = [('audit_trail', '0001_initial'), ('audit_trail', '0002_migrate_old_status_payload'), ('audit_trail', '0003_queries_created_audit'), ('audit_trail', '0004_case_submitted_audits'), ('audit_trail', '0005_null_actor_fix'), ('audit_trail', '0006_verb_choices'), ('audit_trail', '0007_migrate_audit_verbs'), ('audit_trail', '0008_granted_application_backfill'), ('audit_trail', '0009_control_code_payload'), ('audit_trail', '0010_auto_20200513_1347'), ('audit_trail', '0011_auto_20200520_1522'), ('audit_trail', '0012_auto_20200521_1321'), ('audit_trail', '0013_auto_20200602_0724'), ('audit_trail', '0014_auto_20200604_0858'), ('audit_trail', '0015_auto_20200612_1108'), ('audit_trail', '0016_auto_20200617_1501'), ('audit_trail', '0017_auto_20200624_1218'), ('audit_trail', '0018_auto_20200701_0920'), ('audit_trail', '0019_auto_20200708_0913'), ('audit_trail', '0020_auto_20200708_1434'), ('audit_trail', '0021_auto_20200709_1347'), ('audit_trail', '0022_auto_20200713_0946'), ('audit_trail', '0023_auto_20200713_1633'), ('audit_trail', '0024_auto_20200717_0841'), ('audit_trail', '0025_auto_20200717_1459'), ('audit_trail', '0026_auto_20200717_1526'), ('audit_trail', '0027_auto_20210223_1815'), ('audit_trail', '0027_auto_20210222_1115'), ('audit_trail', '0028_merge_20210224_1641')]
initial = True
dependencies = [
('cases', '0013_auto_20200325_1544'),
('applications', '0022_auto_20200331_1107'),
('contenttypes', '0002_remove_content_type_name'),
('licences', '0002_licence_decisions'),
('users', '0005_auto_20200322_1547'),
]
operations = [
migrations.CreateModel(
name='Audit',
fields=[
('created_at', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created_at')),
('updated_at', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='updated_at')),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('actor_object_id', models.CharField(db_index=True, max_length=255)),
('verb', models.CharField(db_index=True, max_length=255)),
('description', models.TextField(blank=True, null=True)),
('target_object_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
('action_object_object_id', models.CharField(blank=True, db_index=True, max_length=255, null=True)),
('payload', jsonfield.fields.JSONField(default=dict)),
('action_object_content_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='action_object', to='contenttypes.contenttype')),
('actor_content_type', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='actor', to='contenttypes.contenttype')),
('target_content_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='target', to='contenttypes.contenttype')),
],
options={
'ordering': ('-created_at',),
},
),
migrations.RunPython(
code=create_missing_application_audit,
reverse_code=django.db.migrations.operations.special.RunPython.noop,
),
migrations.RunPython(
code=create_missing_create_audits,
reverse_code=django.db.migrations.operations.special.RunPython.noop,
),
migrations.RunPython(
code=create_missing_case_create_audits,
reverse_code=django.db.migrations.operations.special.RunPython.noop,
),
migrations.RunPython(
code=fill_in_missing_actor,
reverse_code=django.db.migrations.operations.special.RunPython.noop,
),
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), 
(api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), (api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), (api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 
'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), 
(api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules')], db_index=True, max_length=255),
),
migrations.RunPython(
code=migrate_audit_verbs,
reverse_code=django.db.migrations.operations.special.RunPython.noop,
),
migrations.RunPython(
code=update_granted_application_payload_type,
reverse_code=django.db.migrations.operations.special.RunPython.noop,
),
migrations.RunPython(
code=update_good_review_payload,
reverse_code=django.db.migrations.operations.special.RunPython.noop,
),
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), 
(api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), (api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), (api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 
'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), 
(api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check')], db_index=True, max_length=255),
),
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), 
(api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), (api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), (api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 
'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), 
(api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 
'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check')], db_index=True, max_length=255),
),
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), (api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), 
(api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), 
(api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), 
(api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check')], db_index=True, max_length=255),
),
migrations.AlterField(
    model_name='audit',
    name='verb',
    # Re-sync of the "verb" choices after new AuditType members were added
    # (this revision ends at CREATED_SITE).  Every label is exactly the
    # lower-cased member name, so the pair list is derived from one ordered
    # tuple of member names; order is preserved so Django's migration-state
    # comparison sees an identical choices list.
    field=models.CharField(
        choices=[
            (api.audit_trail.enums.AuditType[member], member.lower())
            for member in (
                'CREATED', 'OGL_CREATED', 'OGL_FIELD_EDITED',
                'OGL_MULTI_FIELD_EDITED', 'ADD_FLAGS', 'REMOVE_FLAGS',
                'GOOD_REVIEWED', 'GOOD_ADD_FLAGS', 'GOOD_REMOVE_FLAGS',
                'GOOD_ADD_REMOVE_FLAGS', 'DESTINATION_ADD_FLAGS',
                'DESTINATION_REMOVE_FLAGS', 'ADD_GOOD_TO_APPLICATION',
                'REMOVE_GOOD_FROM_APPLICATION', 'ADD_GOOD_TYPE_TO_APPLICATION',
                'REMOVE_GOOD_TYPE_FROM_APPLICATION',
                'UPDATE_APPLICATION_END_USE_DETAIL',
                'UPDATE_APPLICATION_TEMPORARY_EXPORT',
                'REMOVED_SITES_FROM_APPLICATION', 'ADD_SITES_TO_APPLICATION',
                'REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION',
                'ADD_EXTERNAL_LOCATIONS_TO_APPLICATION',
                'REMOVED_COUNTRIES_FROM_APPLICATION',
                'ADD_COUNTRIES_TO_APPLICATION',
                'ADD_ADDITIONAL_CONTACT_TO_CASE', 'MOVE_CASE', 'ASSIGN_CASE',
                'REMOVE_CASE', 'REMOVE_CASE_FROM_ALL_QUEUES',
                'REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS', 'CLC_RESPONSE',
                'PV_GRADING_RESPONSE', 'CREATED_CASE_NOTE', 'ECJU_QUERY',
                'UPDATED_STATUS', 'UPDATED_APPLICATION_NAME',
                'UPDATE_APPLICATION_LETTER_REFERENCE',
                'UPDATE_APPLICATION_F680_CLEARANCE_TYPES',
                'ADDED_APPLICATION_LETTER_REFERENCE',
                'REMOVED_APPLICATION_LETTER_REFERENCE',
                'ASSIGNED_COUNTRIES_TO_GOOD', 'REMOVED_COUNTRIES_FROM_GOOD',
                'CREATED_FINAL_ADVICE', 'CLEARED_FINAL_ADVICE',
                'CREATED_TEAM_ADVICE', 'CLEARED_TEAM_ADVICE',
                'CREATED_USER_ADVICE', 'ADD_PARTY', 'REMOVE_PARTY',
                'UPLOAD_PARTY_DOCUMENT', 'DELETE_PARTY_DOCUMENT',
                'UPLOAD_APPLICATION_DOCUMENT', 'DELETE_APPLICATION_DOCUMENT',
                'UPLOAD_CASE_DOCUMENT', 'GENERATE_CASE_DOCUMENT',
                'ADD_CASE_OFFICER_TO_CASE', 'REMOVE_CASE_OFFICER_FROM_CASE',
                'GRANTED_APPLICATION', 'FINALISED_APPLICATION',
                'UNASSIGNED_QUEUES', 'UNASSIGNED',
                'UPDATED_LETTER_TEMPLATE_NAME',
                'ADDED_LETTER_TEMPLATE_CASE_TYPES',
                'UPDATED_LETTER_TEMPLATE_CASE_TYPES',
                'REMOVED_LETTER_TEMPLATE_CASE_TYPES',
                'ADDED_LETTER_TEMPLATE_DECISIONS',
                'UPDATED_LETTER_TEMPLATE_DECISIONS',
                'REMOVED_LETTER_TEMPLATE_DECISIONS',
                'UPDATED_LETTER_TEMPLATE_PARAGRAPHS',
                'UPDATED_LETTER_TEMPLATE_LAYOUT',
                'UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING',
                'CREATED_PICKLIST', 'UPDATED_PICKLIST_TEXT',
                'UPDATED_PICKLIST_NAME', 'DEACTIVATE_PICKLIST',
                'REACTIVATE_PICKLIST', 'UPDATED_EXHIBITION_DETAILS_TITLE',
                'UPDATED_EXHIBITION_DETAILS_START_DATE',
                'UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE',
                'UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE',
                'UPDATED_ROUTE_OF_GOODS', 'UPDATED_ORGANISATION',
                'CREATED_ORGANISATION', 'REGISTER_ORGANISATION',
                'REJECTED_ORGANISATION', 'APPROVED_ORGANISATION',
                'REMOVED_FLAG_ON_ORGANISATION', 'ADDED_FLAG_ON_ORGANISATION',
                'RERUN_ROUTING_RULES', 'ENFORCEMENT_CHECK',
                'UPDATED_SITE', 'CREATED_SITE',
            )
        ],
        db_index=True,
        max_length=255,
    ),
),
migrations.AlterField(
    model_name='audit',
    name='verb',
    # Re-sync of the "verb" choices after new AuditType members were added
    # (this revision ends at UPDATED_SITE_NAME).  Every label is exactly the
    # lower-cased member name, so the pair list is derived from one ordered
    # tuple of member names; order is preserved so Django's migration-state
    # comparison sees an identical choices list.
    field=models.CharField(
        choices=[
            (api.audit_trail.enums.AuditType[member], member.lower())
            for member in (
                'CREATED', 'OGL_CREATED', 'OGL_FIELD_EDITED',
                'OGL_MULTI_FIELD_EDITED', 'ADD_FLAGS', 'REMOVE_FLAGS',
                'GOOD_REVIEWED', 'GOOD_ADD_FLAGS', 'GOOD_REMOVE_FLAGS',
                'GOOD_ADD_REMOVE_FLAGS', 'DESTINATION_ADD_FLAGS',
                'DESTINATION_REMOVE_FLAGS', 'ADD_GOOD_TO_APPLICATION',
                'REMOVE_GOOD_FROM_APPLICATION', 'ADD_GOOD_TYPE_TO_APPLICATION',
                'REMOVE_GOOD_TYPE_FROM_APPLICATION',
                'UPDATE_APPLICATION_END_USE_DETAIL',
                'UPDATE_APPLICATION_TEMPORARY_EXPORT',
                'REMOVED_SITES_FROM_APPLICATION', 'ADD_SITES_TO_APPLICATION',
                'REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION',
                'ADD_EXTERNAL_LOCATIONS_TO_APPLICATION',
                'REMOVED_COUNTRIES_FROM_APPLICATION',
                'ADD_COUNTRIES_TO_APPLICATION',
                'ADD_ADDITIONAL_CONTACT_TO_CASE', 'MOVE_CASE', 'ASSIGN_CASE',
                'REMOVE_CASE', 'REMOVE_CASE_FROM_ALL_QUEUES',
                'REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS', 'CLC_RESPONSE',
                'PV_GRADING_RESPONSE', 'CREATED_CASE_NOTE', 'ECJU_QUERY',
                'UPDATED_STATUS', 'UPDATED_APPLICATION_NAME',
                'UPDATE_APPLICATION_LETTER_REFERENCE',
                'UPDATE_APPLICATION_F680_CLEARANCE_TYPES',
                'ADDED_APPLICATION_LETTER_REFERENCE',
                'REMOVED_APPLICATION_LETTER_REFERENCE',
                'ASSIGNED_COUNTRIES_TO_GOOD', 'REMOVED_COUNTRIES_FROM_GOOD',
                'CREATED_FINAL_ADVICE', 'CLEARED_FINAL_ADVICE',
                'CREATED_TEAM_ADVICE', 'CLEARED_TEAM_ADVICE',
                'CREATED_USER_ADVICE', 'ADD_PARTY', 'REMOVE_PARTY',
                'UPLOAD_PARTY_DOCUMENT', 'DELETE_PARTY_DOCUMENT',
                'UPLOAD_APPLICATION_DOCUMENT', 'DELETE_APPLICATION_DOCUMENT',
                'UPLOAD_CASE_DOCUMENT', 'GENERATE_CASE_DOCUMENT',
                'ADD_CASE_OFFICER_TO_CASE', 'REMOVE_CASE_OFFICER_FROM_CASE',
                'GRANTED_APPLICATION', 'FINALISED_APPLICATION',
                'UNASSIGNED_QUEUES', 'UNASSIGNED',
                'UPDATED_LETTER_TEMPLATE_NAME',
                'ADDED_LETTER_TEMPLATE_CASE_TYPES',
                'UPDATED_LETTER_TEMPLATE_CASE_TYPES',
                'REMOVED_LETTER_TEMPLATE_CASE_TYPES',
                'ADDED_LETTER_TEMPLATE_DECISIONS',
                'UPDATED_LETTER_TEMPLATE_DECISIONS',
                'REMOVED_LETTER_TEMPLATE_DECISIONS',
                'UPDATED_LETTER_TEMPLATE_PARAGRAPHS',
                'UPDATED_LETTER_TEMPLATE_LAYOUT',
                'UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING',
                'CREATED_PICKLIST', 'UPDATED_PICKLIST_TEXT',
                'UPDATED_PICKLIST_NAME', 'DEACTIVATE_PICKLIST',
                'REACTIVATE_PICKLIST', 'UPDATED_EXHIBITION_DETAILS_TITLE',
                'UPDATED_EXHIBITION_DETAILS_START_DATE',
                'UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE',
                'UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE',
                'UPDATED_ROUTE_OF_GOODS', 'UPDATED_ORGANISATION',
                'CREATED_ORGANISATION', 'REGISTER_ORGANISATION',
                'REJECTED_ORGANISATION', 'APPROVED_ORGANISATION',
                'REMOVED_FLAG_ON_ORGANISATION', 'ADDED_FLAG_ON_ORGANISATION',
                'RERUN_ROUTING_RULES', 'ENFORCEMENT_CHECK',
                'UPDATED_SITE', 'CREATED_SITE', 'UPDATED_SITE_NAME',
            )
        ],
        db_index=True,
        max_length=255,
    ),
),
migrations.AlterField(
    model_name='audit',
    name='verb',
    # Re-sync of the "verb" choices after new AuditType members were added
    # (this revision ends at COMPLIANCE_SITE_CASE_NEW_LICENCE).  Every label
    # is exactly the lower-cased member name, so the pair list is derived
    # from one ordered tuple of member names; order is preserved so Django's
    # migration-state comparison sees an identical choices list.
    field=models.CharField(
        choices=[
            (api.audit_trail.enums.AuditType[member], member.lower())
            for member in (
                'CREATED', 'OGL_CREATED', 'OGL_FIELD_EDITED',
                'OGL_MULTI_FIELD_EDITED', 'ADD_FLAGS', 'REMOVE_FLAGS',
                'GOOD_REVIEWED', 'GOOD_ADD_FLAGS', 'GOOD_REMOVE_FLAGS',
                'GOOD_ADD_REMOVE_FLAGS', 'DESTINATION_ADD_FLAGS',
                'DESTINATION_REMOVE_FLAGS', 'ADD_GOOD_TO_APPLICATION',
                'REMOVE_GOOD_FROM_APPLICATION', 'ADD_GOOD_TYPE_TO_APPLICATION',
                'REMOVE_GOOD_TYPE_FROM_APPLICATION',
                'UPDATE_APPLICATION_END_USE_DETAIL',
                'UPDATE_APPLICATION_TEMPORARY_EXPORT',
                'REMOVED_SITES_FROM_APPLICATION', 'ADD_SITES_TO_APPLICATION',
                'REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION',
                'ADD_EXTERNAL_LOCATIONS_TO_APPLICATION',
                'REMOVED_COUNTRIES_FROM_APPLICATION',
                'ADD_COUNTRIES_TO_APPLICATION',
                'ADD_ADDITIONAL_CONTACT_TO_CASE', 'MOVE_CASE', 'ASSIGN_CASE',
                'REMOVE_CASE', 'REMOVE_CASE_FROM_ALL_QUEUES',
                'REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS', 'CLC_RESPONSE',
                'PV_GRADING_RESPONSE', 'CREATED_CASE_NOTE', 'ECJU_QUERY',
                'UPDATED_STATUS', 'UPDATED_APPLICATION_NAME',
                'UPDATE_APPLICATION_LETTER_REFERENCE',
                'UPDATE_APPLICATION_F680_CLEARANCE_TYPES',
                'ADDED_APPLICATION_LETTER_REFERENCE',
                'REMOVED_APPLICATION_LETTER_REFERENCE',
                'ASSIGNED_COUNTRIES_TO_GOOD', 'REMOVED_COUNTRIES_FROM_GOOD',
                'CREATED_FINAL_ADVICE', 'CLEARED_FINAL_ADVICE',
                'CREATED_TEAM_ADVICE', 'CLEARED_TEAM_ADVICE',
                'CREATED_USER_ADVICE', 'ADD_PARTY', 'REMOVE_PARTY',
                'UPLOAD_PARTY_DOCUMENT', 'DELETE_PARTY_DOCUMENT',
                'UPLOAD_APPLICATION_DOCUMENT', 'DELETE_APPLICATION_DOCUMENT',
                'UPLOAD_CASE_DOCUMENT', 'GENERATE_CASE_DOCUMENT',
                'ADD_CASE_OFFICER_TO_CASE', 'REMOVE_CASE_OFFICER_FROM_CASE',
                'GRANTED_APPLICATION', 'FINALISED_APPLICATION',
                'UNASSIGNED_QUEUES', 'UNASSIGNED',
                'UPDATED_LETTER_TEMPLATE_NAME',
                'ADDED_LETTER_TEMPLATE_CASE_TYPES',
                'UPDATED_LETTER_TEMPLATE_CASE_TYPES',
                'REMOVED_LETTER_TEMPLATE_CASE_TYPES',
                'ADDED_LETTER_TEMPLATE_DECISIONS',
                'UPDATED_LETTER_TEMPLATE_DECISIONS',
                'REMOVED_LETTER_TEMPLATE_DECISIONS',
                'UPDATED_LETTER_TEMPLATE_PARAGRAPHS',
                'UPDATED_LETTER_TEMPLATE_LAYOUT',
                'UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING',
                'CREATED_PICKLIST', 'UPDATED_PICKLIST_TEXT',
                'UPDATED_PICKLIST_NAME', 'DEACTIVATE_PICKLIST',
                'REACTIVATE_PICKLIST', 'UPDATED_EXHIBITION_DETAILS_TITLE',
                'UPDATED_EXHIBITION_DETAILS_START_DATE',
                'UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE',
                'UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE',
                'UPDATED_ROUTE_OF_GOODS', 'UPDATED_ORGANISATION',
                'CREATED_ORGANISATION', 'REGISTER_ORGANISATION',
                'REJECTED_ORGANISATION', 'APPROVED_ORGANISATION',
                'REMOVED_FLAG_ON_ORGANISATION', 'ADDED_FLAG_ON_ORGANISATION',
                'RERUN_ROUTING_RULES', 'ENFORCEMENT_CHECK',
                'UPDATED_SITE', 'CREATED_SITE', 'UPDATED_SITE_NAME',
                'COMPLIANCE_SITE_CASE_CREATE',
                'COMPLIANCE_SITE_CASE_NEW_LICENCE',
            )
        ],
        db_index=True,
        max_length=255,
    ),
),
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), (api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), 
(api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), 
(api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 
'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence')], db_index=True, max_length=255),
),
# Auto-generated migration operation (do not hand-edit the choices below):
# redeclares the full Audit.verb choices list. Relative to the previous
# AlterField in this file, this revision appends the
# COMPLIANCE_VISIT_CASE_CREATED/UPDATED and COMPLIANCE_PEOPLE_PRESENT_*
# audit types at the end of the list. Each choice pairs an AuditType enum
# member with its string value. NOTE(review): in Django, changing `choices`
# alone normally affects validation metadata only, not the DB column —
# confirm before assuming a schema change here.
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), (api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), 
(api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), 
(api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 
'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 'compliance_people_present_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted')], db_index=True, max_length=255),
),
# Auto-generated migration operation (do not hand-edit the choices below):
# redeclares the full Audit.verb choices list again. This revision appends
# the ADDED/EDITED/REMOVED_NEXT_REVIEW_DATE audit types (inserted before
# the COMPLIANCE_VISIT_* entries). The field itself stays a 255-char,
# db-indexed CharField — only the choices metadata differs from the
# previous operation.
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), (api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), 
(api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), 
(api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 
'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['ADDED_NEXT_REVIEW_DATE'], 'added_next_review_date'), (api.audit_trail.enums.AuditType['EDITED_NEXT_REVIEW_DATE'], 'edited_next_review_date'), (api.audit_trail.enums.AuditType['REMOVED_NEXT_REVIEW_DATE'], 'removed_next_review_date'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 'compliance_people_present_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted')], db_index=True, max_length=255),
),
# Auto-generated migration operation (do not hand-edit the choices below):
# redeclares the full Audit.verb choices list once more. This revision
# appends UPDATED_LETTER_TEMPLATE_INCLUDE_DIGITAL_SIGNATURE among the
# letter-template entries, keeping the ADDED/EDITED/REMOVED_NEXT_REVIEW_DATE
# and COMPLIANCE_* entries from earlier revisions. Field definition is
# otherwise identical: CharField(max_length=255, db_index=True).
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), (api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), 
(api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), 
(api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_INCLUDE_DIGITAL_SIGNATURE'], 'updated_letter_template_include_digital_signature'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), 
(api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['ADDED_NEXT_REVIEW_DATE'], 'added_next_review_date'), (api.audit_trail.enums.AuditType['EDITED_NEXT_REVIEW_DATE'], 'edited_next_review_date'), (api.audit_trail.enums.AuditType['REMOVED_NEXT_REVIEW_DATE'], 'removed_next_review_date'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 'compliance_people_present_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted')], db_index=True, max_length=255),
),
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), (api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), 
(api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['REINSTATED_APPLICATION'], 'reinstated_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 
'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_INCLUDE_DIGITAL_SIGNATURE'], 'updated_letter_template_include_digital_signature'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 
'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['ADDED_NEXT_REVIEW_DATE'], 'added_next_review_date'), (api.audit_trail.enums.AuditType['EDITED_NEXT_REVIEW_DATE'], 'edited_next_review_date'), (api.audit_trail.enums.AuditType['REMOVED_NEXT_REVIEW_DATE'], 'removed_next_review_date'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 'compliance_people_present_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted')], db_index=True, max_length=255),
),
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), (api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), 
(api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['REINSTATED_APPLICATION'], 'reinstated_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 
'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_INCLUDE_DIGITAL_SIGNATURE'], 'updated_letter_template_include_digital_signature'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 
'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['ADDED_NEXT_REVIEW_DATE'], 'added_next_review_date'), (api.audit_trail.enums.AuditType['EDITED_NEXT_REVIEW_DATE'], 'edited_next_review_date'), (api.audit_trail.enums.AuditType['REMOVED_NEXT_REVIEW_DATE'], 'removed_next_review_date'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 'compliance_people_present_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted'), (api.audit_trail.enums.AuditType['LICENCE_UPDATED_GOOD_USAGE'], 
'licence_updated_good_usage')], db_index=True, max_length=255),
),
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['ASSIGN_USER_TO_CASE'], 'assign_user_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), 
(api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), (api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['REINSTATED_APPLICATION'], 'reinstated_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 
'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_INCLUDE_DIGITAL_SIGNATURE'], 'updated_letter_template_include_digital_signature'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), 
(api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['ADDED_NEXT_REVIEW_DATE'], 'added_next_review_date'), (api.audit_trail.enums.AuditType['EDITED_NEXT_REVIEW_DATE'], 'edited_next_review_date'), (api.audit_trail.enums.AuditType['REMOVED_NEXT_REVIEW_DATE'], 'removed_next_review_date'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 'compliance_people_present_updated'), 
(api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted'), (api.audit_trail.enums.AuditType['LICENCE_UPDATED_GOOD_USAGE'], 'licence_updated_good_usage')], db_index=True, max_length=255),
),
# Auto-generated migration operation — do not hand-edit the choices literal.
# Re-declares Audit.verb with an updated AuditType choices list; compared with
# the immediately preceding AlterField, this list adds
# UPDATED_GOOD_ON_DESTINATION_MATRIX. Column shape is unchanged:
# CharField(max_length=255, db_index=True).
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['ASSIGN_USER_TO_CASE'], 'assign_user_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), 
(api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), (api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['REINSTATED_APPLICATION'], 'reinstated_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 
'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_INCLUDE_DIGITAL_SIGNATURE'], 'updated_letter_template_include_digital_signature'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 'updated_exhibition_details_reason_for_clearance'), 
(api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['ADDED_NEXT_REVIEW_DATE'], 'added_next_review_date'), (api.audit_trail.enums.AuditType['EDITED_NEXT_REVIEW_DATE'], 'edited_next_review_date'), (api.audit_trail.enums.AuditType['REMOVED_NEXT_REVIEW_DATE'], 'removed_next_review_date'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 'compliance_people_present_updated'), 
(api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted'), (api.audit_trail.enums.AuditType['UPDATED_GOOD_ON_DESTINATION_MATRIX'], 'updated_good_on_destination_matrix'), (api.audit_trail.enums.AuditType['LICENCE_UPDATED_GOOD_USAGE'], 'licence_updated_good_usage')], db_index=True, max_length=255),
),
# Auto-generated migration operation — do not hand-edit the choices literal.
# Re-declares Audit.verb with an updated AuditType choices list; compared with
# the immediately preceding AlterField, this list adds
# CREATED_DOCUMENT_TEMPLATE. Column shape is unchanged:
# CharField(max_length=255, db_index=True).
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['ASSIGN_USER_TO_CASE'], 'assign_user_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), 
(api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), (api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['REINSTATED_APPLICATION'], 'reinstated_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['CREATED_DOCUMENT_TEMPLATE'], 'created_document_template'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), 
(api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_INCLUDE_DIGITAL_SIGNATURE'], 'updated_letter_template_include_digital_signature'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 
'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['ADDED_NEXT_REVIEW_DATE'], 'added_next_review_date'), (api.audit_trail.enums.AuditType['EDITED_NEXT_REVIEW_DATE'], 'edited_next_review_date'), (api.audit_trail.enums.AuditType['REMOVED_NEXT_REVIEW_DATE'], 'removed_next_review_date'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 
'compliance_people_present_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted'), (api.audit_trail.enums.AuditType['UPDATED_GOOD_ON_DESTINATION_MATRIX'], 'updated_good_on_destination_matrix'), (api.audit_trail.enums.AuditType['LICENCE_UPDATED_GOOD_USAGE'], 'licence_updated_good_usage')], db_index=True, max_length=255),
),
# Auto-generated migration operation — do not hand-edit the choices literal.
# Re-declares Audit.verb with an updated AuditType choices list; compared with
# the immediately preceding AlterField, this list adds
# LICENCE_UPDATED_STATUS. Column shape is unchanged:
# CharField(max_length=255, db_index=True).
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['ASSIGN_USER_TO_CASE'], 'assign_user_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), 
(api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), (api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['REINSTATED_APPLICATION'], 'reinstated_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['CREATED_DOCUMENT_TEMPLATE'], 'created_document_template'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), 
(api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_INCLUDE_DIGITAL_SIGNATURE'], 'updated_letter_template_include_digital_signature'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 
'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['ADDED_NEXT_REVIEW_DATE'], 'added_next_review_date'), (api.audit_trail.enums.AuditType['EDITED_NEXT_REVIEW_DATE'], 'edited_next_review_date'), (api.audit_trail.enums.AuditType['REMOVED_NEXT_REVIEW_DATE'], 'removed_next_review_date'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 
'compliance_people_present_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted'), (api.audit_trail.enums.AuditType['UPDATED_GOOD_ON_DESTINATION_MATRIX'], 'updated_good_on_destination_matrix'), (api.audit_trail.enums.AuditType['LICENCE_UPDATED_GOOD_USAGE'], 'licence_updated_good_usage'), (api.audit_trail.enums.AuditType['LICENCE_UPDATED_STATUS'], 'licence_updated_status')], db_index=True, max_length=255),
),
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['ASSIGN_USER_TO_CASE'], 'assign_user_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), 
(api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), (api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['REINSTATED_APPLICATION'], 'reinstated_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['CREATED_DOCUMENT_TEMPLATE'], 'created_document_template'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), 
(api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_INCLUDE_DIGITAL_SIGNATURE'], 'updated_letter_template_include_digital_signature'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 
'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['ADDED_NEXT_REVIEW_DATE'], 'added_next_review_date'), (api.audit_trail.enums.AuditType['EDITED_NEXT_REVIEW_DATE'], 'edited_next_review_date'), (api.audit_trail.enums.AuditType['REMOVED_NEXT_REVIEW_DATE'], 'removed_next_review_date'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 
'compliance_people_present_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted'), (api.audit_trail.enums.AuditType['UPDATED_GOOD_ON_DESTINATION_MATRIX'], 'updated_good_on_destination_matrix'), (api.audit_trail.enums.AuditType['LICENCE_UPDATED_GOOD_USAGE'], 'licence_updated_good_usage'), (api.audit_trail.enums.AuditType['OGEL_REISSUED'], 'ogel_reissued'), (api.audit_trail.enums.AuditType['LICENCE_UPDATED_STATUS'], 'licence_updated_status')], db_index=True, max_length=255),
),
migrations.AlterField(
model_name='audit',
name='verb',
field=models.CharField(choices=[(api.audit_trail.enums.AuditType['CREATED'], 'created'), (api.audit_trail.enums.AuditType['OGL_CREATED'], 'ogl_created'), (api.audit_trail.enums.AuditType['OGL_FIELD_EDITED'], 'ogl_field_edited'), (api.audit_trail.enums.AuditType['OGL_MULTI_FIELD_EDITED'], 'ogl_multi_field_edited'), (api.audit_trail.enums.AuditType['ADD_FLAGS'], 'add_flags'), (api.audit_trail.enums.AuditType['REMOVE_FLAGS'], 'remove_flags'), (api.audit_trail.enums.AuditType['GOOD_REVIEWED'], 'good_reviewed'), (api.audit_trail.enums.AuditType['GOOD_ADD_FLAGS'], 'good_add_flags'), (api.audit_trail.enums.AuditType['GOOD_REMOVE_FLAGS'], 'good_remove_flags'), (api.audit_trail.enums.AuditType['GOOD_ADD_REMOVE_FLAGS'], 'good_add_remove_flags'), (api.audit_trail.enums.AuditType['DESTINATION_ADD_FLAGS'], 'destination_add_flags'), (api.audit_trail.enums.AuditType['DESTINATION_REMOVE_FLAGS'], 'destination_remove_flags'), (api.audit_trail.enums.AuditType['ADD_GOOD_TO_APPLICATION'], 'add_good_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_FROM_APPLICATION'], 'remove_good_from_application'), (api.audit_trail.enums.AuditType['ADD_GOOD_TYPE_TO_APPLICATION'], 'add_good_type_to_application'), (api.audit_trail.enums.AuditType['REMOVE_GOOD_TYPE_FROM_APPLICATION'], 'remove_good_type_from_application'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_END_USE_DETAIL'], 'update_application_end_use_detail'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_TEMPORARY_EXPORT'], 'update_application_temporary_export'), (api.audit_trail.enums.AuditType['REMOVED_SITES_FROM_APPLICATION'], 'removed_sites_from_application'), (api.audit_trail.enums.AuditType['ADD_SITES_TO_APPLICATION'], 'add_sites_to_application'), (api.audit_trail.enums.AuditType['REMOVED_EXTERNAL_LOCATIONS_FROM_APPLICATION'], 'removed_external_locations_from_application'), (api.audit_trail.enums.AuditType['ADD_EXTERNAL_LOCATIONS_TO_APPLICATION'], 'add_external_locations_to_application'), 
(api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_APPLICATION'], 'removed_countries_from_application'), (api.audit_trail.enums.AuditType['ADD_COUNTRIES_TO_APPLICATION'], 'add_countries_to_application'), (api.audit_trail.enums.AuditType['ADD_ADDITIONAL_CONTACT_TO_CASE'], 'add_additional_contact_to_case'), (api.audit_trail.enums.AuditType['MOVE_CASE'], 'move_case'), (api.audit_trail.enums.AuditType['ASSIGN_CASE'], 'assign_case'), (api.audit_trail.enums.AuditType['ASSIGN_USER_TO_CASE'], 'assign_user_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE'], 'remove_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_QUEUES'], 'remove_case_from_all_queues'), (api.audit_trail.enums.AuditType['REMOVE_CASE_FROM_ALL_USER_ASSIGNMENTS'], 'remove_case_from_all_user_assignments'), (api.audit_trail.enums.AuditType['CLC_RESPONSE'], 'clc_response'), (api.audit_trail.enums.AuditType['PV_GRADING_RESPONSE'], 'pv_grading_response'), (api.audit_trail.enums.AuditType['CREATED_CASE_NOTE'], 'created_case_note'), (api.audit_trail.enums.AuditType['ECJU_QUERY'], 'ecju_query'), (api.audit_trail.enums.AuditType['UPDATED_STATUS'], 'updated_status'), (api.audit_trail.enums.AuditType['UPDATED_APPLICATION_NAME'], 'updated_application_name'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_LETTER_REFERENCE'], 'update_application_letter_reference'), (api.audit_trail.enums.AuditType['UPDATE_APPLICATION_F680_CLEARANCE_TYPES'], 'update_application_f680_clearance_types'), (api.audit_trail.enums.AuditType['ADDED_APPLICATION_LETTER_REFERENCE'], 'added_application_letter_reference'), (api.audit_trail.enums.AuditType['REMOVED_APPLICATION_LETTER_REFERENCE'], 'removed_application_letter_reference'), (api.audit_trail.enums.AuditType['ASSIGNED_COUNTRIES_TO_GOOD'], 'assigned_countries_to_good'), (api.audit_trail.enums.AuditType['REMOVED_COUNTRIES_FROM_GOOD'], 'removed_countries_from_good'), (api.audit_trail.enums.AuditType['CREATED_FINAL_ADVICE'], 'created_final_advice'), 
(api.audit_trail.enums.AuditType['CLEARED_FINAL_ADVICE'], 'cleared_final_advice'), (api.audit_trail.enums.AuditType['CREATED_TEAM_ADVICE'], 'created_team_advice'), (api.audit_trail.enums.AuditType['CLEARED_TEAM_ADVICE'], 'cleared_team_advice'), (api.audit_trail.enums.AuditType['CREATED_USER_ADVICE'], 'created_user_advice'), (api.audit_trail.enums.AuditType['ADD_PARTY'], 'add_party'), (api.audit_trail.enums.AuditType['REMOVE_PARTY'], 'remove_party'), (api.audit_trail.enums.AuditType['UPLOAD_PARTY_DOCUMENT'], 'upload_party_document'), (api.audit_trail.enums.AuditType['DELETE_PARTY_DOCUMENT'], 'delete_party_document'), (api.audit_trail.enums.AuditType['UPLOAD_APPLICATION_DOCUMENT'], 'upload_application_document'), (api.audit_trail.enums.AuditType['DELETE_APPLICATION_DOCUMENT'], 'delete_application_document'), (api.audit_trail.enums.AuditType['UPLOAD_CASE_DOCUMENT'], 'upload_case_document'), (api.audit_trail.enums.AuditType['GENERATE_CASE_DOCUMENT'], 'generate_case_document'), (api.audit_trail.enums.AuditType['ADD_CASE_OFFICER_TO_CASE'], 'add_case_officer_to_case'), (api.audit_trail.enums.AuditType['REMOVE_CASE_OFFICER_FROM_CASE'], 'remove_case_officer_from_case'), (api.audit_trail.enums.AuditType['GRANTED_APPLICATION'], 'granted_application'), (api.audit_trail.enums.AuditType['REINSTATED_APPLICATION'], 'reinstated_application'), (api.audit_trail.enums.AuditType['FINALISED_APPLICATION'], 'finalised_application'), (api.audit_trail.enums.AuditType['UNASSIGNED_QUEUES'], 'unassigned_queues'), (api.audit_trail.enums.AuditType['UNASSIGNED'], 'unassigned'), (api.audit_trail.enums.AuditType['CREATED_DOCUMENT_TEMPLATE'], 'created_document_template'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_NAME'], 'updated_letter_template_name'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_CASE_TYPES'], 'added_letter_template_case_types'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_CASE_TYPES'], 'updated_letter_template_case_types'), 
(api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_CASE_TYPES'], 'removed_letter_template_case_types'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_DECISIONS'], 'added_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_DECISIONS'], 'updated_letter_template_decisions'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_DECISIONS'], 'removed_letter_template_decisions'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS'], 'updated_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['REMOVED_LETTER_TEMPLATE_PARAGRAPHS'], 'removed_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['ADDED_LETTER_TEMPLATE_PARAGRAPHS'], 'added_letter_template_paragraphs'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_LAYOUT'], 'updated_letter_template_layout'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_PARAGRAPHS_ORDERING'], 'updated_letter_template_paragraphs_ordering'), (api.audit_trail.enums.AuditType['UPDATED_LETTER_TEMPLATE_INCLUDE_DIGITAL_SIGNATURE'], 'updated_letter_template_include_digital_signature'), (api.audit_trail.enums.AuditType['CREATED_PICKLIST'], 'created_picklist'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_TEXT'], 'updated_picklist_text'), (api.audit_trail.enums.AuditType['UPDATED_PICKLIST_NAME'], 'updated_picklist_name'), (api.audit_trail.enums.AuditType['DEACTIVATE_PICKLIST'], 'deactivate_picklist'), (api.audit_trail.enums.AuditType['REACTIVATE_PICKLIST'], 'reactivate_picklist'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_TITLE'], 'updated_exhibition_details_title'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_START_DATE'], 'updated_exhibition_details_start_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REQUIRED_BY_DATE'], 'updated_exhibition_details_required_by_date'), (api.audit_trail.enums.AuditType['UPDATED_EXHIBITION_DETAILS_REASON_FOR_CLEARANCE'], 
'updated_exhibition_details_reason_for_clearance'), (api.audit_trail.enums.AuditType['UPDATED_ROUTE_OF_GOODS'], 'updated_route_of_goods'), (api.audit_trail.enums.AuditType['UPDATED_ORGANISATION'], 'updated_organisation'), (api.audit_trail.enums.AuditType['CREATED_ORGANISATION'], 'created_organisation'), (api.audit_trail.enums.AuditType['REGISTER_ORGANISATION'], 'register_organisation'), (api.audit_trail.enums.AuditType['REJECTED_ORGANISATION'], 'rejected_organisation'), (api.audit_trail.enums.AuditType['APPROVED_ORGANISATION'], 'approved_organisation'), (api.audit_trail.enums.AuditType['REMOVED_FLAG_ON_ORGANISATION'], 'removed_flag_on_organisation'), (api.audit_trail.enums.AuditType['ADDED_FLAG_ON_ORGANISATION'], 'added_flag_on_organisation'), (api.audit_trail.enums.AuditType['RERUN_ROUTING_RULES'], 'rerun_routing_rules'), (api.audit_trail.enums.AuditType['ENFORCEMENT_CHECK'], 'enforcement_check'), (api.audit_trail.enums.AuditType['UPDATED_SITE'], 'updated_site'), (api.audit_trail.enums.AuditType['CREATED_SITE'], 'created_site'), (api.audit_trail.enums.AuditType['UPDATED_SITE_NAME'], 'updated_site_name'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_CREATE'], 'compliance_site_case_create'), (api.audit_trail.enums.AuditType['COMPLIANCE_SITE_CASE_NEW_LICENCE'], 'compliance_site_case_new_licence'), (api.audit_trail.enums.AuditType['ADDED_NEXT_REVIEW_DATE'], 'added_next_review_date'), (api.audit_trail.enums.AuditType['EDITED_NEXT_REVIEW_DATE'], 'edited_next_review_date'), (api.audit_trail.enums.AuditType['REMOVED_NEXT_REVIEW_DATE'], 'removed_next_review_date'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_CREATED'], 'compliance_visit_case_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_VISIT_CASE_UPDATED'], 'compliance_visit_case_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_CREATED'], 'compliance_people_present_created'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_UPDATED'], 
'compliance_people_present_updated'), (api.audit_trail.enums.AuditType['COMPLIANCE_PEOPLE_PRESENT_DELETED'], 'compliance_people_present_deleted'), (api.audit_trail.enums.AuditType['UPDATED_GOOD_ON_DESTINATION_MATRIX'], 'updated_good_on_destination_matrix'), (api.audit_trail.enums.AuditType['LICENCE_UPDATED_GOOD_USAGE'], 'licence_updated_good_usage'), (api.audit_trail.enums.AuditType['OGEL_REISSUED'], 'ogel_reissued'), (api.audit_trail.enums.AuditType['LICENCE_UPDATED_STATUS'], 'licence_updated_status'), (api.audit_trail.enums.AuditType['DOCUMENT_ON_ORGANISATION_CREATE'], 'document_on_organisation_create')], db_index=True, max_length=255),
),
migrations.AlterField(
model_name='audit',
name='payload',
field=models.JSONField(default=dict),
),
]
| 456.364066
| 10,562
| 0.817672
| 24,721
| 193,042
| 5.910036
| 0.014765
| 0.1334
| 0.170217
| 0.235561
| 0.969254
| 0.965593
| 0.964504
| 0.963115
| 0.961801
| 0.960356
| 0
| 0.003321
| 0.042255
| 193,042
| 422
| 10,563
| 457.445498
| 0.786911
| 0.002051
| 0
| 0.5
| 1
| 0
| 0.497804
| 0.379328
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019886
| false
| 0
| 0.034091
| 0
| 0.088068
| 0.053977
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
e32c26d0e13f72a0c3472f17d920f95e1cb2754e
| 731
|
py
|
Python
|
service/smartNutrition/api/__init__.py
|
Smart-Nutrition/smart-nutrition
|
753df05b49aac8dca3ed557ec239140ff8c7f862
|
[
"Apache-2.0"
] | 1
|
2017-12-13T18:42:57.000Z
|
2017-12-13T18:42:57.000Z
|
service/smartNutrition/api/__init__.py
|
Smart-Nutrition/smart-nutrition
|
753df05b49aac8dca3ed557ec239140ff8c7f862
|
[
"Apache-2.0"
] | 10
|
2017-12-16T16:26:22.000Z
|
2018-09-10T02:31:47.000Z
|
service/smartNutrition/api/__init__.py
|
Smart-Nutrition/smart-nutrition
|
753df05b49aac8dca3ed557ec239140ff8c7f862
|
[
"Apache-2.0"
] | null | null | null |
"""API endpoints"""
from smartNutrition.api.app import login_api_route
from smartNutrition.api.app import logout_api_route
from smartNutrition.api.app import users_api_route
from smartNutrition.api.app import trip_api_route
from smartNutrition.api.app import summary_api_route
from smartNutrition.api.app import macronutrients_api_route
from smartNutrition.api.app import foodgroups_api_route
from smartNutrition.api.app import providers_api_route
from smartNutrition.api.app import provider_api_route
from smartNutrition.api.app import manual_trips_api_route
from smartNutrition.api.app import manual_trip_api_route
from smartNutrition.api.nutritionix import compute_trip_nutrition
from smartNutrition.api.nutritionix import ureg
| 48.733333
| 65
| 0.882353
| 106
| 731
| 5.839623
| 0.207547
| 0.378029
| 0.441034
| 0.426494
| 0.830372
| 0.693053
| 0.633279
| 0.142165
| 0
| 0
| 0
| 0
| 0.073871
| 731
| 14
| 66
| 52.214286
| 0.914328
| 0.017784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e346b2c3b4d031e750df1b0a2783aebdda52b18d
| 5,816
|
py
|
Python
|
vows/test_tags.py
|
htmue/python-autocheck
|
d97b2b21697e7bada5e33229efe075a668b3bbf3
|
[
"Unlicense"
] | 4
|
2015-04-20T23:55:11.000Z
|
2016-08-26T10:38:11.000Z
|
vows/test_tags.py
|
htmue/python-autocheck
|
d97b2b21697e7bada5e33229efe075a668b3bbf3
|
[
"Unlicense"
] | null | null | null |
vows/test_tags.py
|
htmue/python-autocheck
|
d97b2b21697e7bada5e33229efe075a668b3bbf3
|
[
"Unlicense"
] | null | null | null |
# -*- coding:utf-8 -*-
# Created by Hans-Thomas on 2011-06-01.
#=============================================================================
# test_tags.py --- Tag support vows
#=============================================================================
from __future__ import absolute_import, unicode_literals
from should_dsl import should
from six.moves import map
from autocheck.compat import unittest
from autocheck.tagexpression import TagExpression
from autocheck.tags import tag, get_tags
class TagDecoratorVows(unittest.TestCase):
    """Vows for the ``@tag`` decorator.

    Each test builds a throwaway ``TestCase`` subclass, decorates it (and/or
    its methods, superclasses, mixins) with ``@tag``, instantiates one test
    item, and asserts via the should_dsl pipe syntax that ``get_tags`` returns
    exactly the expected set of tag names.
    """

    def test_markes_test_item_by_test_case_decorator(self):
        # Tag applied to the TestCase class itself is visible on a test item
        # built from that class.
        @tag('include')
        class Test(unittest.TestCase):
            def test_method(self):
                pass

        test_item = Test('test_method')
        get_tags(test_item) |should| be_equal_to(set(['include']))

    def test_markes_test_item_by_test_method_decorator(self):
        # Tag applied to an individual test method (class itself untagged).
        class Test(unittest.TestCase):
            @tag('include')
            def test_method(self):
                pass

        test_item = Test('test_method')
        get_tags(test_item) |should| be_equal_to(set(['include']))

    def test_combines_tags_from_method_and_class(self):
        # Class-level and method-level tags are merged into one set.
        @tag('class')
        class Test(unittest.TestCase):
            @tag('method')
            def test_method(self):
                pass

        test_item = Test('test_method')
        get_tags(test_item) |should| be_equal_to(set(['class', 'method']))

    def test_combines_tags_from_class_and_superclass(self):
        # Tags are inherited from a tagged superclass and merged with the
        # subclass's own tag.
        @tag('super')
        class Super(unittest.TestCase):
            pass

        @tag('class')
        class Test(Super):
            def test_method(self):
                pass

        test_item = Test('test_method')
        get_tags(test_item) |should| be_equal_to(set(['class', 'super']))

    def test_combines_tags_from_method_and_superclass(self):
        # Method-level tag combined with a tag inherited from the superclass.
        @tag('super')
        class Super(unittest.TestCase):
            pass

        class Test(Super):
            @tag('method')
            def test_method(self):
                pass

        test_item = Test('test_method')
        get_tags(test_item) |should| be_equal_to(set(['method', 'super']))

    def test_combines_tags_from_class_and_mixin(self):
        # Tags on a plain (non-TestCase) mixin are also collected.
        @tag('mixin')
        class Mixin(object):
            pass

        @tag('class')
        class Test(Mixin, unittest.TestCase):
            def test_method(self):
                pass

        test_item = Test('test_method')
        get_tags(test_item) |should| be_equal_to(set(['class', 'mixin']))

    def test_combines_tags_from_class_superclass_and_mixin(self):
        # All three sources — mixin, superclass, and the class itself —
        # contribute to the final tag set.
        @tag('mixin')
        class Mixin(object):
            pass

        @tag('super')
        class Super(unittest.TestCase):
            pass

        @tag('class')
        class Test(Mixin, Super):
            def test_method(self):
                pass

        test_item = Test('test_method')
        get_tags(test_item) |should| be_equal_to(
            set(['class', 'mixin', 'super']))

    def test_combines_tags_from_method_class_superclass_and_mixin(self):
        # Method tag on top of mixin + superclass + class tags.
        @tag('mixin')
        class Mixin(object):
            pass

        @tag('super')
        class Super(unittest.TestCase):
            pass

        @tag('class')
        class Test(Mixin, Super):
            @tag('method')
            def test_method(self):
                pass

        test_item = Test('test_method')
        get_tags(test_item) |should| be_equal_to(
            set(['class', 'method', 'mixin', 'super']))

    def test_does_not_mix_tags_from_different_classes_with_same_mixin(self):
        # Two classes sharing the same mixin must not leak tags into each
        # other: 'other' (from the sibling class) must NOT appear here.
        @tag('mixin')
        class Mixin(object):
            pass

        @tag('other')
        class Other(Mixin, unittest.TestCase):
            pass

        @tag('class')
        class Test(Mixin, unittest.TestCase):
            @tag('method')
            def test_method(self):
                pass

        test_item = Test('test_method')
        get_tags(test_item) |should| be_equal_to(
            set(['class', 'method', 'mixin']))
class TagFilterVows(unittest.TestCase):
    """Vows for filtering a test suite with ``TagExpression``.

    Each test builds a one-item ``unittest.TestSuite``, filters it through a
    ``TagExpression`` ('@include' selects tagged tests; the '~' prefix
    negates), and compares the string forms of the surviving test items
    against the expected list via should_dsl.
    """

    def test_includes_by_test_case_decorator(self):
        # '@include' keeps a test whose TestCase class carries that tag.
        @tag('include')
        class Test(unittest.TestCase):
            def test_method(self):
                pass

        suite = unittest.TestSuite([Test('test_method')])
        tags = TagExpression('@include')
        filtered = tags.filter_suite(suite)
        map(str, filtered) |should| each_be_equal_to([
            'test_method (vows.test_tags.Test)',
        ])

    def test_excludes_by_test_case_decorator(self):
        # '~@include' removes a class-tagged test, leaving an empty result.
        @tag('include')
        class Test(unittest.TestCase):
            def test_method(self):
                pass

        suite = unittest.TestSuite([Test('test_method')])
        tags = TagExpression('~@include')
        filtered = tags.filter_suite(suite)
        map(str, filtered) |should| each_be_equal_to([])

    def test_includes_by_test_method_decorator(self):
        # '@include' also keeps a test tagged at the method level.
        class Test(unittest.TestCase):
            @tag('include')
            def test_method(self):
                pass

        suite = unittest.TestSuite([Test('test_method')])
        tags = TagExpression('@include')
        filtered = tags.filter_suite(suite)
        map(str, filtered) |should| each_be_equal_to([
            'test_method (vows.test_tags.Test)',
        ])

    def test_excludes_by_test_method_decorator(self):
        # '~@include' removes a method-tagged test as well.
        class Test(unittest.TestCase):
            @tag('include')
            def test_method(self):
                pass

        suite = unittest.TestSuite([Test('test_method')])
        tags = TagExpression('~@include')
        filtered = tags.filter_suite(suite)
        map(str, filtered) |should| each_be_equal_to([])
#.............................................................................
# test_tags.py
| 27.433962
| 78
| 0.563274
| 639
| 5,816
| 4.838811
| 0.112676
| 0.100259
| 0.054657
| 0.071475
| 0.85511
| 0.842497
| 0.833441
| 0.773933
| 0.754204
| 0.741268
| 0
| 0.002168
| 0.286279
| 5,816
| 211
| 79
| 27.563981
| 0.742713
| 0.058459
| 0
| 0.791667
| 0
| 0
| 0.089978
| 0.007681
| 0
| 0
| 0
| 0
| 0
| 1
| 0.180556
| false
| 0.152778
| 0.041667
| 0
| 0.388889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
e36a3c4de66264e6f20eb940e42ebcb1d26b1f2c
| 39,166
|
py
|
Python
|
test/model_test.py
|
aastrand/slapi
|
dedd11b28b75d2572ab71eb8baff5d1b9b5561a3
|
[
"Apache-2.0"
] | 1
|
2017-04-06T10:39:11.000Z
|
2017-04-06T10:39:11.000Z
|
test/model_test.py
|
aastrand/slapi
|
dedd11b28b75d2572ab71eb8baff5d1b9b5561a3
|
[
"Apache-2.0"
] | 3
|
2021-05-19T19:51:54.000Z
|
2021-05-23T10:13:36.000Z
|
test/model_test.py
|
aastrand/slapi
|
dedd11b28b75d2572ab71eb8baff5d1b9b5561a3
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import pprint
import copy
import datetime
import unittest
from mock import patch, Mock
from slapi import model
DEPARTURE_JSON_TESTINPUT = u"""
{
"ResponseData": {
"StopPointDeviations": [
{
"Deviation": {
"ImportanceLevel": 9,
"Consequence": null,
"Text": "För avgångstider, var god se sl.se eller tidtabell på hållplatsen."
},
"StopInfo": {
"GroupOfLine": "Tvärbanan",
"TransportMode": "TRAM",
"StopAreaName": "Sundbybergs centrum",
"StopAreaNumber": 0
}
}
],
"Ships": [],
"Trams": [],
"Trains": [
{
"SiteId": 9325,
"Destination": "Bålsta",
"LineNumber": "35",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "Nu",
"JourneyDirection": 2,
"SecondaryDestinationName": null,
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6032,
"StopPointDesignation": "2",
"TimeTabledDateTime": "2015-02-17T13:06:00",
"ExpectedDateTime": "2015-02-17T13:07:07"
},
{
"SiteId": 9325,
"Destination": "Västerhaninge",
"LineNumber": "35",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "2 min",
"JourneyDirection": 1,
"SecondaryDestinationName": "Stockholm C",
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6031,
"StopPointDesignation": "3",
"TimeTabledDateTime": "2015-02-17T13:09:00",
"ExpectedDateTime": "2015-02-17T13:09:00"
},
{
"SiteId": 9325,
"Destination": "Kungsängen",
"LineNumber": "35",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "14 min",
"JourneyDirection": 2,
"SecondaryDestinationName": null,
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6032,
"StopPointDesignation": "2",
"TimeTabledDateTime": "2015-02-17T13:21:00",
"ExpectedDateTime": "2015-02-17T13:21:00"
},
{
"SiteId": 9325,
"Destination": "Nynäshamn",
"LineNumber": "35",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "17 min",
"JourneyDirection": 1,
"SecondaryDestinationName": "Stockholm C",
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6031,
"StopPointDesignation": "3",
"TimeTabledDateTime": "2015-02-17T13:24:00",
"ExpectedDateTime": "2015-02-17T13:24:00"
},
{
"SiteId": 9325,
"Destination": "Fjärrtåg",
"LineNumber": "9002",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "13:32",
"JourneyDirection": 2,
"SecondaryDestinationName": null,
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6032,
"StopPointDesignation": "2",
"TimeTabledDateTime": "2015-02-17T13:32:00",
"ExpectedDateTime": "2015-02-17T13:32:00"
},
{
"SiteId": 9325,
"Destination": "Bålsta",
"LineNumber": "35",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "29 min",
"JourneyDirection": 2,
"SecondaryDestinationName": null,
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6032,
"StopPointDesignation": "2",
"TimeTabledDateTime": "2015-02-17T13:36:00",
"ExpectedDateTime": "2015-02-17T13:36:00"
},
{
"SiteId": 9325,
"Destination": "Västerhaninge",
"LineNumber": "35",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "13:39",
"JourneyDirection": 1,
"SecondaryDestinationName": "Stockholm C",
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6031,
"StopPointDesignation": "3",
"TimeTabledDateTime": "2015-02-17T13:39:00",
"ExpectedDateTime": "2015-02-17T13:39:00"
},
{
"SiteId": 9325,
"Destination": "Fjärrtåg",
"LineNumber": "9001",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "13:42",
"JourneyDirection": 1,
"SecondaryDestinationName": null,
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6031,
"StopPointDesignation": "3",
"TimeTabledDateTime": "2015-02-17T13:42:00",
"ExpectedDateTime": "2015-02-17T13:42:00"
},
{
"SiteId": 9325,
"Destination": "Kungsängen",
"LineNumber": "35",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "13:51",
"JourneyDirection": 2,
"SecondaryDestinationName": null,
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6032,
"StopPointDesignation": "2",
"TimeTabledDateTime": "2015-02-17T13:51:00",
"ExpectedDateTime": "2015-02-17T13:51:00"
},
{
"SiteId": 9325,
"Destination": "Nynäshamn",
"LineNumber": "35",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "13:54",
"JourneyDirection": 1,
"SecondaryDestinationName": "Stockholm C",
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6031,
"StopPointDesignation": "3",
"TimeTabledDateTime": "2015-02-17T13:54:00",
"ExpectedDateTime": "2015-02-17T13:54:00"
},
{
"SiteId": 9325,
"Destination": "Bålsta",
"LineNumber": "35",
"TransportMode": "TRAIN",
"Deviations": null,
"DisplayTime": "14:06",
"JourneyDirection": 2,
"SecondaryDestinationName": null,
"StopAreaName": "Sundbyberg",
"StopAreaNumber": 6031,
"StopPointNumber": 6032,
"StopPointDesignation": "2",
"TimeTabledDateTime": "2015-02-17T14:06:00",
"ExpectedDateTime": "2015-02-17T14:06:00"
}
],
"Buses": [
{
"SiteId": 9325,
"Destination": "Rissne",
"LineNumber": "504",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "4 min",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T13:11:00",
"ExpectedDateTime": "2015-02-17T13:11:00"
},
{
"SiteId": 9325,
"Destination": "Liljeholmen",
"LineNumber": "152",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "5 min",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50242,
"StopPointDesignation": "C",
"TimeTabledDateTime": "2015-02-17T13:12:02",
"ExpectedDateTime": "2015-02-17T13:12:02"
},
{
"SiteId": 9325,
"Destination": "Danderyds sjukhus",
"LineNumber": "509",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "6 min",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50242,
"StopPointDesignation": "C",
"TimeTabledDateTime": "2015-02-17T13:09:57",
"ExpectedDateTime": "2015-02-17T13:12:17"
},
{
"SiteId": 9325,
"Destination": "Odenplan",
"LineNumber": "515",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:15",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T13:15:00",
"ExpectedDateTime": "2015-02-17T13:15:00"
},
{
"SiteId": 9325,
"Destination": "Bromma flygplats",
"LineNumber": "152",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "8 min",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50243,
"StopPointDesignation": "D",
"TimeTabledDateTime": "2015-02-17T13:13:57",
"ExpectedDateTime": "2015-02-17T13:15:11"
},
{
"SiteId": 9325,
"Destination": "Karolinska sjukhuset",
"LineNumber": "506",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "9 min",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50304,
"StopPointDesignation": "B",
"TimeTabledDateTime": "2015-02-17T13:14:00",
"ExpectedDateTime": "2015-02-17T13:15:13"
},
{
"SiteId": 9325,
"Destination": "Hallonbergen",
"LineNumber": "506",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "9 min",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T13:16:00",
"ExpectedDateTime": "2015-02-17T13:16:00"
},
{
"SiteId": 9325,
"Destination": "Solna centrum",
"LineNumber": "113",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "10 min",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50242,
"StopPointDesignation": "C",
"TimeTabledDateTime": "2015-02-17T13:16:11",
"ExpectedDateTime": "2015-02-17T13:16:22"
},
{
"SiteId": 9325,
"Destination": "Brommaplan",
"LineNumber": "509",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "12 min",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50243,
"StopPointDesignation": "D",
"TimeTabledDateTime": "2015-02-17T13:16:56",
"ExpectedDateTime": "2015-02-17T13:18:49"
},
{
"SiteId": 9325,
"Destination": "Danderyds sjukhus",
"LineNumber": "509",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:24",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50242,
"StopPointDesignation": "C",
"TimeTabledDateTime": "2015-02-17T13:24:57",
"ExpectedDateTime": "2015-02-17T13:24:57"
},
{
"SiteId": 9325,
"Destination": "Rissne",
"LineNumber": "504",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "20 min",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T13:26:00",
"ExpectedDateTime": "2015-02-17T13:26:18"
},
{
"SiteId": 9325,
"Destination": "Blackebergs gård",
"LineNumber": "113",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:27",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50243,
"StopPointDesignation": "D",
"TimeTabledDateTime": "2015-02-17T13:27:33",
"ExpectedDateTime": "2015-02-17T13:27:33"
},
{
"SiteId": 9325,
"Destination": "Karolinska sjukhuset",
"LineNumber": "506",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "22 min",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50304,
"StopPointDesignation": "B",
"TimeTabledDateTime": "2015-02-17T13:29:00",
"ExpectedDateTime": "2015-02-17T13:29:00"
},
{
"SiteId": 9325,
"Destination": "Odenplan",
"LineNumber": "515",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:30",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T13:30:00",
"ExpectedDateTime": "2015-02-17T13:30:00"
},
{
"SiteId": 9325,
"Destination": "Hallonbergen",
"LineNumber": "506",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "24 min",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T13:31:00",
"ExpectedDateTime": "2015-02-17T13:31:00"
},
{
"SiteId": 9325,
"Destination": "Brommaplan",
"LineNumber": "509",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "29 min",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50243,
"StopPointDesignation": "D",
"TimeTabledDateTime": "2015-02-17T13:31:56",
"ExpectedDateTime": "2015-02-17T13:35:33"
},
{
"SiteId": 9325,
"Destination": "Danderyds sjukhus",
"LineNumber": "509",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:39",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50242,
"StopPointDesignation": "C",
"TimeTabledDateTime": "2015-02-17T13:39:57",
"ExpectedDateTime": "2015-02-17T13:39:57"
},
{
"SiteId": 9325,
"Destination": "Rissne",
"LineNumber": "504",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:41",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T13:41:00",
"ExpectedDateTime": "2015-02-17T13:41:00"
},
{
"SiteId": 9325,
"Destination": "Liljeholmen",
"LineNumber": "152",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:42",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50242,
"StopPointDesignation": "C",
"TimeTabledDateTime": "2015-02-17T13:42:02",
"ExpectedDateTime": "2015-02-17T13:42:02"
},
{
"SiteId": 9325,
"Destination": "Karolinska sjukhuset",
"LineNumber": "506",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:44",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50304,
"StopPointDesignation": "B",
"TimeTabledDateTime": "2015-02-17T13:44:00",
"ExpectedDateTime": "2015-02-17T13:44:00"
},
{
"SiteId": 9325,
"Destination": "Bromma flygplats",
"LineNumber": "152",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:44",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50243,
"StopPointDesignation": "D",
"TimeTabledDateTime": "2015-02-17T13:44:33",
"ExpectedDateTime": "2015-02-17T13:44:33"
},
{
"SiteId": 9325,
"Destination": "Odenplan",
"LineNumber": "515",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:45",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T13:45:00",
"ExpectedDateTime": "2015-02-17T13:45:00"
},
{
"SiteId": 9325,
"Destination": "Solna centrum",
"LineNumber": "113",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:46",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50242,
"StopPointDesignation": "C",
"TimeTabledDateTime": "2015-02-17T13:46:11",
"ExpectedDateTime": "2015-02-17T13:46:11"
},
{
"SiteId": 9325,
"Destination": "Hallonbergen",
"LineNumber": "506",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:46",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T13:46:00",
"ExpectedDateTime": "2015-02-17T13:46:20"
},
{
"SiteId": 9325,
"Destination": "Brommaplan",
"LineNumber": "509",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:46",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50243,
"StopPointDesignation": "D",
"TimeTabledDateTime": "2015-02-17T13:46:56",
"ExpectedDateTime": "2015-02-17T13:46:56"
},
{
"SiteId": 9325,
"Destination": "Danderyds sjukhus",
"LineNumber": "509",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:54",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50242,
"StopPointDesignation": "C",
"TimeTabledDateTime": "2015-02-17T13:54:57",
"ExpectedDateTime": "2015-02-17T13:54:57"
},
{
"SiteId": 9325,
"Destination": "Rissne",
"LineNumber": "504",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:56",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T13:56:00",
"ExpectedDateTime": "2015-02-17T13:56:00"
},
{
"SiteId": 9325,
"Destination": "Blackebergs gård",
"LineNumber": "113",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:57",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50243,
"StopPointDesignation": "D",
"TimeTabledDateTime": "2015-02-17T13:57:33",
"ExpectedDateTime": "2015-02-17T13:57:33"
},
{
"SiteId": 9325,
"Destination": "Karolinska sjukhuset",
"LineNumber": "506",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "13:59",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50304,
"StopPointDesignation": "B",
"TimeTabledDateTime": "2015-02-17T13:59:00",
"ExpectedDateTime": "2015-02-17T13:59:00"
},
{
"SiteId": 9325,
"Destination": "Odenplan",
"LineNumber": "515",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "14:00",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T14:00:00",
"ExpectedDateTime": "2015-02-17T14:00:00"
},
{
"SiteId": 9325,
"Destination": "Hallonbergen",
"LineNumber": "506",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "14:01",
"JourneyDirection": 1,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs station",
"StopAreaNumber": 12346,
"StopPointNumber": 50439,
"StopPointDesignation": "A",
"TimeTabledDateTime": "2015-02-17T14:01:00",
"ExpectedDateTime": "2015-02-17T14:01:00"
},
{
"SiteId": 9325,
"Destination": "Brommaplan",
"LineNumber": "509",
"TransportMode": "BUS",
"Deviations": null,
"DisplayTime": "14:01",
"JourneyDirection": 2,
"GroupOfLine": null,
"StopAreaName": "Sundbybergs torg",
"StopAreaNumber": 50242,
"StopPointNumber": 50243,
"StopPointDesignation": "D",
"TimeTabledDateTime": "2015-02-17T14:01:56",
"ExpectedDateTime": "2015-02-17T14:01:56"
}
],
"Metros": [
{
"SiteId": 9325,
"JourneyDirection": 1,
"Destination": "Hjulsta",
"LineNumber": "10",
"StopAreaName": "Sundbybergs centrum",
"GroupOfLine": "Tunnelbanans blå linje",
"DisplayTime": "Nu",
"SafeDestinationName": "Hjulsta",
"GroupOfLineId": 3,
"DepartureGroupId": 1,
"PlatformMessage": null,
"TransportMode": "METRO"
},
{
"SiteId": 9325,
"JourneyDirection": 1,
"Destination": "Hjulsta",
"LineNumber": "10",
"StopAreaName": "Sundbybergs centrum",
"GroupOfLine": "Tunnelbanans blå linje",
"DisplayTime": "11 min",
"SafeDestinationName": "Hjulsta",
"GroupOfLineId": 3,
"DepartureGroupId": 1,
"PlatformMessage": null,
"TransportMode": "METRO"
},
{
"SiteId": 9325,
"JourneyDirection": 1,
"Destination": "Hjulsta",
"LineNumber": "10",
"StopAreaName": "Sundbybergs centrum",
"GroupOfLine": "Tunnelbanans blå linje",
"DisplayTime": "21 min",
"SafeDestinationName": "Hjulsta",
"GroupOfLineId": 3,
"DepartureGroupId": 1,
"PlatformMessage": null,
"TransportMode": "METRO"
}
],
"DataAge": 25,
"LatestUpdate": "2015-02-17T13:05:47"
},
"ExecutionTime": 889,
"Message": null,
"StatusCode": 0
}
"""
SITE_JSON_TEST_INPUT = u"""
{
"ResponseData": [
{
"Y": "59360842",
"X": "17969256",
"Type": "Station",
"SiteId": "9325",
"Name": "Sundbyberg (Sundbyberg)"
}
],
"ExecutionTime": 0,
"Message": null,
"StatusCode": 0
}
"""
SITE_JSON_TEST_INPUT_LONG = u"""
{
"ResponseData": [
{
"Y": "59360842",
"X": "17969256",
"Type": "Station",
"SiteId": "9325",
"Name": "Sundbyberg (Sundbyberg)"
},
{
"Y": "59360842",
"X": "17969256",
"Type": "Station",
"SiteId": "9325",
"Name": "Sundbybergs centrum (Sundbyberg)"
},
{
"Y": "59360842",
"X": "17969256",
"Type": "Station",
"SiteId": "9325",
"Name": "Sundbybergs station (Sundbyberg)"
}
],
"ExecutionTime": 0,
"Message": null,
"StatusCode": 0
}
"""
class MyPrettyPrinter(pprint.PrettyPrinter):
    """Pretty printer subclass used while debugging parsed input.

    Sidesteps some unicode shenanigans by funnelling everything through
    the stock ``PrettyPrinter.format`` implementation.
    """

    def format(self, object, context, maxlevels, level):
        # Pure pass-through; the override exists only as a hook point for
        # debugging sessions.
        return super(MyPrettyPrinter, self).format(
            object, context, maxlevels, level)
class ModelTest(unittest.TestCase):
def test_compile_whitelist(self):
    """compile_whitelist maps lowercase config keys to capitalised
    transport groups and splits the comma-separated line lists into
    sets; keys it does not recognise ('crap') are dropped."""
    expected = {
        'Buses': set(['518', '119']),
        'Trains': set(['11', '10', '12']),
    }
    result = model.compile_whitelist({'trains': '10,11,12',
                                      'buses': '119,518'})
    self.assertEqual(result, expected)

    expected = {
        'Buses': set(['518', '119']),
        'Trains': set(['11', '10', '12']),
        'Trams': set(['grisar']),
    }
    result = model.compile_whitelist({'trains': '10,11,12',
                                      'buses': '119,518',
                                      'crap': 'johnny',
                                      'trams': 'grisar'})
    self.assertEqual(result, expected)
def test_parse_displayrow(self):
    """parse_displayrow splits a raw display-row string into dicts with
    linenumber/destination/displaytime keys.

    Informational messages yield an empty list, and non-string input is
    tolerated.  (An exact copy-paste duplicate of the '1 min' assertion
    was removed.)
    """
    expected = [{u'destination': u'Hjulsta', u'displaytime': u'11 min', u'linenumber': u'10'},
                {u'destination': u'Hjulsta', u'displaytime': u'21 min.', u'linenumber': u'10'}]
    self.assertEqual(model.parse_displayrow(u'10 Hjulsta 11 min, 10 Hjulsta 21 min.'),
                     expected)
    expected = [{u'linenumber': u'10',
                 u'destination': u'Kungsträdg.', u'displaytime': u'14 min.'}]
    self.assertEqual(model.parse_displayrow(u'10 Kungsträdg. 14 min.'),
                     expected)
    # Relative time without a trailing dot.
    expected = [{u'linenumber': u'10',
                 u'destination': u'Kungsträdg.', u'displaytime': u'1 min'}]
    self.assertEqual(model.parse_displayrow(u'10 Kungsträdg. 1 min'),
                     expected)
    # Informational messages carry no departures at all.
    expected = []
    self.assertEqual(model.parse_displayrow(u'Korta tåg, vänligen gå mot mitten av plattformen. Short trains, please continue to the middle of the platform.'),
                     expected)
    # Absolute HH:MM times, two departures without a comma separator.
    expected = [{u'linenumber': u'10', u'destination': u'Kungsträdg.', u'displaytime': u'01:49'},
                {u'linenumber': u'10', u'destination': u'Kungsträdg.', u'displaytime': u'02:19'}]
    self.assertEqual(model.parse_displayrow(u'10 Kungsträdg. 01:49 10 Kungsträdg. 02:19'),
                     expected)
    expected = [{u'destination': u'Hjulsta', u'displaytime': u'8 min', u'linenumber': u'10'},
                {u'destination': u'Hjulsta', u'displaytime': u'16 min.', u'linenumber': u'10'}]
    self.assertEqual(model.parse_displayrow(u'10 Hjulsta 8 min, 10 Hjulsta 16 min.'),
                     expected)
    # Non-string input falls through to an empty result.
    self.assertEqual(model.parse_displayrow({}), [])
@patch('slapi.model.get_now')
def test_parse_response(self, now_mock):
    """parse_json_response flattens the trains/buses/metros groups of the
    API payload into departure dicts; both sides are sorted by relative
    departure time before comparison so ordering between groups does not
    matter.  (A leftover debug ``print(out)`` was removed.)
    """
    now_mock.return_value = datetime.datetime(2013, 12, 1, 00, 30)
    expected = [{'transportmode': 'TRAIN', 'linenumber': '35', 'destination': 'Bålsta', 'displaytime': 'Nu', 'time': 0}, {'transportmode': 'METRO', 'linenumber': '10', 'destination': 'Hjulsta', 'displaytime': 'Nu', 'groupofline': 'Tunnelbanans blå linje', 'time': 0}, {'transportmode': 'TRAIN', 'linenumber': '35', 'destination': 'Västerhaninge', 'displaytime': '2 min', 'time': 2}, {'transportmode': 'BUS', 'linenumber': '504', 'destination': 'Rissne', 'displaytime': '4 min', 'groupofline': None, 'time': 4}, {'transportmode': 'BUS', 'linenumber': '152', 'destination': 'Liljeholmen', 'displaytime': '5 min', 'groupofline': None, 'time': 5}, {'transportmode': 'BUS', 'linenumber': '509', 'destination': 'Danderyds sjukhus', 'displaytime': '6 min', 'groupofline': None, 'time': 6}, {'transportmode': 'BUS', 'linenumber': '152', 'destination': 'Bromma flygplats', 'displaytime': '8 min', 'groupofline': None, 'time': 8}, {'transportmode': 'BUS', 'linenumber': '506', 'destination': 'Karolinska sjukhuset', 'displaytime': '9 min', 'groupofline': None, 'time': 9}, {'transportmode': 'BUS', 'linenumber': '506', 'destination': 'Hallonbergen', 'displaytime': '9 min', 'groupofline': None, 'time': 9}, {'transportmode': 'BUS', 'linenumber': '113', 'destination': 'Solna centrum', 'displaytime': '10 min', 'groupofline': None, 'time': 10}, {
        'transportmode': 'METRO', 'linenumber': '10', 'destination': 'Hjulsta', 'displaytime': '11 min', 'groupofline': 'Tunnelbanans blå linje', 'time': 11}, {'transportmode': 'BUS', 'linenumber': '509', 'destination': 'Brommaplan', 'displaytime': '12 min', 'groupofline': None, 'time': 12}, {'transportmode': 'TRAIN', 'linenumber': '35', 'destination': 'Kungsängen', 'displaytime': '14 min', 'time': 14}, {'transportmode': 'TRAIN', 'linenumber': '35', 'destination': 'Nynäshamn', 'displaytime': '17 min', 'time': 17}, {'transportmode': 'BUS', 'linenumber': '504', 'destination': 'Rissne', 'displaytime': '20 min', 'groupofline': None, 'time': 20}, {'transportmode': 'METRO', 'linenumber': '10', 'destination': 'Hjulsta', 'displaytime': '21 min', 'groupofline': 'Tunnelbanans blå linje', 'time': 21}, {'transportmode': 'BUS', 'linenumber': '506', 'destination': 'Karolinska sjukhuset', 'displaytime': '22 min', 'groupofline': None, 'time': 22}, {'transportmode': 'BUS', 'linenumber': '506', 'destination': 'Hallonbergen', 'displaytime': '24 min', 'groupofline': None, 'time': 24}, {'transportmode': 'TRAIN', 'linenumber': '35', 'destination': 'Bålsta', 'displaytime': '29 min', 'time': 29}, {'transportmode': 'BUS', 'linenumber': '509', 'destination': 'Brommaplan', 'displaytime': '29 min', 'groupofline': None, 'time': 29}]
    expected.sort(key=lambda x: x['time'])
    out = model.parse_json_response(DEPARTURE_JSON_TESTINPUT)
    out.sort(key=lambda x: x['time'])
    self.assertEqual(out, expected)
@patch('slapi.model.get_now')
def test_convert_time(self, now_mock):
    """convert_time turns a display time into whole minutes from 'now':
    HH:MM is measured against the mocked clock (rolling over midnight),
    'N min' strings keep N, bare numbers pass through, 'Nu' is zero and
    '-' maps to -1."""
    now_mock.return_value = datetime.datetime(2013, 12, 1, 13, 2)
    self.assertEqual(model.convert_time('13:10'), 8)
    # Crossing midnight still yields a positive distance.
    now_mock.return_value = datetime.datetime(2013, 12, 1, 23, 42)
    self.assertEqual(model.convert_time('00:15'), 33)
    # Seconds on the clock are ignored when the minute matches exactly.
    now_mock.return_value = datetime.datetime(2013, 12, 1, 23, 42, 30)
    self.assertEqual(model.convert_time('23:42'), 0)
    # Clock-independent inputs, checked in the original order.
    for raw, minutes in [('10 min', 10),
                         ('1 min.', 1),
                         ('100', 100),
                         ('Nu', 0),
                         ('-1 min', -1),
                         ('-', -1)]:
        self.assertEqual(model.convert_time(raw), minutes)
@patch('slapi.model.requests')
@patch('slapi.model.get_now')
def test_get_departure(self, now_mock, req_mock):
    """get_departure raises ApiException on a non-200 response and on
    success parses the JSON body into a non-empty list of dicts."""
    now_mock.return_value = datetime.datetime(2013, 12, 1, 13, 2)
    response = Mock()
    response.status_code = 500
    req_mock.get = Mock(return_value=response)
    self.assertRaises(model.ApiException, model.get_departure,
                      'http://test/%s/%s', 31337, 'deadbeef')
    # Same mocked response object, now flipped to success.
    response.status_code = 200
    response.text = DEPARTURE_JSON_TESTINPUT
    departures = model.get_departure('http://test/%s/%s', 31337, 'deadbeef')
    self.assertEqual(type(departures), list)
    self.assertEqual(len(departures), 44)
    for departure in departures:
        self.assertEqual(type(departure), dict)
        self.assertTrue(len(departure) > 0)
@patch('slapi.model.requests')
@patch('slapi.model.get_now')
def test_get_departures(self, now_mock, req_mock):
    """get_departures behaves like get_departure with the configured URL:
    ApiException on HTTP errors, a list of departure dicts on success."""
    now_mock.return_value = datetime.datetime(2013, 12, 1, 13, 2)
    failing = Mock()
    failing.status_code = 500
    req_mock.get = Mock(return_value=failing)
    self.assertRaises(model.ApiException, model.get_departures,
                      31337, 'deadbeef')
    # Hand out canned payloads one per request.
    pending = [DEPARTURE_JSON_TESTINPUT]

    def fake_get(*args):
        response = Mock()
        response.status_code = 200
        response.text = pending.pop(0)
        return response

    req_mock.get = fake_get
    departures = model.get_departures(31337, 'deadbeef')
    self.assertEqual(type(departures), list)
    self.assertEqual(len(departures), 44)
    for departure in departures:
        self.assertEqual(type(departure), dict)
        self.assertTrue(len(departure) > 0)
@patch('slapi.model.get_now')
def test_handle_flapping_displays(self, now_mock):
    """Departures missing from a fresh reading but present in a recent
    cache ('flapping' displays) are resurrected with firstseen/firsttime
    bookkeeping; entries whose time has already passed are dropped.

    NOTE(review): assertions describe handle_flapping_displays as seen
    from here — confirm against slapi.model for the exact aging rules.
    """
    now_mock.return_value = datetime.datetime(2013, 12, 1, 00, 26)
    # Snapshot as it looked when it was cached (two minutes ago).
    cached = [{u'destination': u'Kungsträdg.',
               u'displaytime': u'5 min',
               u'groupofline': u'Tunnelbanans blå linje',
               u'linenumber': u'10',
               u'stationname': u'Sundbybergs centrum',
               u'time': 2,
               u'transportmode': u'METRO'},
              {u'destination': u'Kungsträdg.',
               u'displaytime': u'5 min',
               u'groupofline': u'Tunnelbanans blå linje',
               u'linenumber': u'10',
               u'stationname': u'Sundbybergs centrum',
               u'time': 5,
               u'transportmode': u'METRO'},
              {u'destination': u'Kungsträdg.',
               u'displaytime': u'12 min.',
               u'groupofline': u'Tunnelbanans blå linje',
               u'linenumber': u'10',
               u'stationname': u'Sundbybergs centrum',
               u'time': 12,
               u'transportmode': u'METRO'},
              {u'destination': u'Hjulsta',
               u'displaytime': u'1 min',
               u'groupofline': u'Tunnelbanans blå linje',
               u'linenumber': u'10',
               u'stationname': u'Sundbybergs centrum',
               u'time': 4,
               u'transportmode': u'METRO'},
              {u'destination': u'Hjulsta',
               u'displaytime': u'8 min',
               u'groupofline': u'Tunnelbanans blå linje',
               u'linenumber': u'10',
               u'stationname': u'Sundbybergs centrum',
               u'time': 8,
               u'transportmode': u'METRO'},
              {u'destination': u'Hjulsta',
               u'displaytime': u'16 min.',
               u'groupofline': u'Tunnelbanans blå linje',
               u'linenumber': u'10',
               u'stationname': u'Sundbybergs centrum',
               u'time': 16,
               u'transportmode': u'METRO'}]
    # let two minutes pass
    data = copy.deepcopy(cached)
    for d in data:
        d[u'time'] -= 2
    # first time, no flaps
    self.assertEqual(model.handle_flapping_displays('4711', data, {}),
                     [])
    # make the two hjulsta departures flap
    ts = datetime.datetime(2013, 12, 1, 00, 24)
    expected = []
    # Popping index 4 twice removes the 8 min and 16 min Hjulsta entries.
    expected.append(data.pop(4))
    expected.append(data.pop(4))
    expected[0][u'firstseen'] = ts
    expected[1][u'firstseen'] = ts
    # firsttime records the time as it was when the entry was cached.
    expected[0][u'firsttime'] = expected[0][u'time'] + 2
    expected[1][u'firsttime'] = expected[1][u'time'] + 2
    cache = {'4711': (ts, cached)}
    # expect them back
    self.assertEqual(model.handle_flapping_displays('4711', data, cache),
                     expected)
    # age the cache 10 mins, now only the 16 min departure is relevant
    ts = datetime.datetime(2013, 12, 1, 00, 24)
    cache = {'4711': (ts, cached)}
    expected.pop(0)
    expected[0][u'time'] -= 8
    expected[0][u'firstseen'] = ts
    expected[0][u'firsttime'] = expected[0][u'time'] + 10
    now_mock.return_value = datetime.datetime(2013, 12, 1, 00, 34)
    self.assertEqual(model.handle_flapping_displays('4711', data, cache),
                     expected)
def test_parse_site_response(self):
    """parse_json_site_response reduces each ResponseData entry to a
    dict holding only its name; an empty payload yields []."""
    single = [{u'name': u'Sundbyberg (Sundbyberg)'}]
    self.assertEqual(model.parse_json_site_response(SITE_JSON_TEST_INPUT),
                     single)
    several = [{u'name': u'Sundbyberg (Sundbyberg)'},
               {u'name': u'Sundbybergs centrum (Sundbyberg)'},
               {u'name': u'Sundbybergs station (Sundbyberg)'}]
    self.assertEqual(model.parse_json_site_response(SITE_JSON_TEST_INPUT_LONG),
                     several)
    self.assertEqual(model.parse_json_site_response('{}'), [])
@patch('slapi.model.requests')
def test_get_station_name(self, req_mock):
    """get_station_name raises ApiException on HTTP errors and on empty
    payloads, and on success returns the first site name."""
    error = Mock()
    error.status_code = 500
    req_mock.get = Mock(return_value=error)
    self.assertRaises(model.ApiException, model.get_station_name,
                      31337, 'deadbeef')

    def empty_get(*args):
        response = Mock()
        response.status_code = 200
        response.text = '{}'
        return response

    req_mock.get = empty_get
    self.assertRaises(model.ApiException, model.get_station_name,
                      31337, 'deadbeef')
    # Lookups go through model.cache; clear it so the next request is
    # actually issued.
    model.cache.clear()
    pending = [SITE_JSON_TEST_INPUT]

    def site_get(*args):
        response = Mock()
        response.status_code = 200
        response.text = pending.pop(0)
        return response

    req_mock.get = site_get
    self.assertEqual(model.get_station_name(31337, 'deadbeef'),
                     u'Sundbyberg (Sundbyberg)')
| 35.540835
| 1,339
| 0.544145
| 3,396
| 39,166
| 6.229682
| 0.090989
| 0.024674
| 0.041076
| 0.05346
| 0.894734
| 0.805493
| 0.7725
| 0.729628
| 0.70727
| 0.70207
| 0
| 0.099383
| 0.296839
| 39,166
| 1,101
| 1,340
| 35.573115
| 0.668809
| 0.006945
| 0
| 0.659245
| 0
| 0.000968
| 0.740281
| 0.106692
| 0
| 0
| 0
| 0
| 0.037754
| 1
| 0.012585
| false
| 0
| 0.006776
| 0.000968
| 0.025169
| 0.003872
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8b5b8729984345a88881250047ffa87aecd401b9
| 32,842
|
py
|
Python
|
operators/azure-service-operator/python/pulumi_pulumi_kubernetes_crds_operators_azure_service_operator/azure/v1alpha2/outputs.py
|
pulumi/pulumi-kubernetes-crds
|
372c4c0182f6b899af82d6edaad521aa14f22150
|
[
"Apache-2.0"
] | null | null | null |
operators/azure-service-operator/python/pulumi_pulumi_kubernetes_crds_operators_azure_service_operator/azure/v1alpha2/outputs.py
|
pulumi/pulumi-kubernetes-crds
|
372c4c0182f6b899af82d6edaad521aa14f22150
|
[
"Apache-2.0"
] | 2
|
2020-09-18T17:12:23.000Z
|
2020-12-30T19:40:56.000Z
|
operators/azure-service-operator/python/pulumi_pulumi_kubernetes_crds_operators_azure_service_operator/azure/v1alpha2/outputs.py
|
pulumi/pulumi-kubernetes-crds
|
372c4c0182f6b899af82d6edaad521aa14f22150
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by crd2pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
# Public surface of this generated module: one output type per CRD
# spec/status object emitted by crd2pulumi.
__all__ = [
    'BlobContainerSpec',
    'BlobContainerStatus',
    'MySQLServerSpec',
    'MySQLServerSpecReplicaProperties',
    'MySQLServerSpecSku',
    'MySQLServerSpecStorageProfile',
    'MySQLServerStatus',
    'PostgreSQLServerSpec',
    'PostgreSQLServerSpecReplicaProperties',
    'PostgreSQLServerSpecSku',
    'PostgreSQLServerSpecStorageProfile',
    'PostgreSQLServerStatus',
]
@pulumi.output_type
class BlobContainerSpec(dict):
    """
    BlobContainerSpec defines the desired state of BlobContainer
    """
    def __init__(__self__, *,
                 location: str,
                 resource_group: str,
                 access_level: Optional[str] = None,
                 account_name: Optional[str] = None):
        """
        BlobContainerSpec defines the desired state of BlobContainer
        :param str location: INSERT ADDITIONAL SPEC FIELDS - desired state of cluster Important: Run "make" to regenerate code after modifying this file
        :param str access_level: PublicAccess enumerates the values for public access.
        """
        # pulumi.set writes into the underlying dict using the snake_case key;
        # the @pulumi.output_type machinery handles name translation on access.
        pulumi.set(__self__, "location", location)
        pulumi.set(__self__, "resource_group", resource_group)
        # Optional fields are stored only when supplied, keeping the dict sparse.
        if access_level is not None:
            pulumi.set(__self__, "access_level", access_level)
        if account_name is not None:
            pulumi.set(__self__, "account_name", account_name)
    @property
    @pulumi.getter
    def location(self) -> str:
        """
        INSERT ADDITIONAL SPEC FIELDS - desired state of cluster Important: Run "make" to regenerate code after modifying this file
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> str:
        return pulumi.get(self, "resource_group")
    @property
    @pulumi.getter(name="accessLevel")
    def access_level(self) -> Optional[str]:
        """
        PublicAccess enumerates the values for public access.
        """
        return pulumi.get(self, "access_level")
    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> Optional[str]:
        return pulumi.get(self, "account_name")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BlobContainerStatus(dict):
    """
    ASOStatus (AzureServiceOperatorsStatus) defines the observed state of resource actions
    """
    def __init__(__self__, *,
                 completed: Optional[str] = None,
                 contains_update: Optional[bool] = None,
                 failed_provisioning: Optional[bool] = None,
                 flattened_secrets: Optional[bool] = None,
                 message: Optional[str] = None,
                 output: Optional[str] = None,
                 polling_url: Optional[str] = None,
                 provisioned: Optional[bool] = None,
                 provisioning: Optional[bool] = None,
                 requested: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 spec_hash: Optional[str] = None,
                 state: Optional[str] = None):
        """
        ASOStatus (AzureServiceOperatorsStatus) defines the observed state of resource actions
        """
        # Every status field is optional; only values actually reported by the
        # operator are stored (pulumi.set writes into the underlying dict).
        if completed is not None:
            pulumi.set(__self__, "completed", completed)
        if contains_update is not None:
            pulumi.set(__self__, "contains_update", contains_update)
        if failed_provisioning is not None:
            pulumi.set(__self__, "failed_provisioning", failed_provisioning)
        if flattened_secrets is not None:
            pulumi.set(__self__, "flattened_secrets", flattened_secrets)
        if message is not None:
            pulumi.set(__self__, "message", message)
        if output is not None:
            pulumi.set(__self__, "output", output)
        if polling_url is not None:
            pulumi.set(__self__, "polling_url", polling_url)
        if provisioned is not None:
            pulumi.set(__self__, "provisioned", provisioned)
        if provisioning is not None:
            pulumi.set(__self__, "provisioning", provisioning)
        if requested is not None:
            pulumi.set(__self__, "requested", requested)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if spec_hash is not None:
            pulumi.set(__self__, "spec_hash", spec_hash)
        if state is not None:
            pulumi.set(__self__, "state", state)
    @property
    @pulumi.getter
    def completed(self) -> Optional[str]:
        return pulumi.get(self, "completed")
    @property
    @pulumi.getter(name="containsUpdate")
    def contains_update(self) -> Optional[bool]:
        return pulumi.get(self, "contains_update")
    @property
    @pulumi.getter(name="failedProvisioning")
    def failed_provisioning(self) -> Optional[bool]:
        return pulumi.get(self, "failed_provisioning")
    @property
    @pulumi.getter(name="flattenedSecrets")
    def flattened_secrets(self) -> Optional[bool]:
        return pulumi.get(self, "flattened_secrets")
    @property
    @pulumi.getter
    def message(self) -> Optional[str]:
        return pulumi.get(self, "message")
    @property
    @pulumi.getter
    def output(self) -> Optional[str]:
        return pulumi.get(self, "output")
    @property
    @pulumi.getter(name="pollingUrl")
    def polling_url(self) -> Optional[str]:
        return pulumi.get(self, "polling_url")
    @property
    @pulumi.getter
    def provisioned(self) -> Optional[bool]:
        return pulumi.get(self, "provisioned")
    @property
    @pulumi.getter
    def provisioning(self) -> Optional[bool]:
        return pulumi.get(self, "provisioning")
    @property
    @pulumi.getter
    def requested(self) -> Optional[str]:
        return pulumi.get(self, "requested")
    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")
    @property
    @pulumi.getter(name="specHash")
    def spec_hash(self) -> Optional[str]:
        return pulumi.get(self, "spec_hash")
    @property
    @pulumi.getter
    def state(self) -> Optional[str]:
        return pulumi.get(self, "state")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class MySQLServerSpec(dict):
    """
    MySQLServerSpec defines the desired state of MySQLServer
    """
    def __init__(__self__, *,
                 location: str,
                 resource_group: str,
                 create_mode: Optional[str] = None,
                 key_vault_to_store_secrets: Optional[str] = None,
                 replica_properties: Optional['outputs.MySQLServerSpecReplicaProperties'] = None,
                 server_version: Optional[str] = None,
                 sku: Optional['outputs.MySQLServerSpecSku'] = None,
                 ssl_enforcement: Optional[str] = None,
                 storage_profile: Optional['outputs.MySQLServerSpecStorageProfile'] = None):
        """
        MySQLServerSpec defines the desired state of MySQLServer
        :param str server_version: ServerVersion enumerates the values for server version.
        """
        # Required fields are always stored; pulumi.set writes into the
        # underlying dict using the snake_case key.
        pulumi.set(__self__, "location", location)
        pulumi.set(__self__, "resource_group", resource_group)
        # Optional fields are stored only when supplied, keeping the dict sparse.
        if create_mode is not None:
            pulumi.set(__self__, "create_mode", create_mode)
        if key_vault_to_store_secrets is not None:
            pulumi.set(__self__, "key_vault_to_store_secrets", key_vault_to_store_secrets)
        if replica_properties is not None:
            pulumi.set(__self__, "replica_properties", replica_properties)
        if server_version is not None:
            pulumi.set(__self__, "server_version", server_version)
        if sku is not None:
            pulumi.set(__self__, "sku", sku)
        if ssl_enforcement is not None:
            pulumi.set(__self__, "ssl_enforcement", ssl_enforcement)
        if storage_profile is not None:
            pulumi.set(__self__, "storage_profile", storage_profile)
    @property
    @pulumi.getter
    def location(self) -> str:
        return pulumi.get(self, "location")
    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> str:
        return pulumi.get(self, "resource_group")
    @property
    @pulumi.getter(name="createMode")
    def create_mode(self) -> Optional[str]:
        return pulumi.get(self, "create_mode")
    @property
    @pulumi.getter(name="keyVaultToStoreSecrets")
    def key_vault_to_store_secrets(self) -> Optional[str]:
        return pulumi.get(self, "key_vault_to_store_secrets")
    @property
    @pulumi.getter(name="replicaProperties")
    def replica_properties(self) -> Optional['outputs.MySQLServerSpecReplicaProperties']:
        return pulumi.get(self, "replica_properties")
    @property
    @pulumi.getter(name="serverVersion")
    def server_version(self) -> Optional[str]:
        """
        ServerVersion enumerates the values for server version.
        """
        return pulumi.get(self, "server_version")
    @property
    @pulumi.getter
    def sku(self) -> Optional['outputs.MySQLServerSpecSku']:
        return pulumi.get(self, "sku")
    @property
    @pulumi.getter(name="sslEnforcement")
    def ssl_enforcement(self) -> Optional[str]:
        return pulumi.get(self, "ssl_enforcement")
    @property
    @pulumi.getter(name="storageProfile")
    def storage_profile(self) -> Optional['outputs.MySQLServerSpecStorageProfile']:
        return pulumi.get(self, "storage_profile")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class MySQLServerSpecReplicaProperties(dict):
    """Replica settings for a MySQLServer: the ARM id of the source server to replicate from."""
    def __init__(__self__, *,
                 source_server_id: Optional[str] = None):
        # Stored only when supplied, keeping the dict sparse.
        if source_server_id is not None:
            pulumi.set(__self__, "source_server_id", source_server_id)
    @property
    @pulumi.getter(name="sourceServerId")
    def source_server_id(self) -> Optional[str]:
        return pulumi.get(self, "source_server_id")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class MySQLServerSpecSku(dict):
    """SKU (pricing tier / capacity) settings for a MySQLServer."""
    def __init__(__self__, *,
                 capacity: Optional[int] = None,
                 family: Optional[str] = None,
                 name: Optional[str] = None,
                 size: Optional[str] = None,
                 tier: Optional[str] = None):
        """
        :param int capacity: Capacity - The scale up/out capacity, representing server's compute units.
        :param str family: Family - The family of hardware.
        :param str name: Name - The name of the sku, typically, tier + family + cores, e.g. B_Gen4_1, GP_Gen5_8.
        :param str size: Size - The size code, to be interpreted by resource as appropriate.
        :param str tier: Tier - The tier of the particular SKU, e.g. Basic. Possible values include: 'Basic', 'GeneralPurpose', 'MemoryOptimized'
        """
        # All fields optional; stored only when supplied.
        if capacity is not None:
            pulumi.set(__self__, "capacity", capacity)
        if family is not None:
            pulumi.set(__self__, "family", family)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if size is not None:
            pulumi.set(__self__, "size", size)
        if tier is not None:
            pulumi.set(__self__, "tier", tier)
    @property
    @pulumi.getter
    def capacity(self) -> Optional[int]:
        """
        Capacity - The scale up/out capacity, representing server's compute units.
        """
        return pulumi.get(self, "capacity")
    @property
    @pulumi.getter
    def family(self) -> Optional[str]:
        """
        Family - The family of hardware.
        """
        return pulumi.get(self, "family")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        Name - The name of the sku, typically, tier + family + cores, e.g. B_Gen4_1, GP_Gen5_8.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def size(self) -> Optional[str]:
        """
        Size - The size code, to be interpreted by resource as appropriate.
        """
        return pulumi.get(self, "size")
    @property
    @pulumi.getter
    def tier(self) -> Optional[str]:
        """
        Tier - The tier of the particular SKU, e.g. Basic. Possible values include: 'Basic', 'GeneralPurpose', 'MemoryOptimized'
        """
        return pulumi.get(self, "tier")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class MySQLServerSpecStorageProfile(dict):
    """Storage settings (size, backups, auto-grow) for a MySQLServer."""
    def __init__(__self__, *,
                 backup_retention_days: Optional[int] = None,
                 geo_redundant_backup: Optional[str] = None,
                 storage_autogrow: Optional[str] = None,
                 storage_mb: Optional[int] = None):
        """
        :param int backup_retention_days: BackupRetentionDays - Backup retention days for the server.
        :param str geo_redundant_backup: GeoRedundantBackup - Enable Geo-redundant or not for server backup. Possible values include: 'Enabled', 'Disabled'
        :param str storage_autogrow: StorageAutogrow - Enable Storage Auto Grow. Possible values include: 'StorageAutogrowEnabled', 'StorageAutogrowDisabled'
        :param int storage_mb: StorageMB - Max storage allowed for a server.
        """
        # All fields optional; stored only when supplied.
        if backup_retention_days is not None:
            pulumi.set(__self__, "backup_retention_days", backup_retention_days)
        if geo_redundant_backup is not None:
            pulumi.set(__self__, "geo_redundant_backup", geo_redundant_backup)
        if storage_autogrow is not None:
            pulumi.set(__self__, "storage_autogrow", storage_autogrow)
        if storage_mb is not None:
            pulumi.set(__self__, "storage_mb", storage_mb)
    @property
    @pulumi.getter(name="backupRetentionDays")
    def backup_retention_days(self) -> Optional[int]:
        """
        BackupRetentionDays - Backup retention days for the server.
        """
        return pulumi.get(self, "backup_retention_days")
    @property
    @pulumi.getter(name="geoRedundantBackup")
    def geo_redundant_backup(self) -> Optional[str]:
        """
        GeoRedundantBackup - Enable Geo-redundant or not for server backup. Possible values include: 'Enabled', 'Disabled'
        """
        return pulumi.get(self, "geo_redundant_backup")
    @property
    @pulumi.getter(name="storageAutogrow")
    def storage_autogrow(self) -> Optional[str]:
        """
        StorageAutogrow - Enable Storage Auto Grow. Possible values include: 'StorageAutogrowEnabled', 'StorageAutogrowDisabled'
        """
        return pulumi.get(self, "storage_autogrow")
    @property
    @pulumi.getter(name="storageMB")
    def storage_mb(self) -> Optional[int]:
        """
        StorageMB - Max storage allowed for a server.
        """
        return pulumi.get(self, "storage_mb")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class MySQLServerStatus(dict):
    """
    ASOStatus (AzureServiceOperatorsStatus) defines the observed state of resource actions
    """
    def __init__(__self__, *,
                 completed: Optional[str] = None,
                 contains_update: Optional[bool] = None,
                 failed_provisioning: Optional[bool] = None,
                 flattened_secrets: Optional[bool] = None,
                 message: Optional[str] = None,
                 output: Optional[str] = None,
                 polling_url: Optional[str] = None,
                 provisioned: Optional[bool] = None,
                 provisioning: Optional[bool] = None,
                 requested: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 spec_hash: Optional[str] = None,
                 state: Optional[str] = None):
        """
        ASOStatus (AzureServiceOperatorsStatus) defines the observed state of resource actions
        """
        # Every status field is optional; only values actually reported by the
        # operator are stored (pulumi.set writes into the underlying dict).
        if completed is not None:
            pulumi.set(__self__, "completed", completed)
        if contains_update is not None:
            pulumi.set(__self__, "contains_update", contains_update)
        if failed_provisioning is not None:
            pulumi.set(__self__, "failed_provisioning", failed_provisioning)
        if flattened_secrets is not None:
            pulumi.set(__self__, "flattened_secrets", flattened_secrets)
        if message is not None:
            pulumi.set(__self__, "message", message)
        if output is not None:
            pulumi.set(__self__, "output", output)
        if polling_url is not None:
            pulumi.set(__self__, "polling_url", polling_url)
        if provisioned is not None:
            pulumi.set(__self__, "provisioned", provisioned)
        if provisioning is not None:
            pulumi.set(__self__, "provisioning", provisioning)
        if requested is not None:
            pulumi.set(__self__, "requested", requested)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if spec_hash is not None:
            pulumi.set(__self__, "spec_hash", spec_hash)
        if state is not None:
            pulumi.set(__self__, "state", state)
    @property
    @pulumi.getter
    def completed(self) -> Optional[str]:
        return pulumi.get(self, "completed")
    @property
    @pulumi.getter(name="containsUpdate")
    def contains_update(self) -> Optional[bool]:
        return pulumi.get(self, "contains_update")
    @property
    @pulumi.getter(name="failedProvisioning")
    def failed_provisioning(self) -> Optional[bool]:
        return pulumi.get(self, "failed_provisioning")
    @property
    @pulumi.getter(name="flattenedSecrets")
    def flattened_secrets(self) -> Optional[bool]:
        return pulumi.get(self, "flattened_secrets")
    @property
    @pulumi.getter
    def message(self) -> Optional[str]:
        return pulumi.get(self, "message")
    @property
    @pulumi.getter
    def output(self) -> Optional[str]:
        return pulumi.get(self, "output")
    @property
    @pulumi.getter(name="pollingUrl")
    def polling_url(self) -> Optional[str]:
        return pulumi.get(self, "polling_url")
    @property
    @pulumi.getter
    def provisioned(self) -> Optional[bool]:
        return pulumi.get(self, "provisioned")
    @property
    @pulumi.getter
    def provisioning(self) -> Optional[bool]:
        return pulumi.get(self, "provisioning")
    @property
    @pulumi.getter
    def requested(self) -> Optional[str]:
        return pulumi.get(self, "requested")
    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")
    @property
    @pulumi.getter(name="specHash")
    def spec_hash(self) -> Optional[str]:
        return pulumi.get(self, "spec_hash")
    @property
    @pulumi.getter
    def state(self) -> Optional[str]:
        return pulumi.get(self, "state")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PostgreSQLServerSpec(dict):
    """
    PostgreSQLServerSpec defines the desired state of PostgreSQLServer
    """
    def __init__(__self__, *,
                 location: str,
                 resource_group: str,
                 create_mode: Optional[str] = None,
                 key_vault_to_store_secrets: Optional[str] = None,
                 replica_properties: Optional['outputs.PostgreSQLServerSpecReplicaProperties'] = None,
                 server_version: Optional[str] = None,
                 sku: Optional['outputs.PostgreSQLServerSpecSku'] = None,
                 ssl_enforcement: Optional[str] = None,
                 storage_profile: Optional['outputs.PostgreSQLServerSpecStorageProfile'] = None):
        """
        PostgreSQLServerSpec defines the desired state of PostgreSQLServer
        :param str server_version: ServerVersion enumerates the values for server version.
        """
        # Required fields are always stored; pulumi.set writes into the
        # underlying dict using the snake_case key.
        pulumi.set(__self__, "location", location)
        pulumi.set(__self__, "resource_group", resource_group)
        # Optional fields are stored only when supplied, keeping the dict sparse.
        if create_mode is not None:
            pulumi.set(__self__, "create_mode", create_mode)
        if key_vault_to_store_secrets is not None:
            pulumi.set(__self__, "key_vault_to_store_secrets", key_vault_to_store_secrets)
        if replica_properties is not None:
            pulumi.set(__self__, "replica_properties", replica_properties)
        if server_version is not None:
            pulumi.set(__self__, "server_version", server_version)
        if sku is not None:
            pulumi.set(__self__, "sku", sku)
        if ssl_enforcement is not None:
            pulumi.set(__self__, "ssl_enforcement", ssl_enforcement)
        if storage_profile is not None:
            pulumi.set(__self__, "storage_profile", storage_profile)
    @property
    @pulumi.getter
    def location(self) -> str:
        return pulumi.get(self, "location")
    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> str:
        return pulumi.get(self, "resource_group")
    @property
    @pulumi.getter(name="createMode")
    def create_mode(self) -> Optional[str]:
        return pulumi.get(self, "create_mode")
    @property
    @pulumi.getter(name="keyVaultToStoreSecrets")
    def key_vault_to_store_secrets(self) -> Optional[str]:
        return pulumi.get(self, "key_vault_to_store_secrets")
    @property
    @pulumi.getter(name="replicaProperties")
    def replica_properties(self) -> Optional['outputs.PostgreSQLServerSpecReplicaProperties']:
        return pulumi.get(self, "replica_properties")
    @property
    @pulumi.getter(name="serverVersion")
    def server_version(self) -> Optional[str]:
        """
        ServerVersion enumerates the values for server version.
        """
        return pulumi.get(self, "server_version")
    @property
    @pulumi.getter
    def sku(self) -> Optional['outputs.PostgreSQLServerSpecSku']:
        return pulumi.get(self, "sku")
    @property
    @pulumi.getter(name="sslEnforcement")
    def ssl_enforcement(self) -> Optional[str]:
        return pulumi.get(self, "ssl_enforcement")
    @property
    @pulumi.getter(name="storageProfile")
    def storage_profile(self) -> Optional['outputs.PostgreSQLServerSpecStorageProfile']:
        return pulumi.get(self, "storage_profile")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PostgreSQLServerSpecReplicaProperties(dict):
    """Replica settings for a PostgreSQLServer: the ARM id of the source server to replicate from."""
    def __init__(__self__, *,
                 source_server_id: Optional[str] = None):
        # Stored only when supplied, keeping the dict sparse.
        if source_server_id is not None:
            pulumi.set(__self__, "source_server_id", source_server_id)
    @property
    @pulumi.getter(name="sourceServerId")
    def source_server_id(self) -> Optional[str]:
        return pulumi.get(self, "source_server_id")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PostgreSQLServerSpecSku(dict):
    """SKU (pricing tier / capacity) settings for a PostgreSQLServer."""
    def __init__(__self__, *,
                 capacity: Optional[int] = None,
                 family: Optional[str] = None,
                 name: Optional[str] = None,
                 size: Optional[str] = None,
                 tier: Optional[str] = None):
        """
        :param int capacity: Capacity - The scale up/out capacity, representing server's compute units.
        :param str family: Family - The family of hardware.
        :param str name: Name - The name of the sku, typically, tier + family + cores, e.g. B_Gen4_1, GP_Gen5_8.
        :param str size: Size - The size code, to be interpreted by resource as appropriate.
        :param str tier: Tier - The tier of the particular SKU, e.g. Basic. Possible values include: 'Basic', 'GeneralPurpose', 'MemoryOptimized'
        """
        # All fields optional; stored only when supplied.
        if capacity is not None:
            pulumi.set(__self__, "capacity", capacity)
        if family is not None:
            pulumi.set(__self__, "family", family)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if size is not None:
            pulumi.set(__self__, "size", size)
        if tier is not None:
            pulumi.set(__self__, "tier", tier)
    @property
    @pulumi.getter
    def capacity(self) -> Optional[int]:
        """
        Capacity - The scale up/out capacity, representing server's compute units.
        """
        return pulumi.get(self, "capacity")
    @property
    @pulumi.getter
    def family(self) -> Optional[str]:
        """
        Family - The family of hardware.
        """
        return pulumi.get(self, "family")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        Name - The name of the sku, typically, tier + family + cores, e.g. B_Gen4_1, GP_Gen5_8.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def size(self) -> Optional[str]:
        """
        Size - The size code, to be interpreted by resource as appropriate.
        """
        return pulumi.get(self, "size")
    @property
    @pulumi.getter
    def tier(self) -> Optional[str]:
        """
        Tier - The tier of the particular SKU, e.g. Basic. Possible values include: 'Basic', 'GeneralPurpose', 'MemoryOptimized'
        """
        return pulumi.get(self, "tier")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PostgreSQLServerSpecStorageProfile(dict):
    """Storage settings (size, backups, auto-grow) for a PostgreSQLServer."""
    def __init__(__self__, *,
                 backup_retention_days: Optional[int] = None,
                 geo_redundant_backup: Optional[str] = None,
                 storage_autogrow: Optional[str] = None,
                 storage_mb: Optional[int] = None):
        """
        :param int backup_retention_days: BackupRetentionDays - Backup retention days for the server.
        :param str geo_redundant_backup: GeoRedundantBackup - Enable Geo-redundant or not for server backup. Possible values include: 'Enabled', 'Disabled'
        :param str storage_autogrow: StorageAutogrow - Enable Storage Auto Grow. Possible values include: 'StorageAutogrowEnabled', 'StorageAutogrowDisabled'
        :param int storage_mb: StorageMB - Max storage allowed for a server.
        """
        # All fields optional; stored only when supplied.
        if backup_retention_days is not None:
            pulumi.set(__self__, "backup_retention_days", backup_retention_days)
        if geo_redundant_backup is not None:
            pulumi.set(__self__, "geo_redundant_backup", geo_redundant_backup)
        if storage_autogrow is not None:
            pulumi.set(__self__, "storage_autogrow", storage_autogrow)
        if storage_mb is not None:
            pulumi.set(__self__, "storage_mb", storage_mb)
    @property
    @pulumi.getter(name="backupRetentionDays")
    def backup_retention_days(self) -> Optional[int]:
        """
        BackupRetentionDays - Backup retention days for the server.
        """
        return pulumi.get(self, "backup_retention_days")
    @property
    @pulumi.getter(name="geoRedundantBackup")
    def geo_redundant_backup(self) -> Optional[str]:
        """
        GeoRedundantBackup - Enable Geo-redundant or not for server backup. Possible values include: 'Enabled', 'Disabled'
        """
        return pulumi.get(self, "geo_redundant_backup")
    @property
    @pulumi.getter(name="storageAutogrow")
    def storage_autogrow(self) -> Optional[str]:
        """
        StorageAutogrow - Enable Storage Auto Grow. Possible values include: 'StorageAutogrowEnabled', 'StorageAutogrowDisabled'
        """
        return pulumi.get(self, "storage_autogrow")
    @property
    @pulumi.getter(name="storageMB")
    def storage_mb(self) -> Optional[int]:
        """
        StorageMB - Max storage allowed for a server.
        """
        return pulumi.get(self, "storage_mb")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PostgreSQLServerStatus(dict):
    """
    ASOStatus (AzureServiceOperatorsStatus) defines the observed state of resource actions
    """
    def __init__(__self__, *,
                 completed: Optional[str] = None,
                 contains_update: Optional[bool] = None,
                 failed_provisioning: Optional[bool] = None,
                 flattened_secrets: Optional[bool] = None,
                 message: Optional[str] = None,
                 output: Optional[str] = None,
                 polling_url: Optional[str] = None,
                 provisioned: Optional[bool] = None,
                 provisioning: Optional[bool] = None,
                 requested: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 spec_hash: Optional[str] = None,
                 state: Optional[str] = None):
        """
        ASOStatus (AzureServiceOperatorsStatus) defines the observed state of resource actions
        """
        # Every status field is optional; only values actually reported by the
        # operator are stored (pulumi.set writes into the underlying dict).
        if completed is not None:
            pulumi.set(__self__, "completed", completed)
        if contains_update is not None:
            pulumi.set(__self__, "contains_update", contains_update)
        if failed_provisioning is not None:
            pulumi.set(__self__, "failed_provisioning", failed_provisioning)
        if flattened_secrets is not None:
            pulumi.set(__self__, "flattened_secrets", flattened_secrets)
        if message is not None:
            pulumi.set(__self__, "message", message)
        if output is not None:
            pulumi.set(__self__, "output", output)
        if polling_url is not None:
            pulumi.set(__self__, "polling_url", polling_url)
        if provisioned is not None:
            pulumi.set(__self__, "provisioned", provisioned)
        if provisioning is not None:
            pulumi.set(__self__, "provisioning", provisioning)
        if requested is not None:
            pulumi.set(__self__, "requested", requested)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if spec_hash is not None:
            pulumi.set(__self__, "spec_hash", spec_hash)
        if state is not None:
            pulumi.set(__self__, "state", state)
    @property
    @pulumi.getter
    def completed(self) -> Optional[str]:
        return pulumi.get(self, "completed")
    @property
    @pulumi.getter(name="containsUpdate")
    def contains_update(self) -> Optional[bool]:
        return pulumi.get(self, "contains_update")
    @property
    @pulumi.getter(name="failedProvisioning")
    def failed_provisioning(self) -> Optional[bool]:
        return pulumi.get(self, "failed_provisioning")
    @property
    @pulumi.getter(name="flattenedSecrets")
    def flattened_secrets(self) -> Optional[bool]:
        return pulumi.get(self, "flattened_secrets")
    @property
    @pulumi.getter
    def message(self) -> Optional[str]:
        return pulumi.get(self, "message")
    @property
    @pulumi.getter
    def output(self) -> Optional[str]:
        return pulumi.get(self, "output")
    @property
    @pulumi.getter(name="pollingUrl")
    def polling_url(self) -> Optional[str]:
        return pulumi.get(self, "polling_url")
    @property
    @pulumi.getter
    def provisioned(self) -> Optional[bool]:
        return pulumi.get(self, "provisioned")
    @property
    @pulumi.getter
    def provisioning(self) -> Optional[bool]:
        return pulumi.get(self, "provisioning")
    @property
    @pulumi.getter
    def requested(self) -> Optional[str]:
        return pulumi.get(self, "requested")
    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")
    @property
    @pulumi.getter(name="specHash")
    def spec_hash(self) -> Optional[str]:
        return pulumi.get(self, "spec_hash")
    @property
    @pulumi.getter
    def state(self) -> Optional[str]:
        return pulumi.get(self, "state")
    def _translate_property(self, prop):
        # Map camelCase wire names back to snake_case Python names via the
        # generated lookup table; unknown names pass through unchanged.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| 36.613155
| 157
| 0.638512
| 3,591
| 32,842
| 5.5912
| 0.058201
| 0.052595
| 0.052445
| 0.076651
| 0.926736
| 0.926537
| 0.925441
| 0.899641
| 0.897749
| 0.892768
| 0
| 0.000739
| 0.258632
| 32,842
| 896
| 158
| 36.654018
| 0.823887
| 0.153706
| 0
| 0.912637
| 1
| 0
| 0.11984
| 0.031731
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163807
| false
| 0
| 0.00936
| 0.110764
| 0.336973
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
8b61a1712eefcc6e443911a9a14e9c308a48e253
| 3,800
|
py
|
Python
|
hatvp/migrations/0026_auto_20200127_1451.py
|
WilliamLafarie/hatvp
|
76e856ec53e51f5a214a217bb07d15426269e7d7
|
[
"MIT"
] | null | null | null |
hatvp/migrations/0026_auto_20200127_1451.py
|
WilliamLafarie/hatvp
|
76e856ec53e51f5a214a217bb07d15426269e7d7
|
[
"MIT"
] | null | null | null |
hatvp/migrations/0026_auto_20200127_1451.py
|
WilliamLafarie/hatvp
|
76e856ec53e51f5a214a217bb07d15426269e7d7
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0 on 2020-01-27 14:51
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax a batch of HATVP text columns to optional 255-char CharFields."""

    dependencies = [
        ('hatvp', '0025_auto_20200127_1450'),
    ]

    # Every altered column receives the identical definition — an optional
    # CharField capped at 255 characters — so the operations are generated
    # from a flat (model, column) table instead of being spelled out 18 times.
    operations = [
        migrations.AlterField(
            model_name=model,
            name=column,
            field=models.CharField(blank=True, max_length=255, null=True),
        )
        for model, column in [
            ('affiliations', 'identifiant_national_affiliation'),
            ('affiliations', 'type_identifiant_national_affiliation'),
            ('clients', 'denomination_client'),
            ('clients', 'identifiant_national_client'),
            ('clients', 'type_identifiant_national_client'),
            ('collaborateurs', 'fonction_collaborateur'),
            ('collaborateurs', 'nom_collaborateur'),
            ('collaborateurs', 'nom_prenom_collaborateur'),
            ('collaborateurs', 'prenom_collaborateur'),
            ('dirigeants', 'fonction_dirigeant'),
            ('dirigeants', 'nom_dirigeant'),
            ('dirigeants', 'nom_prenom_dirigeant'),
            ('dirigeants', 'prenom_dirigeant'),
            ('domaines_intervention', 'domaines_intervention_actions_menees'),
            ('exercices', 'chiffre_affaires'),
            ('exercices', 'montant_depense'),
            ('exercices', 'nombre_salaries'),
            ('objets_activites', 'identifiant_fiche'),
        ]
    ]
| 36.538462
| 74
| 0.595526
| 364
| 3,800
| 6.027473
| 0.178571
| 0.164084
| 0.205105
| 0.237922
| 0.855059
| 0.855059
| 0.827256
| 0.827256
| 0.827256
| 0.827256
| 0
| 0.031297
| 0.293684
| 3,800
| 103
| 75
| 36.893204
| 0.78614
| 0.011316
| 0
| 0.721649
| 1
| 0
| 0.16751
| 0.067643
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010309
| 0
| 0.041237
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
8bd505f1b815f8be379b8a250519f89c0b6b2fd5
| 116
|
py
|
Python
|
test/test_binary_classfication.py
|
SynchronyML/Auto_ML_C
|
08c266184708e60390c885bcc88b89a63180e3a7
|
[
"MIT"
] | 1
|
2021-12-19T05:36:36.000Z
|
2021-12-19T05:36:36.000Z
|
test/test_binary_classfication.py
|
SynchronyML/Auto_Taste_ML
|
7978abb0d24c374c55b36905071cbd4ae4196279
|
[
"MIT"
] | null | null | null |
test/test_binary_classfication.py
|
SynchronyML/Auto_Taste_ML
|
7978abb0d24c374c55b36905071cbd4ae4196279
|
[
"MIT"
] | null | null | null |
from auto_ml_c import binary_classfication
def test_cal_add_1():
    """cal_add_1(1, 1) should behave like ordinary addition and give 2."""
    result = binary_classfication.cal_add_1(1, 1)
    assert result == 2
| 38.666667
| 51
| 0.810345
| 21
| 116
| 4.047619
| 0.666667
| 0.447059
| 0.164706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048544
| 0.112069
| 116
| 3
| 51
| 38.666667
| 0.776699
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4757a3cae86d7463367e8888f09c1b36f00b219e
| 70
|
py
|
Python
|
Beginner/1019.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
Beginner/1019.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
Beginner/1019.py
|
LorranSutter/URI-Online-Judge
|
aef885b9a7caa83484cf172e29eea8ec92fc3627
|
[
"MIT"
] | null | null | null |
# URI 1019: read a duration in seconds and print it as hours:minutes:seconds
# (no zero padding, matching the judge's expected output format).
total_seconds = int(input())
hours = total_seconds // 3600
minutes = total_seconds // 60 % 60
seconds = total_seconds % 60
print('{}:{}:{}'.format(hours, minutes, seconds))
| 23.333333
| 52
| 0.5
| 14
| 70
| 2.5
| 0.428571
| 0.257143
| 0.428571
| 0.342857
| 0.428571
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 0.057143
| 70
| 2
| 53
| 35
| 0.348485
| 0
| 0
| 0
| 0
| 0
| 0.114286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
47e9d5240875b966abe93cf9ee0cb02de4fc906b
| 221
|
py
|
Python
|
segmenter/layers/__init__.py
|
brandongk/segmenter
|
dbc042d31dc74f1abdc87ae10a6be78ba38ddb91
|
[
"Unlicense"
] | null | null | null |
segmenter/layers/__init__.py
|
brandongk/segmenter
|
dbc042d31dc74f1abdc87ae10a6be78ba38ddb91
|
[
"Unlicense"
] | null | null | null |
segmenter/layers/__init__.py
|
brandongk/segmenter
|
dbc042d31dc74f1abdc87ae10a6be78ba38ddb91
|
[
"Unlicense"
] | null | null | null |
from segmenter.layers.AddSingleGradient import AddSingleGradient
from segmenter.layers.AverageSingleGradient import AverageSingleGradient
from segmenter.layers.NoisyOr import NoisyOr
from segmenter.layers.Vote import Vote
| 55.25
| 72
| 0.895928
| 24
| 221
| 8.25
| 0.333333
| 0.262626
| 0.383838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067873
| 221
| 4
| 73
| 55.25
| 0.961165
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9a55dba54daec6e22b68b8e18502681a298a804e
| 7,962
|
py
|
Python
|
user_statistics/tests/test_services.py
|
tgamauf/spritstat
|
849526ec8dec46c57194d50ff3b32c16d0cb684a
|
[
"MIT"
] | 1
|
2022-01-30T10:50:14.000Z
|
2022-01-30T10:50:14.000Z
|
user_statistics/tests/test_services.py
|
tgamauf/spritstat
|
849526ec8dec46c57194d50ff3b32c16d0cb684a
|
[
"MIT"
] | 47
|
2022-02-02T22:07:28.000Z
|
2022-03-30T13:53:37.000Z
|
user_statistics/tests/test_services.py
|
tgamauf/spritstat
|
849526ec8dec46c57194d50ff3b32c16d0cb684a
|
[
"MIT"
] | null | null | null |
from django.db.utils import IntegrityError
from django.test import TestCase
from django.utils import timezone
import pytz
from unittest.mock import patch
from user_visit.models import UserVisit
from user_statistics.models import DailyActiveUsers, MonthlyActiveUsers
from user_statistics.services import (
calculate_daily_active_users,
calculate_monthly_active_users,
delete_past_user_visits,
)
class TestDailyActiveUsers(TestCase):
    """Exercise calculate_daily_active_users() against seeded UserVisit rows."""

    fixtures = ["user.json"]

    @staticmethod
    def _record_visit(user_id, timestamp, index):
        # session_key and hash only need to be unique per row.
        UserVisit.objects.create(
            user_id=user_id,
            timestamp=timestamp,
            session_key="session_{}".format(index),
            hash="hash_{}".format(index),
        )

    @staticmethod
    def _run_at(now):
        # Freeze "now" so the service computes stats for a known target day.
        with patch("django.utils.timezone.now", return_value=now):
            calculate_daily_active_users()

    def test_multiple_visits_of_same_user(self):
        now = timezone.datetime(2022, 4, 19, 10, 0)
        visits = [
            (200, timezone.datetime(2022, 4, 15)),
            (200, timezone.datetime(2022, 4, 18, 10, 0)),
            (200, timezone.datetime(2022, 4, 18, 23, 59, 59)),
        ]
        for index, (user_id, timestamp) in enumerate(visits, start=1):
            self._record_visit(user_id, timestamp, index)

        self._run_at(now)

        # Only one DAU row, for the previous day, counting the user once.
        self.assertEqual(DailyActiveUsers.objects.count(), 1)
        entry = DailyActiveUsers.objects.last()
        self.assertEqual(entry.date, timezone.datetime(2022, 4, 18).date())
        self.assertEqual(entry.count, 1)
        # 0.25 presumably means 1 of 4 fixture users — TODO confirm user.json.
        self.assertEqual(entry.fraction, 0.25)

    def test_multiple_visits_of_different_users(self):
        now = timezone.datetime(2022, 4, 19, 10, 0)
        visits = [
            (100, timezone.datetime(2022, 4, 10)),
            (200, timezone.datetime(2022, 4, 18, 10, 0)),
            (300, timezone.datetime(2022, 4, 18, 12, 10)),
            (200, timezone.datetime(2022, 4, 18, 15, 10)),
        ]
        for index, (user_id, timestamp) in enumerate(visits, start=1):
            self._record_visit(user_id, timestamp, index)

        self._run_at(now)

        # Two distinct users visited on 2022-04-18 (200's repeat is deduped).
        self.assertEqual(DailyActiveUsers.objects.count(), 1)
        entry = DailyActiveUsers.objects.last()
        self.assertEqual(entry.date, timezone.datetime(2022, 4, 18).date())
        self.assertEqual(entry.count, 2)
        self.assertEqual(entry.fraction, 0.5)

    def test_duplicate_calculation(self):
        now = timezone.datetime(2022, 4, 19, 10, 0)
        self._record_visit(200, timezone.datetime(2022, 4, 15), 1)

        # Running the calculation twice for the same day must violate the
        # uniqueness constraint on DailyActiveUsers.
        self._run_at(now)
        with self.assertRaises(IntegrityError):
            self._run_at(now)
class TestMonthlyActiveUsers(TestCase):
    """Exercise calculate_monthly_active_users() against seeded UserVisit rows."""

    fixtures = ["user.json"]

    @staticmethod
    def _record_visit(user_id, timestamp, index):
        # session_key and hash only need to be unique per row.
        UserVisit.objects.create(
            user_id=user_id,
            timestamp=timestamp,
            session_key="session_{}".format(index),
            hash="hash_{}".format(index),
        )

    @staticmethod
    def _run_at(now):
        # Freeze "now" so the service computes stats for a known target month.
        with patch("django.utils.timezone.now", return_value=now):
            calculate_monthly_active_users()

    def test_multiple_visits_of_same_user(self):
        now = timezone.datetime(2022, 5, 3, 22, 0)
        visits = [
            (200, timezone.datetime(2022, 3, 18)),
            (200, timezone.datetime(2022, 4, 1)),
            (200, timezone.datetime(2022, 4, 10, 12)),
            (200, timezone.datetime(2022, 4, 10, 15)),
        ]
        for index, (user_id, timestamp) in enumerate(visits, start=1):
            self._record_visit(user_id, timestamp, index)

        self._run_at(now)

        # One MAU row, dated at the end of the previous month, with the
        # single user counted once despite three April visits.
        self.assertEqual(MonthlyActiveUsers.objects.count(), 1)
        entry = MonthlyActiveUsers.objects.last()
        self.assertEqual(entry.date, timezone.datetime(2022, 4, 30).date())
        self.assertEqual(entry.count, 1)
        # 0.25 presumably means 1 of 4 fixture users — TODO confirm user.json.
        self.assertEqual(entry.fraction, 0.25)

    def test_multiple_visits_of_different_users(self):
        now = timezone.datetime(2022, 5, 3, 22, 0)
        visits = [
            (200, timezone.datetime(2022, 3, 18)),
            (300, timezone.datetime(2022, 3, 22)),
            (200, timezone.datetime(2022, 4, 1)),
            (300, timezone.datetime(2022, 4, 2)),
            (200, timezone.datetime(2022, 4, 10, 10)),
            (200, timezone.datetime(2022, 4, 10, 23)),
        ]
        for index, (user_id, timestamp) in enumerate(visits, start=1):
            self._record_visit(user_id, timestamp, index)

        self._run_at(now)

        # Two distinct users were active during April; March rows are ignored.
        self.assertEqual(MonthlyActiveUsers.objects.count(), 1)
        entry = MonthlyActiveUsers.objects.last()
        self.assertEqual(entry.date, timezone.datetime(2022, 4, 30).date())
        self.assertEqual(entry.count, 2)
        self.assertEqual(entry.fraction, 0.5)

    def test_duplicate_calculation(self):
        now = timezone.datetime(2022, 4, 19, 10, 0)
        self._record_visit(200, timezone.datetime(2022, 4, 15), 1)

        # Running the calculation twice for the same month must violate the
        # uniqueness constraint on MonthlyActiveUsers.
        self._run_at(now)
        with self.assertRaises(IntegrityError):
            self._run_at(now)
class TestDeletePastUserVisits(TestCase):
    """Exercise delete_past_user_visits(): old UserVisit rows are purged."""

    fixtures = ["user.json"]

    @staticmethod
    def _record_visit(user_id, timestamp, index):
        # session_key and hash only need to be unique per row.
        UserVisit.objects.create(
            user_id=user_id,
            timestamp=timestamp,
            session_key="session_{}".format(index),
            hash="hash_{}".format(index),
        )

    def test_multiple_visits_of_same_user(self):
        now = timezone.datetime(2022, 5, 1, 3)
        # The one visit that must survive the purge (timezone-aware, UTC).
        check_timestamp = timezone.datetime(2022, 5, 1, tzinfo=pytz.utc)
        visits = [
            (100, timezone.datetime(2022, 3, 18)),
            (200, timezone.datetime(2022, 4, 1)),
            (300, timezone.datetime(2022, 4, 10)),
            (200, check_timestamp),
        ]
        for index, (user_id, timestamp) in enumerate(visits, start=1):
            self._record_visit(user_id, timestamp, index)

        with patch("django.utils.timezone.now", return_value=now):
            delete_past_user_visits()

        # Only the current-day visit remains after the purge.
        self.assertEqual(UserVisit.objects.count(), 1)
        self.assertEqual(UserVisit.objects.last().timestamp, check_timestamp)
| 32.631148
| 77
| 0.58842
| 880
| 7,962
| 5.130682
| 0.097727
| 0.120487
| 0.150609
| 0.12093
| 0.848948
| 0.843632
| 0.843189
| 0.812182
| 0.812182
| 0.811517
| 0
| 0.073854
| 0.301055
| 7,962
| 243
| 78
| 32.765432
| 0.737466
| 0
| 0
| 0.729858
| 0
| 0
| 0.068701
| 0.021979
| 0
| 0
| 0
| 0
| 0.094787
| 1
| 0.033175
| false
| 0
| 0.037915
| 0
| 0.099526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9a58f19ecd5b153e30e1be45d645bf77f61b949f
| 6,632
|
py
|
Python
|
testsuite/test_ext.py
|
h-friederich/lpm
|
144d29bf66fe495b6b4f05c4e69fbf31ba50f01b
|
[
"BSD-3-Clause"
] | null | null | null |
testsuite/test_ext.py
|
h-friederich/lpm
|
144d29bf66fe495b6b4f05c4e69fbf31ba50f01b
|
[
"BSD-3-Clause"
] | null | null | null |
testsuite/test_ext.py
|
h-friederich/lpm
|
144d29bf66fe495b6b4f05c4e69fbf31ba50f01b
|
[
"BSD-3-Clause"
] | null | null | null |
from bson.json_util import loads, dumps
from testsuite import DataBaseTestCase
class ExtApiTest(DataBaseTestCase):
    """Integration tests for the /ext/* JSON API.

    Each test drives the Flask test client via open_with_auth() and checks
    both the HTTP status and the decoded JSON body. Fixture data appears to
    seed item 'LP0001' with partno 'TE0001a' — provided by DataBaseTestCase.
    """

    def test_item_filter(self):
        """POST /ext/items: filter items by a Mongo-style JSON query."""
        # insufficient privileges -> redirect to login
        rv = self.open_with_auth('/ext/items', username='worker', method='POST',
                                 data=dict(filter='{"partno": "TE0001a"}'))
        self.assertEqual(302, rv.status_code)
        self.assertIn('/login', rv.location)
        # 'viewer' may query; matching serials come back in 'serials'.
        rv = self.open_with_auth('/ext/items', username='viewer', method='POST',
                                 data=dict(filter='{"partno": "TE0001a"}'))
        self.assertEqual(200, rv.status_code)
        data = loads(rv.data.decode('utf-8'))
        self.assertTrue(data.get('ok'))
        self.assertEqual(['LP0001'], data.get('serials'))
        # No form data at all: still HTTP 200, but ok=False with an error text.
        rv = self.open_with_auth('/ext/items', username='viewer', method='POST',
                                 data=dict())
        self.assertEqual(200, rv.status_code)
        data = loads(rv.data.decode('utf-8'))
        self.assertFalse(data.get('ok'))
        self.assertIn('missing filter', data.get('message'))
        # Misspelled field name ('filt') is treated the same as no filter.
        rv = self.open_with_auth('/ext/items', username='viewer', method='POST',
                                 data=dict(filt='{"$partno": "TE0001a"}'))
        self.assertEqual(200, rv.status_code)
        data = loads(rv.data.decode('utf-8'))
        self.assertFalse(data.get('ok'))
        self.assertIn('missing filter', data.get('message'))

    def test_item_info(self):
        """GET /ext/items/<serial>: fetch one item document."""
        # insufficient privileges -> redirect to login
        rv = self.open_with_auth('/ext/items/LP0001', username='worker')
        self.assertEqual(302, rv.status_code)
        self.assertIn('/login', rv.location)
        # Default user may read; the full seeded document is returned.
        rv = self.open_with_auth('/ext/items/LP0001')
        self.assertEqual(200, rv.status_code)
        obj = loads(rv.data.decode('utf-8'))
        refobj = {
            '_id': 'LP0001',
            'partno': 'TE0001a',
            'available': True,
            'status': '',
            'comments': []
        }
        self.assertEqual(refobj, obj)

    def test_item_update(self):
        """POST /ext/items/update/<serial>: comments, set/update/push, status.

        The sub-requests build on each other: comments accumulate on LP0001
        and later assertions check the full comment history.
        """
        # insufficient privileges -> redirect to login
        rv = self.open_with_auth('/ext/items/update/LP0001', username='worker', method='POST',
                                 data=dict(comment='some comment'))
        self.assertEqual(302, rv.status_code)
        self.assertIn('/login', rv.location)
        # Unknown serial -> 404.
        rv = self.open_with_auth('/ext/items/update/LP0002', username='viewer', method='POST',
                                 data=dict(comment='some comment'))
        self.assertEqual(404, rv.status_code)
        # 'viewer' may add a plain comment.
        rv = self.open_with_auth('/ext/items/update/LP0001', username='viewer', method='POST',
                                 data=dict(comment='some comment'))
        self.assertEqual(200, rv.status_code)
        data = loads(rv.data.decode('utf-8'))
        self.assertTrue(data.get('ok'))
        # Inspect the Mongo document directly to verify the stored comment.
        with self.app.app_context():
            obj = self.app.mongo.db.items.find_one('LP0001')
            comments = obj.get('comments')
            self.assertEqual(1, len(comments))
            comment = comments[0]
            self.assertEqual('some comment', comment.get('message'))
        # 'admin' may set new keys, push to arrays and change the status;
        # a status change adds an automatic audit comment.
        rv = self.open_with_auth('/ext/items/update/LP0001', username='admin', method='POST',
                                 data=dict(set='{"key1": "B", "key2": 5.4}',
                                           push='{"key3": {"$each": [5, 6]}}',
                                           status='obsolete'))
        self.assertEqual(200, rv.status_code)
        data = loads(rv.data.decode('utf-8'))
        self.assertTrue(data.get('ok'))
        with self.app.app_context():
            obj = self.app.mongo.db.items.find_one('LP0001')
            comments = obj.get('comments')
            self.assertEqual(2, len(comments))
            comment = comments[0]
            self.assertEqual('some comment', comment.get('message'))
            comment = comments[1]
            self.assertEqual("[Auto] changed status to 'obsolete'", comment.get('message'))
            self.assertEqual('B', obj.get('key1'))
            self.assertEqual(5.4, obj.get('key2'))
            self.assertEqual([5, 6], obj.get('key3'))
        # set (new key), update (existing keys), push and comment combined.
        rv = self.open_with_auth('/ext/items/update/LP0001', username='admin', method='POST',
                                 data=dict(set='{"key4": 2}',
                                           update='{"key1": "C", "key2": 5.5}',
                                           push='{"key3": {"$each": [5, 6]}}',
                                           comment='a new comment'))
        self.assertEqual(200, rv.status_code)
        data = loads(rv.data.decode('utf-8'))
        self.assertTrue(data.get('ok'))
        with self.app.app_context():
            obj = self.app.mongo.db.items.find_one('LP0001')
            comments = obj.get('comments')
            self.assertEqual(3, len(comments))
            comment = comments[0]
            self.assertEqual('some comment', comment.get('message'))
            comment = comments[1]
            self.assertEqual("[Auto] changed status to 'obsolete'", comment.get('message'))
            comment = comments[2]
            self.assertEqual('a new comment', comment.get('message'))
            self.assertEqual('C', obj.get('key1'))
            self.assertEqual(5.5, obj.get('key2'))
            # push appends, so key3 now holds both pushed batches.
            self.assertEqual([5, 6, 5, 6], obj.get('key3'))
            self.assertEqual(2, obj.get('key4'))
        # 'update' on a key not whitelisted for updates is rejected.
        rv = self.open_with_auth('/ext/items/update/LP0001', username='admin', method='POST',
                                 data=dict(update='{"key4": "C"}'))
        self.assertEqual(200, rv.status_code)
        data = loads(rv.data.decode('utf-8'))
        self.assertFalse(data.get('ok'))
        self.assertEqual("No permission to update key 'key4'", data.get('message'))
        # 'set' must not overwrite an existing key.
        rv = self.open_with_auth('/ext/items/update/LP0001', username='admin', method='POST',
                                 data=dict(set='{"key4": "E"}'))
        self.assertEqual(200, rv.status_code)
        data = loads(rv.data.decode('utf-8'))
        self.assertFalse(data.get('ok'))
        self.assertEqual("operation would overwrite existing entry 'key4'", data.get('message'))
        # An unknown status value is rejected.
        rv = self.open_with_auth('/ext/items/update/LP0001', username='admin', method='POST',
                                 data=dict(set='{"key5": "E"}', status='test'))
        self.assertEqual(200, rv.status_code)
        data = loads(rv.data.decode('utf-8'))
        self.assertFalse(data.get('ok'))
        self.assertEqual("unknown status: 'test'", data.get('message'))
| 51.015385
| 96
| 0.550211
| 755
| 6,632
| 4.75894
| 0.141722
| 0.146117
| 0.038965
| 0.054551
| 0.855831
| 0.820484
| 0.788478
| 0.773448
| 0.766212
| 0.744225
| 0
| 0.037194
| 0.28649
| 6,632
| 129
| 97
| 51.410853
| 0.722105
| 0.020205
| 0
| 0.59322
| 0
| 0
| 0.186566
| 0.029579
| 0
| 0
| 0
| 0
| 0.415254
| 1
| 0.025424
| false
| 0
| 0.016949
| 0
| 0.050847
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7bf007946e065d4672781d4d7044bb514f2e72fb
| 8,822
|
py
|
Python
|
inferbeddings/fuzzy/operators.py
|
issca/inferbeddings
|
80492a7aebcdcac21e758514c8af403d77e8594a
|
[
"MIT"
] | 33
|
2017-07-25T14:31:00.000Z
|
2019-03-06T09:18:00.000Z
|
inferbeddings/fuzzy/operators.py
|
issca/inferbeddings
|
80492a7aebcdcac21e758514c8af403d77e8594a
|
[
"MIT"
] | 1
|
2017-08-22T13:49:30.000Z
|
2017-08-22T13:49:30.000Z
|
inferbeddings/fuzzy/operators.py
|
issca/inferbeddings
|
80492a7aebcdcac21e758514c8af403d77e8594a
|
[
"MIT"
] | 9
|
2017-10-05T08:50:45.000Z
|
2019-04-18T12:40:56.000Z
|
# -*- coding: utf-8 -*-
import abc
import sys
import tensorflow as tf
class TOperators(metaclass=abc.ABCMeta):
    """
    Abstract base class defining the interface of a set of T-operators
    (T-norm, T-conorm and negation function).

    The general properties of a set of T-operators are described in [1].

    [1] Gupta, M. M. et al. - Theory of T-norms and fuzzy inference methods -
        Fuzzy Sets and Systems, Vol. 40, 1991, 431-450.
    """

    @abc.abstractmethod
    def norm(self, x, y):
        """Combine x and y element-wise with a T-norm.

        :param x: (N,) Tensor containing the first terms of the t-norm.
        :param y: (N,) Tensor containing the second terms of the t-norm.
        :return: (N,) Tensor containing the resulting t-norm values.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def conorm(self, x, y):
        """Combine x and y element-wise with a T-conorm.

        :param x: (N,) Tensor containing the first terms of the t-conorm.
        :param y: (N,) Tensor containing the second terms of the t-conorm.
        :return: (N,) Tensor containing the resulting t-conorm values.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def negation(self, x):
        """Negate x element-wise.

        :param x: (N,) Tensor containing the values to negate.
        :return: (N,) Tensor containing the resulting negated values.
        """
        raise NotImplementedError
class ZadehTOperators(TOperators):
    """
    Zadeh's T-operators [1, 2].

    [1] Zadeh, L. A. - Outline of a new approach to the analysis of complex systems and decision processes - IEEE Trans. Systems Man Cybernet. 3 (1973) 28-44.
    [2] Gupta, M. M. et al. - Theory of T-norms and fuzzy inference methods - Fuzzy Sets and Systems, Vol. 40, 1991, 431-450.
    """
    # NOTE: method docstrings are raw strings so that LaTeX markup such as
    # "\top" and "\neg" is kept literal instead of being parsed as the
    # escape sequences \t and \n.
    def norm(self, x, y):
        r"""
        .. math:: \top(x, y) = min(x, y)

        :param x: (N,) Tensor containing the first arguments of the t-norm.
        :param y: (N,) Tensor containing the second arguments of the t-norm.
        :return: (N,) Tensor containing the resulting t-norm values.
        """
        return tf.minimum(x, y)

    def conorm(self, x, y):
        r"""
        .. math:: \top*(x, y) = max(x, y)

        :param x: (N,) Tensor containing the first arguments of the t-conorm.
        :param y: (N,) Tensor containing the second arguments of the t-conorm.
        :return: (N,) Tensor containing the resulting t-conorm values.
        """
        return tf.maximum(x, y)

    def negation(self, x):
        r"""
        .. math:: \neg(x) = 1 - x

        :param x: (N,) Tensor containing the arguments of the functional negation.
        :return: (N,) Tensor containing the resulting negated values.
        """
        return 1 - x
class ProbabilisticTOperators(TOperators):
    """
    Probabilistic T-operators [1, 2].

    [1] Weber, S. - A general concept of fuzzy connectives, negations and implications based on t-norms and t-conorms - Fuzzy Sets and Systems 11 (1983) 115-134.
    [2] Gupta, M. M. et al. - Theory of T-norms and fuzzy inference methods - Fuzzy Sets and Systems, Vol. 40, 1991, 431-450.
    """
    # NOTE: method docstrings are raw strings so that LaTeX markup such as
    # "\top" and "\neg" is kept literal instead of being parsed as the
    # escape sequences \t and \n.
    def norm(self, x, y):
        r"""
        .. math:: \top(x, y) = x * y

        :param x: (N,) Tensor containing the first arguments of the t-norm.
        :param y: (N,) Tensor containing the second arguments of the t-norm.
        :return: (N,) Tensor containing the resulting t-norm values.
        """
        return x * y

    def conorm(self, x, y):
        r"""
        .. math:: \top*(x, y) = x + y - xy

        :param x: (N,) Tensor containing the first arguments of the t-conorm.
        :param y: (N,) Tensor containing the second arguments of the t-conorm.
        :return: (N,) Tensor containing the resulting t-conorm values.
        """
        return x + y - x * y

    def negation(self, x):
        r"""
        .. math:: \neg(x) = 1 - x

        :param x: (N,) Tensor containing the arguments of the functional negation.
        :return: (N,) Tensor containing the resulting negated values.
        """
        return 1 - x
class LukasiewiczTOperators(TOperators):
    """
    Łukasiewicz T-operators [1, 2]

    [1] Giles, R. - Lukasiewicz logic and fuzzy set theory - Internat. J. Man-Machine Stud. 8 (1976) 313-327.
    [2] Gupta, M. M. et al. - Theory of T-norms and fuzzy inference methods - Fuzzy Sets and Systems, Vol. 40, 1991, 431-450.
    """
    # NOTE: method docstrings are raw strings so that LaTeX markup such as
    # "\top" and "\neg" is kept literal instead of being parsed as the
    # escape sequences \t and \n.
    def norm(self, x, y):
        r"""
        .. math:: \top(x, y) = max(0, x + y - 1)

        :param x: (N,) Tensor containing the first arguments of the t-norm.
        :param y: (N,) Tensor containing the second arguments of the t-norm.
        :return: (N,) Tensor containing the resulting t-norm values.
        """
        return tf.maximum(0, x + y - 1)

    def conorm(self, x, y):
        r"""
        .. math:: \top*(x, y) = min(1, x + y)

        :param x: (N,) Tensor containing the first arguments of the t-conorm.
        :param y: (N,) Tensor containing the second arguments of the t-conorm.
        :return: (N,) Tensor containing the resulting t-conorm values.
        """
        return tf.minimum(1, x + y)

    def negation(self, x):
        r"""
        .. math:: \neg(x) = 1 - x

        :param x: (N,) Tensor containing the arguments of the functional negation.
        :return: (N,) Tensor containing the resulting negated values.
        """
        return 1 - x
class GuptaTOperators(TOperators):
    """
    Gupta T-operators [1]

    [1] Gupta, M. M. et al. - Theory of T-norms and fuzzy inference methods - Fuzzy Sets and Systems, Vol. 40, 1991, 431-450.
    """
    # NOTE: method docstrings are raw strings so that LaTeX markup such as
    # "\top" and "\neg" is kept literal instead of being parsed as the
    # escape sequences \t and \n.
    def norm(self, x, y):
        r"""
        .. math:: \top(x, y) = xy / (x + y - xy)

        :param x: (N,) Tensor containing the first arguments of the t-norm.
        :param y: (N,) Tensor containing the second arguments of the t-norm.
        :return: (N,) Tensor containing the resulting t-norm values.
        """
        # NOTE(review): the denominator is 0 when x == y == 0 — presumably
        # callers guarantee inputs in (0, 1]; confirm before relying on it.
        return (x * y) / (x + y - x * y)

    def conorm(self, x, y):
        r"""
        .. math:: \top*(x, y) = (x + y - 2xy) / (1 - x * y)

        :param x: (N,) Tensor containing the first arguments of the t-conorm.
        :param y: (N,) Tensor containing the second arguments of the t-conorm.
        :return: (N,) Tensor containing the resulting t-conorm values.
        """
        # NOTE(review): the denominator is 0 when x == y == 1 — same caveat.
        return (x + y - 2 * x * y) / (1 - x * y)

    def negation(self, x):
        r"""
        .. math:: \neg(x) = 1 - x

        :param x: (N,) Tensor containing the arguments of the functional negation.
        :return: (N,) Tensor containing the resulting negated values.
        """
        return 1 - x
class HamacherTOperators(TOperators):
    """
    Hamacher T-operators [1, 2]

    [1] Weber, S. - A general concept of fuzzy connectives, negations and implications based on t-norms and t-conorms - Fuzzy Sets and Systems 11 (1983) 115-134.
    [2] Gupta, M. M. et al. - Theory of T-norms and fuzzy inference methods - Fuzzy Sets and Systems, Vol. 40, 1991, 431-450.
    """
    # NOTE: method docstrings are raw strings so that LaTeX markup such as
    # "\top" and "\lambda" is kept literal instead of being parsed as the
    # escape sequences \t and \n.
    def __init__(self, lamda=1.0):
        # 'lamda' (sic) avoids shadowing the Python keyword 'lambda'.
        super().__init__()
        self.lamda = lamda

    def norm(self, x, y):
        r"""
        .. math:: \top(x, y) = \lambda xy / (1 - (1 - \lambda) (x + y - xy))

        :param x: (N,) Tensor containing the first arguments of the t-norm.
        :param y: (N,) Tensor containing the second arguments of the t-norm.
        :return: (N,) Tensor containing the resulting t-norm values.
        """
        return (self.lamda * x * y) / (1 - (1 - self.lamda) * (x + y - x * y))

    def conorm(self, x, y):
        r"""
        .. math:: \top*(x, y) = (\lambda (x + y) + xy (1 - 2 \lambda)) / (\lambda + xy (1 - \lambda))

        :param x: (N,) Tensor containing the first arguments of the t-conorm.
        :param y: (N,) Tensor containing the second arguments of the t-conorm.
        :return: (N,) Tensor containing the resulting t-conorm values.
        """
        # BUG FIX: the original denominator read `x * y (1 - self.lamda)`,
        # which *calls* y and raises TypeError (or silently misbehaves for
        # callable tensors). The docstring formula requires the product
        # xy * (1 - lambda), restored here with the missing `*`.
        return (self.lamda * (x + y) + x * y * (1 - 2 * self.lamda)) / (self.lamda + x * y * (1 - self.lamda))

    def negation(self, x):
        r"""
        .. math:: \neg(x) = 1 - x

        :param x: (N,) Tensor containing the arguments of the functional negation.
        :return: (N,) Tensor containing the resulting negated values.
        """
        return 1 - x
# Name aliases (lower-case and capitalised spellings) so that get_function()
# can resolve whichever variant the caller passes.
zadeh = Zadeh = ZadehTOperators
probabilistic = Probabilistic = ProbabilisticTOperators
lukasiewicz = Lukasiewicz = LukasiewiczTOperators
gupta = Gupta = GuptaTOperators
hamacher = Hamacher = HamacherTOperators
def get_function(function_name):
    """Resolve *function_name* to an attribute of this module.

    Used to turn an operator name (e.g. 'zadeh', 'Hamacher') into the
    corresponding T-operators class via the alias bindings above.

    :param function_name: name of a module-level attribute.
    :return: the attribute bound to that name.
    :raises ValueError: if no such attribute exists in this module.
    """
    module = sys.modules[__name__]
    if hasattr(module, function_name):
        return getattr(module, function_name)
    raise ValueError('Unknown operators: {}'.format(function_name))
| 35.572581
| 161
| 0.589889
| 1,217
| 8,822
| 4.259655
| 0.121611
| 0.020062
| 0.157407
| 0.185185
| 0.783951
| 0.762346
| 0.75598
| 0.75598
| 0.75135
| 0.728974
| 0
| 0.026749
| 0.283836
| 8,822
| 247
| 162
| 35.716599
| 0.793764
| 0.659034
| 0
| 0.47541
| 0
| 0
| 0.009231
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.327869
| false
| 0
| 0.04918
| 0
| 0.737705
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
d01b277f4a3c9ef84b3e4c3334fc9f1500a380b9
| 4,807
|
py
|
Python
|
response.py
|
carlfarterson/telegrambots
|
adce297620403b2369d0e6b3ac8897537d809d5e
|
[
"MIT"
] | null | null | null |
response.py
|
carlfarterson/telegrambots
|
adce297620403b2369d0e6b3ac8897537d809d5e
|
[
"MIT"
] | null | null | null |
response.py
|
carlfarterson/telegrambots
|
adce297620403b2369d0e6b3ac8897537d809d5e
|
[
"MIT"
] | null | null | null |
# Canned telebot/pyTelegramBotAPI-style Message attribute dump for a private
# chat text message ('d@d'), captured for offline experimentation. Optional
# Message attributes that were absent are all None.
response = {
    'content_type': 'text',
    'message_id': 37,
    # Sender of the message.
    'from_user': {
        'id': 522933028,
        'is_bot': False,
        'first_name': 'Carter',
        'username': 'carlfarterson',
        'last_name': 'Carlson',
        'language_code': 'en'
    },
    'date': 1579318056,  # Unix timestamp
    # Chat the message was sent in (same user — private chat).
    'chat': {
        'type': 'private',
        'last_name': 'Carlson',
        'first_name': 'Carter',
        'username': 'carlfarterson',
        'id': 522933028,
        'title': None,
        'all_members_are_administrators': None,
        'photo': None,
        'description': None,
        'invite_link': None,
        'pinned_message': None,
        'sticker_set_name': None,
        'can_set_sticker_set': None
    },
    'forward_from_chat': None,
    'forward_from_message_id': None,
    'forward_from': None,
    'forward_date': None,
    'reply_to_message': None,
    'edit_date': None,
    'media_group_id': None,
    'author_signature': None,
    'text': 'd@d',
    'entities': None,
    'caption_entities': None,
    'audio': None,
    'document': None,
    'photo': None,
    'sticker': None,
    'video': None,
    'video_note': None,
    'voice': None,
    'caption': None,
    'contact': None,
    'location': None,
    'venue': None,
    'animation': None,
    'new_chat_member': None,
    'new_chat_members': None,
    'left_chat_member': None,
    'new_chat_title': None,
    'new_chat_photo': None,
    'delete_chat_photo': None,
    'group_chat_created': None,
    'supergroup_chat_created': None,
    'channel_chat_created': None,
    'migrate_to_chat_id': None,
    'migrate_from_chat_id': None,
    'pinned_message': None,
    'invoice': None,
    'successful_payment': None,
    'connected_website': None,
    # Raw JSON payload as delivered by the Telegram Bot API.
    'json': {
        'message_id': 37,
        'from': {
            'id': 522933028,
            'is_bot': False,
            'first_name': 'Carter',
            'last_name': 'Carlson',
            'username': 'carlfarterson',
            'language_code': 'en'
        },
        'chat': {
            'id': 522933028,
            'first_name': 'Carter',
            'last_name': 'Carlson',
            'username': 'carlfarterson',
            'type': 'private'
        },
        'date': 1579318056,
        'text': 'd@d'
    }
}
# Second canned message from the same private chat, one minute later; the
# text looks like an (invalid) email address — presumably for testing an
# email-parsing bot flow.
response2 = {
    'content_type': 'text',
    'message_id': 39,
    'from_user': {
        'id': 522933028,
        'is_bot': False,
        'first_name': 'Carter',
        'username': 'carlfarterson',
        'last_name': 'Carlson',
        'language_code': 'en'
    },
    'date': 1579319076,  # Unix timestamp
    'chat': {
        'type': 'private',
        'last_name': 'Carlson',
        'first_name': 'Carter',
        'username': 'carlfarterson',
        'id': 522933028,
        'title': None,
        'all_members_are_administrators': None,
        'photo': None,
        'description': None,
        'invite_link': None,
        'pinned_message': None,
        'sticker_set_name': None,
        'can_set_sticker_set': None
    },
    'forward_from_chat': None,
    'forward_from_message_id': None,
    'forward_from': None,
    'forward_date': None,
    'reply_to_message': None,
    'edit_date': None,
    'media_group_id': None,
    'author_signature': None,
    'text': 'email2@notreal',
    'entities': None,
    'caption_entities': None,
    'audio': None,
    'document': None,
    'photo': None,
    'sticker': None,
    'video': None,
    'video_note': None,
    'voice': None,
    'caption': None,
    'contact': None,
    'location': None,
    'venue': None,
    'animation': None,
    'new_chat_member': None,
    'new_chat_members': None,
    'left_chat_member': None,
    'new_chat_title': None,
    'new_chat_photo': None,
    'delete_chat_photo': None,
    'group_chat_created': None,
    'supergroup_chat_created': None,
    'channel_chat_created': None,
    'migrate_to_chat_id': None,
    'migrate_from_chat_id': None,
    'pinned_message': None,
    'invoice': None,
    'successful_payment': None,
    'connected_website': None,
    # Raw JSON payload as delivered by the Telegram Bot API.
    'json': {
        'message_id': 39,
        'from': {
            'id': 522933028,
            'is_bot': False,
            'first_name': 'Carter',
            'last_name': 'Carlson',
            'username': 'carlfarterson',
            'language_code': 'en'
        },
        'chat': {
            'id': 522933028,
            'first_name': 'Carter',
            'last_name': 'Carlson',
            'username': 'carlfarterson',
            'type': 'private'
        },
        'date': 1579319076,
        'text': 'email2@notreal'
    }
}
# Ad-hoc exploration of the fixture above (scratch code).
response.keys()  # NOTE(review): result is discarded — this line is a no-op
print(response['message_id'])
print(response['from_user'])
print(response['date'])
print(response['chat'])
# Pull the common sender fields out of the fixture.
user = response['from_user']
first_name = user['first_name']
last_name = user['last_name']
username = user['username']
response2  # NOTE(review): bare expression statement — also a no-op
| 25.433862
| 47
| 0.545038
| 482
| 4,807
| 5.139004
| 0.170124
| 0.036334
| 0.048446
| 0.025838
| 0.885749
| 0.866371
| 0.866371
| 0.866371
| 0.866371
| 0.866371
| 0
| 0.036546
| 0.294154
| 4,807
| 188
| 48
| 25.569149
| 0.693487
| 0
| 0
| 0.846154
| 0
| 0
| 0.412523
| 0.031621
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.021978
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
19052adab8281086a48626f76312ae0c45ef402b
| 136
|
py
|
Python
|
gym_trader/envs/__init__.py
|
jajimer/gym-trader
|
5ccb7bfbd280a1260ea56adbe1c6e2fdc66ac415
|
[
"MIT"
] | null | null | null |
gym_trader/envs/__init__.py
|
jajimer/gym-trader
|
5ccb7bfbd280a1260ea56adbe1c6e2fdc66ac415
|
[
"MIT"
] | null | null | null |
gym_trader/envs/__init__.py
|
jajimer/gym-trader
|
5ccb7bfbd280a1260ea56adbe1c6e2fdc66ac415
|
[
"MIT"
] | null | null | null |
from gym_trader.envs.single_continuous_env import SingleContinuousEnv
from gym_trader.envs.single_discrete_env import SingleDiscreteEnv
| 45.333333
| 69
| 0.911765
| 18
| 136
| 6.555556
| 0.611111
| 0.118644
| 0.220339
| 0.288136
| 0.389831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 136
| 2
| 70
| 68
| 0.921875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ef8f6bff1d94b3c168f7e451fbd5c91a4ee8a6f8
| 25,168
|
py
|
Python
|
opsgenie_swagger/api/team_api.py
|
Logicworks/opsgenie-python-sdk
|
244c4c40ddcc25e70df5ba4425ab8d7c8da59c18
|
[
"Apache-2.0"
] | null | null | null |
opsgenie_swagger/api/team_api.py
|
Logicworks/opsgenie-python-sdk
|
244c4c40ddcc25e70df5ba4425ab8d7c8da59c18
|
[
"Apache-2.0"
] | null | null | null |
opsgenie_swagger/api/team_api.py
|
Logicworks/opsgenie-python-sdk
|
244c4c40ddcc25e70df5ba4425ab8d7c8da59c18
|
[
"Apache-2.0"
] | 1
|
2020-11-07T11:27:13.000Z
|
2020-11-07T11:27:13.000Z
|
# coding: utf-8
"""
OpsGenie REST API
OpsGenie OpenAPI Specification # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from opsgenie_swagger.api_client import ApiClient
class TeamApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API facade to *api_client*; build a default ApiClient when none is supplied."""
    self.api_client = ApiClient() if api_client is None else api_client
def create_team(self, body, **kwargs):  # noqa: E501
    """Create Team  # noqa: E501

    Creates a new team.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_team(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CreateTeamPayload body: Request payload of created team (required)
    :return: SuccessResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always wants just the deserialized body, not the full
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate returns the request thread in async mode and the response
    # data otherwise — in both cases exactly what must be handed back.
    return self.create_team_with_http_info(body, **kwargs)  # noqa: E501
def create_team_with_http_info(self, body, **kwargs): # noqa: E501
"""Create Team # noqa: E501
Creates a new team # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_team_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateTeamPayload body: Request payload of created team (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_team" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_team`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/teams', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_team(self, identifier, **kwargs): # noqa: E501
"""Delete Team # noqa: E501
Delete team with given id or name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_team(identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str identifier: Identifier of the team (required)
:param str identifier_type: Type of the identifier. Possible values are 'id' and 'name'. Default value is 'id'
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_team_with_http_info(identifier, **kwargs) # noqa: E501
else:
(data) = self.delete_team_with_http_info(identifier, **kwargs) # noqa: E501
return data
def delete_team_with_http_info(self, identifier, **kwargs): # noqa: E501
"""Delete Team # noqa: E501
Delete team with given id or name # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_team_with_http_info(identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str identifier: Identifier of the team (required)
:param str identifier_type: Type of the identifier. Possible values are 'id' and 'name'. Default value is 'id'
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['identifier', 'identifier_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_team" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'identifier' is set
if ('identifier' not in params or
params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `delete_team`") # noqa: E501
collection_formats = {}
path_params = {}
if 'identifier' in params:
path_params['identifier'] = params['identifier'] # noqa: E501
query_params = []
if 'identifier_type' in params:
query_params.append(('identifierType', params['identifier_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/teams/{identifier}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_team(self, identifier, **kwargs): # noqa: E501
"""Get Team # noqa: E501
Returns team with given 'id' or 'name' # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_team(identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str identifier: Identifier of the team (required)
:param str identifier_type: Type of the identifier. Possible values are 'id' and 'name'. Default value is 'id'
:return: GetTeamResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_team_with_http_info(identifier, **kwargs) # noqa: E501
else:
(data) = self.get_team_with_http_info(identifier, **kwargs) # noqa: E501
return data
def get_team_with_http_info(self, identifier, **kwargs): # noqa: E501
"""Get Team # noqa: E501
Returns team with given 'id' or 'name' # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_team_with_http_info(identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str identifier: Identifier of the team (required)
:param str identifier_type: Type of the identifier. Possible values are 'id' and 'name'. Default value is 'id'
:return: GetTeamResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['identifier', 'identifier_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_team" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'identifier' is set
if ('identifier' not in params or
params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `get_team`") # noqa: E501
collection_formats = {}
path_params = {}
if 'identifier' in params:
path_params['identifier'] = params['identifier'] # noqa: E501
query_params = []
if 'identifier_type' in params:
query_params.append(('identifierType', params['identifier_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/teams/{identifier}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetTeamResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_team_logs(self, identifier, **kwargs): # noqa: E501
"""List Team Logs # noqa: E501
Return logs of a team given with identifier # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_team_logs(identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str identifier: Identifier of the team (required)
:param str identifier_type: Type of the identifier. Possible values are 'id' and 'name'. Default value is 'id'
:param int limit: Maximum number of items to provide in the result. Must be a positive integer value.
:param str order: Sorting order of the result set
:param str offset: Key which will be used in pagination
:return: ListTeamLogsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_team_logs_with_http_info(identifier, **kwargs) # noqa: E501
else:
(data) = self.list_team_logs_with_http_info(identifier, **kwargs) # noqa: E501
return data
def list_team_logs_with_http_info(self, identifier, **kwargs): # noqa: E501
"""List Team Logs # noqa: E501
Return logs of a team given with identifier # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_team_logs_with_http_info(identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str identifier: Identifier of the team (required)
:param str identifier_type: Type of the identifier. Possible values are 'id' and 'name'. Default value is 'id'
:param int limit: Maximum number of items to provide in the result. Must be a positive integer value.
:param str order: Sorting order of the result set
:param str offset: Key which will be used in pagination
:return: ListTeamLogsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['identifier', 'identifier_type', 'limit', 'order', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_team_logs" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'identifier' is set
if ('identifier' not in params or
params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `list_team_logs`") # noqa: E501
if 'limit' in params and params['limit'] > 100: # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `list_team_logs`, must be a value less than or equal to `100`") # noqa: E501
if 'limit' in params and params['limit'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `list_team_logs`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'identifier' in params:
path_params['identifier'] = params['identifier'] # noqa: E501
query_params = []
if 'identifier_type' in params:
query_params.append(('identifierType', params['identifier_type'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'order' in params:
query_params.append(('order', params['order'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/teams/{identifier}/logs', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListTeamLogsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_teams(self, **kwargs): # noqa: E501
"""List Teams # noqa: E501
Return list of teams # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_teams(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] expand: Returns more detailed response with expanding it. Possible value is 'member' which is also returned with expandable field of response
:return: ListTeamsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_teams_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_teams_with_http_info(**kwargs) # noqa: E501
return data
def list_teams_with_http_info(self, **kwargs): # noqa: E501
"""List Teams # noqa: E501
Return list of teams # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_teams_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] expand: Returns more detailed response with expanding it. Possible value is 'member' which is also returned with expandable field of response
:return: ListTeamsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['expand'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_teams" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'expand' in params:
query_params.append(('expand', params['expand'])) # noqa: E501
collection_formats['expand'] = 'csv' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/teams', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListTeamsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_team(self, identifier, **kwargs): # noqa: E501
"""Update Team (Partial) # noqa: E501
Update team with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_team(identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str identifier: Identifier of the team (required)
:param UpdateTeamPayload body: Request payload of update team action
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_team_with_http_info(identifier, **kwargs) # noqa: E501
else:
(data) = self.update_team_with_http_info(identifier, **kwargs) # noqa: E501
return data
def update_team_with_http_info(self, identifier, **kwargs): # noqa: E501
"""Update Team (Partial) # noqa: E501
Update team with given id # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_team_with_http_info(identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str identifier: Identifier of the team (required)
:param UpdateTeamPayload body: Request payload of update team action
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['identifier', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_team" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'identifier' is set
if ('identifier' not in params or
params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `update_team`") # noqa: E501
collection_formats = {}
path_params = {}
if 'identifier' in params:
path_params['identifier'] = params['identifier'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/teams/{identifier}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 39.697161
| 166
| 0.609226
| 2,893
| 25,168
| 5.089872
| 0.071552
| 0.05107
| 0.022818
| 0.029338
| 0.943837
| 0.931002
| 0.923803
| 0.911986
| 0.907912
| 0.894873
| 0
| 0.017333
| 0.300819
| 25,168
| 633
| 167
| 39.759874
| 0.819458
| 0.349889
| 0
| 0.768546
| 1
| 0.005935
| 0.194956
| 0.032626
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038576
| false
| 0
| 0.011869
| 0
| 0.106825
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
efbd3f134463b74cc54fe8ff9612d733de27ac9c
| 5,790
|
py
|
Python
|
test/unit/test_glob.py
|
jimporter/mopack
|
e912be11528645f5463e7873b5470c420b698418
|
[
"BSD-3-Clause"
] | null | null | null |
test/unit/test_glob.py
|
jimporter/mopack
|
e912be11528645f5463e7873b5470c420b698418
|
[
"BSD-3-Clause"
] | 17
|
2020-07-23T20:28:36.000Z
|
2022-03-04T04:33:55.000Z
|
test/unit/test_glob.py
|
jimporter/mopack
|
e912be11528645f5463e7873b5470c420b698418
|
[
"BSD-3-Clause"
] | 1
|
2020-11-04T03:51:20.000Z
|
2020-11-04T03:51:20.000Z
|
from unittest import TestCase
from mopack.glob import *
class TestFilterGlob(TestCase):
    """Exercise filter_glob() against a fixed set of candidate paths.

    A trailing slash marks a directory; the same name appears both with and
    without one so the tests can tell file- and directory-matches apart.
    """

    _common_paths = ['foo', 'foo/', 'foo/bar', 'foobar', 'bar', 'bar/',
                     'bar/foo', 'bar/foo/', 'bar/foo/baz', 'bar/baz/foo',
                     'baz/bar/foo']

    def _glob(self, pattern, paths=None, **kwargs):
        # Materialise the result so assertEqual can compare against lists.
        candidates = paths or self._common_paths
        return list(filter_glob(pattern, candidates, **kwargs))

    def test_absolute_simple(self):
        matches = self._glob('/foo')
        self.assertEqual(matches, ['foo', 'foo/', 'foo/bar'])
        matches = self._glob('/foo/')
        self.assertEqual(matches, ['foo/', 'foo/bar'])

    def test_relative_simple(self):
        matches = self._glob('foo')
        self.assertEqual(matches,
                         ['foo', 'foo/', 'foo/bar', 'bar/foo', 'bar/foo/',
                          'bar/foo/baz', 'bar/baz/foo', 'baz/bar/foo'])
        matches = self._glob('foo/')
        self.assertEqual(matches,
                         ['foo/', 'foo/bar', 'bar/foo/', 'bar/foo/baz'])

    def test_absolute_multi(self):
        matches = self._glob('/bar/foo')
        self.assertEqual(matches, ['bar/foo', 'bar/foo/', 'bar/foo/baz'])
        matches = self._glob('/bar/foo/')
        self.assertEqual(matches, ['bar/foo/', 'bar/foo/baz'])

    def test_relative_multi(self):
        matches = self._glob('bar/foo')
        self.assertEqual(matches,
                         ['bar/foo', 'bar/foo/', 'bar/foo/baz', 'baz/bar/foo'])
        matches = self._glob('bar/foo/')
        self.assertEqual(matches, ['bar/foo/', 'bar/foo/baz'])

    def test_absolute_glob(self):
        matches = self._glob('/ba*')
        self.assertEqual(matches,
                         ['bar', 'bar/', 'bar/foo', 'bar/foo/', 'bar/foo/baz',
                          'bar/baz/foo', 'baz/bar/foo'])
        matches = self._glob('/ba*/')
        self.assertEqual(matches,
                         ['bar/', 'bar/foo', 'bar/foo/', 'bar/foo/baz',
                          'bar/baz/foo', 'baz/bar/foo'])
        matches = self._glob('/*ar')
        self.assertEqual(matches,
                         ['foobar', 'bar', 'bar/', 'bar/foo', 'bar/foo/',
                          'bar/foo/baz', 'bar/baz/foo'])
        matches = self._glob('/*ar/')
        self.assertEqual(matches,
                         ['bar/', 'bar/foo', 'bar/foo/', 'bar/foo/baz',
                          'bar/baz/foo'])

    def test_relative_glob(self):
        matches = self._glob('ba*')
        self.assertEqual(matches,
                         ['foo/bar', 'bar', 'bar/', 'bar/foo', 'bar/foo/',
                          'bar/foo/baz', 'bar/baz/foo', 'baz/bar/foo'])
        matches = self._glob('ba*/')
        self.assertEqual(matches,
                         ['bar/', 'bar/foo', 'bar/foo/', 'bar/foo/baz',
                          'bar/baz/foo', 'baz/bar/foo'])
        matches = self._glob('*ar')
        self.assertEqual(matches, [
            'foo/bar', 'foobar', 'bar', 'bar/', 'bar/foo', 'bar/foo/',
            'bar/foo/baz', 'bar/baz/foo', 'baz/bar/foo'
        ])
        matches = self._glob('*ar/')
        self.assertEqual(matches,
                         ['bar/', 'bar/foo', 'bar/foo/', 'bar/foo/baz',
                          'bar/baz/foo'])

    def test_relative_starstar(self):
        matches = self._glob('bar/**/foo')
        self.assertEqual(matches,
                         ['bar/foo', 'bar/foo/', 'bar/foo/baz', 'bar/baz/foo',
                          'baz/bar/foo'])
        matches = self._glob('bar/**/foo/')
        self.assertEqual(matches, ['bar/foo/', 'bar/foo/baz'])

    def test_consecutive_starstar(self):
        # Repeated ** components collapse to a single one.
        matches = self._glob('bar/**/**/foo')
        self.assertEqual(matches,
                         ['bar/foo', 'bar/foo/', 'bar/foo/baz', 'bar/baz/foo',
                          'baz/bar/foo'])
        matches = self._glob('bar/**/**/foo/')
        self.assertEqual(matches, ['bar/foo/', 'bar/foo/baz'])

    def test_absolute_starstar_start(self):
        matches = self._glob('/**/foo')
        self.assertEqual(matches,
                         ['foo', 'foo/', 'foo/bar', 'bar/foo', 'bar/foo/',
                          'bar/foo/baz', 'bar/baz/foo', 'baz/bar/foo'])
        matches = self._glob('/**/foo/')
        self.assertEqual(matches,
                         ['foo/', 'foo/bar', 'bar/foo/', 'bar/foo/baz'])

    def test_relative_starstar_start(self):
        matches = self._glob('**/foo')
        self.assertEqual(matches,
                         ['foo', 'foo/', 'foo/bar', 'bar/foo', 'bar/foo/',
                          'bar/foo/baz', 'bar/baz/foo', 'baz/bar/foo'])
        matches = self._glob('**/foo/')
        self.assertEqual(matches,
                         ['foo/', 'foo/bar', 'bar/foo/', 'bar/foo/baz'])

    def test_starstar_end(self):
        # A trailing ** (with or without a slash) matches directory contents.
        matches = self._glob('foo/**')
        self.assertEqual(matches,
                         ['foo/', 'foo/bar', 'bar/foo/', 'bar/foo/baz'])
        matches = self._glob('foo/**/')
        self.assertEqual(matches,
                         ['foo/', 'foo/bar', 'bar/foo/', 'bar/foo/baz'])

    def test_complicated(self):
        matches = self._glob('*a*/**/*.txt', [
            'bar/', 'bar/file.txt', 'foo/bar/file.txt'
        ])
        self.assertEqual(matches, ['bar/file.txt', 'foo/bar/file.txt'])
        matches = self._glob('*a*/baz/**/*.txt', [
            'bar/', 'bar/file.txt', 'bar/baz/file.txt',
            'foo/bar/baz/quux/file.txt'
        ])
        self.assertEqual(matches,
                         ['bar/baz/file.txt', 'foo/bar/baz/quux/file.txt'])
        matches = self._glob('*a*/**/*o*/**', [
            'bar/', 'bar/foo', 'bar/foo/'
        ])
        self.assertEqual(matches, ['bar/foo/'])

    def test_multiple(self):
        # A list of patterns matches the union of the individual patterns.
        matches = self._glob(['/foo', '/bar'])
        self.assertEqual(matches,
                         ['foo', 'foo/', 'foo/bar', 'bar', 'bar/', 'bar/foo',
                          'bar/foo/', 'bar/foo/baz', 'bar/baz/foo'])
        matches = self._glob(['/foo/', '/bar/'])
        self.assertEqual(matches,
                         ['foo/', 'foo/bar', 'bar/', 'bar/foo', 'bar/foo/',
                          'bar/foo/baz', 'bar/baz/foo'])

    def test_empty(self):
        # The empty pattern and the bare root match everything.
        self.assertEqual(self._glob(''), self._common_paths)
        self.assertEqual(self._glob('/'), self._common_paths)

    def test_explicit_glob(self):
        # A pre-built Glob object is accepted in place of a pattern string.
        pattern = Glob('/foo')
        self.assertEqual(self._glob(pattern), ['foo', 'foo/', 'foo/bar'])
| 45.952381
| 79
| 0.466839
| 673
| 5,790
| 3.907875
| 0.066865
| 0.223574
| 0.195057
| 0.250951
| 0.877186
| 0.84943
| 0.847909
| 0.75019
| 0.708365
| 0.708365
| 0
| 0
| 0.305181
| 5,790
| 125
| 80
| 46.32
| 0.653741
| 0
| 0
| 0.264151
| 0
| 0
| 0.284111
| 0.008636
| 0
| 0
| 0
| 0
| 0.320755
| 1
| 0.150943
| false
| 0
| 0.018868
| 0.009434
| 0.198113
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
efc0ac813da09b399cf26b6d548053e3a746c4cd
| 4,284
|
py
|
Python
|
src/layyer/discodef/mnist_model.py
|
layel2/layyer-lib
|
db48b5c38098ee93d2d34693d98e5ef4d319d919
|
[
"MIT"
] | null | null | null |
src/layyer/discodef/mnist_model.py
|
layel2/layyer-lib
|
db48b5c38098ee93d2d34693d98e5ef4d319d919
|
[
"MIT"
] | null | null | null |
src/layyer/discodef/mnist_model.py
|
layel2/layyer-lib
|
db48b5c38098ee93d2d34693d98e5ef4d319d919
|
[
"MIT"
] | null | null | null |
import torch
from torch import nn
import torch.nn.functional as F
class mnistmodel_A(nn.Module):
    """MNIST classifier A: two conv layers followed by two dense layers.

    Expects 28x28 single-channel input; produces 10 class logits.
    """

    def __init__(self):
        super(mnistmodel_A, self).__init__()
        # 28x28x1 -> 28x28x64 (padded) -> 12x12x64 (strided conv)
        self.conv1 = nn.Conv2d(1, 64, kernel_size=5, stride=1, padding=2)
        self.conv2 = nn.Conv2d(64, 64, kernel_size=5, stride=2)
        self.dense1 = nn.Linear(64 * 12 * 12, 128)
        self.dense2 = nn.Linear(128, 10)

    def forward(self, x):
        out = F.relu(self.conv1(x))
        out = F.relu(self.conv2(out))
        out = F.dropout(out, 0.25)
        out = out.view(-1, 64 * 12 * 12)  # flatten for the dense head
        out = F.relu(self.dense1(out))
        out = F.dropout(out, 0.5)
        return self.dense2(out)
class mnistmodel_B(nn.Module):
    """MNIST classifier B: input dropout, three conv layers, two dense layers.

    Expects 28x28 single-channel input; produces 10 class logits.
    """

    def __init__(self):
        super(mnistmodel_B, self).__init__()
        # 28x28x1 -> 14x14x64 -> 5x5x128 -> 1x1x128
        self.conv1 = nn.Conv2d(1, 64, kernel_size=8, stride=2, padding=3)
        self.conv2 = nn.Conv2d(64, 128, kernel_size=6, stride=2)
        self.conv3 = nn.Conv2d(128, 128, kernel_size=5, stride=1)
        self.dense1 = nn.Linear(128, 128)
        self.dense2 = nn.Linear(128, 10)

    def forward(self, x):
        out = F.dropout(x, 0.2)  # dropout applied directly on the input
        for conv in (self.conv1, self.conv2, self.conv3):
            out = F.relu(conv(out))
        out = out.view(-1, 128)  # spatial dims are 1x1 here
        out = F.relu(self.dense1(out))
        out = F.dropout(out, 0.5)
        return self.dense2(out)
class mnistmodel_C(nn.Module):
    """MNIST classifier C: two conv layers then a dropout-regularised dense head.

    Expects 28x28 single-channel input; produces 10 class logits.
    """

    def __init__(self):
        super(mnistmodel_C, self).__init__()
        # 28x28x1 -> 28x28x128 (padded) -> 12x12x64 (strided conv)
        self.conv1 = nn.Conv2d(1, 128, kernel_size=3, padding=1)
        self.conv2 = nn.Conv2d(128, 64, kernel_size=5, stride=2)
        self.dense1 = nn.Linear(12 * 12 * 64, 128)
        self.dense2 = nn.Linear(128, 10)

    def forward(self, x):
        out = F.relu(self.conv1(x))
        out = F.relu(self.conv2(out))
        out = out.view(-1, 12 * 12 * 64)  # flatten before dropout
        out = F.dropout(out, 0.25)
        out = F.relu(self.dense1(out))
        out = F.dropout(out, 0.5)
        return self.dense2(out)
class mnistmodel_B_old(nn.Module):
    """Earlier revision of mnistmodel_B with a LeNet-style 120/84 dense head.

    Expects 28x28 single-channel input; produces 10 class logits.
    """

    def __init__(self):
        # BUG FIX: was ``super(mnistmodel_B, self).__init__()`` — this class is
        # not a subclass of mnistmodel_B, so super() raised TypeError on
        # construction.  It must name this class itself.
        super(mnistmodel_B_old, self).__init__()
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=64, kernel_size=8, stride=2, padding=3)
        self.conv2 = nn.Conv2d(in_channels=64, out_channels=128, kernel_size=6, stride=2)
        self.conv3 = nn.Conv2d(in_channels=128, out_channels=128, kernel_size=5, stride=1)
        self.dense1 = nn.Linear(in_features=128, out_features=120)
        self.dense2 = nn.Linear(in_features=120, out_features=84)
        self.dense3 = nn.Linear(in_features=84, out_features=10)

    def forward(self, x):
        x = F.dropout(x, 0.2)  # dropout applied directly on the input
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv3(x))
        x = x.view(-1, 128)  # spatial dims are 1x1 here
        x = F.relu(self.dense1(x))
        x = F.relu(self.dense2(x))
        x = self.dense3(x)
        return x
class mnistmodel_C_old(nn.Module):
    """Earlier revision of mnistmodel_C with a LeNet-style 120/84 dense head.

    Expects 28x28 single-channel input; produces 10 class logits.
    """

    def __init__(self):
        # BUG FIX: was ``super(mnistmodel_C, self).__init__()`` — this class is
        # not a subclass of mnistmodel_C, so super() raised TypeError on
        # construction.  It must name this class itself.
        super(mnistmodel_C_old, self).__init__()
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=128, kernel_size=3, padding=1)
        self.conv2 = nn.Conv2d(in_channels=128, out_channels=64, kernel_size=5, stride=2)
        self.dense1 = nn.Linear(in_features=12 * 12 * 64, out_features=120)
        self.dense2 = nn.Linear(in_features=120, out_features=84)
        self.dense3 = nn.Linear(in_features=84, out_features=10)

    def forward(self, x):
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = x.view(-1, 12 * 12 * 64)  # flatten before dropout
        x = F.dropout(x, 0.25)
        x = F.relu(self.dense1(x))
        x = F.relu(self.dense2(x))
        x = F.dropout(x, 0.5)
        x = self.dense3(x)
        return x
def getmodel(model=None, load_path=None):
    """Return *model* moved to the best available device, optionally loading weights.

    :param model: the network to prepare; defaults to a fresh ``mnistmodel_A``
    :param load_path: optional path to a ``state_dict`` checkpoint to load
    :return: the prepared model (same object that was passed in, if any)
    """
    if model is None:
        # BUG FIX: the default used to be ``model=mnistmodel_A()``, which is
        # evaluated once at import time, so every call without an argument
        # shared (and mutated) the same instance.  Construct lazily instead.
        model = mnistmodel_A()
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    if load_path is not None:
        # map_location lets CUDA-trained checkpoints load on a CPU-only host
        model.load_state_dict(torch.load(load_path, map_location=device))
    model.to(device)
    return model
| 36.305085
| 98
| 0.611111
| 691
| 4,284
| 3.612156
| 0.107091
| 0.024038
| 0.022837
| 0.076122
| 0.891827
| 0.891426
| 0.871795
| 0.850561
| 0.842949
| 0.813301
| 0
| 0.082073
| 0.243464
| 4,284
| 117
| 99
| 36.615385
| 0.688059
| 0.010037
| 0
| 0.760417
| 0
| 0
| 0.001652
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.114583
| false
| 0
| 0.03125
| 0
| 0.260417
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.