hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f32e08be6df10b5c2d00272b4d1323a56028b5cb
| 45
|
py
|
Python
|
algo/mappo2/elements/agent.py
|
xlnwel/g2rl
|
e1261fdd2ce70724a99ddd174616cf013917b241
|
[
"Apache-2.0"
] | 1
|
2022-03-27T08:25:57.000Z
|
2022-03-27T08:25:57.000Z
|
algo/ppo/elements/agent.py
|
xlnwel/g2rl
|
e1261fdd2ce70724a99ddd174616cf013917b241
|
[
"Apache-2.0"
] | null | null | null |
algo/ppo/elements/agent.py
|
xlnwel/g2rl
|
e1261fdd2ce70724a99ddd174616cf013917b241
|
[
"Apache-2.0"
] | 1
|
2021-11-09T08:33:35.000Z
|
2021-11-09T08:33:35.000Z
|
from core.elements.agent import create_agent
| 22.5
| 44
| 0.866667
| 7
| 45
| 5.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 45
| 1
| 45
| 45
| 0.926829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f373e6647e8c7d37bf81604c3eda5ebce9a6840c
| 8,695
|
py
|
Python
|
tests/test_validating.py
|
ealesid/starlette-jsonrpc
|
a1c21f3cd41adbde72dd2e5d24a2a13f36d77366
|
[
"MIT"
] | 29
|
2019-04-10T12:15:13.000Z
|
2021-02-24T05:01:57.000Z
|
tests/test_validating.py
|
ealesid/starlette-jsonrpc
|
a1c21f3cd41adbde72dd2e5d24a2a13f36d77366
|
[
"MIT"
] | 7
|
2019-04-12T21:20:44.000Z
|
2019-12-01T13:58:05.000Z
|
tests/test_validating.py
|
ealesid/starlette-jsonrpc
|
a1c21f3cd41adbde72dd2e5d24a2a13f36d77366
|
[
"MIT"
] | 6
|
2019-04-10T12:18:13.000Z
|
2021-11-19T09:11:31.000Z
|
from . import client
# JSON
def test_payload_as_empty_dict():
payload = {}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "None",
"error": {"code": -32600, "message": "Invalid Request.", "data": {}},
}
def test_payload_as_empty_list():
payload = []
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "None",
"error": {"code": -32600, "message": "Invalid Request.", "data": {}},
}
def test_incorrect_payload():
payload = [1]
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "None",
"error": {"code": -32600, "message": "Invalid Request.", "data": {}},
}
# PARAMS
def test_positional_parameters():
payload = {
"jsonrpc": "2.0",
"method": "subtract_positional",
"params": [42, 23],
"id": "1",
}
response = client.post("/api/", json=payload)
assert response.json() == {"jsonrpc": "2.0", "id": "1", "result": 19}
def test_positional_parameters_2():
payload = {
"jsonrpc": "2.0",
"method": "subtract_positional",
"params": [23, 42],
"id": "1",
}
response = client.post("/api/", json=payload)
assert response.json() == {"jsonrpc": "2.0", "id": "1", "result": -19}
def test_named_parameters():
payload = {
"jsonrpc": "2.0",
"method": "SubtractMethod",
"params": {"x": 42, "y": 23},
"id": "1",
}
response = client.post("/api/", json=payload)
assert response.json() == {"jsonrpc": "2.0", "id": "1", "result": 19}
def test_named_parameters_2():
payload = {
"jsonrpc": "2.0",
"method": "SubtractMethod",
"params": {"y": 23, "x": 42},
"id": "1",
}
response = client.post("/api/", json=payload)
assert response.json() == {"jsonrpc": "2.0", "id": "1", "result": 19}
def test_named_parameters_3():
payload = {
"jsonrpc": "2.0",
"method": "sum",
"params": {"x": 42, "y": 23},
"id": "1",
}
response = client.post("/api/", json=payload)
assert response.json() == {"jsonrpc": "2.0", "id": "1", "result": {"sum": 65}}
def test_params_not_object():
payload = {"jsonrpc": "2.0", "method": "subtract", "params": "", "id": "1"}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {
"code": -32602,
"message": "Invalid params.",
"data": {"params": "Did not match any valid type."},
},
}
def test_params_as_invalid_object():
payload = {"jsonrpc": "2.0", "method": "subtract", "params": {}, "id": "1"}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {
"code": -32602,
"message": "Invalid params.",
"data": {"params": "Required param: 'x'"},
},
}
def test_params_as_invalid_list():
payload = {
"jsonrpc": "2.0",
"method": "subtract_positional",
"params": [1],
"id": "1",
}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {
"code": -32602,
"message": "Invalid params.",
"data": {
"params": "subtract_positional() missing 1 required positional argument: 'y'"
},
},
}
def test_without_params():
payload = {"jsonrpc": "2.0", "method": "my_method", "id": "1"}
response = client.post("/api/", json=payload)
assert response.status_code == 200
# ID
def test_id_as_integer():
payload = {
"jsonrpc": "2.0",
"method": "subtract",
"params": {"x": 42, "y": 23},
"id": 1,
}
response = client.post("/api/", json=payload)
assert response.json() == {"jsonrpc": "2.0", "id": 1, "result": 19}
def test_id_as_string():
payload = {
"jsonrpc": "2.0",
"method": "subtract",
"params": {"x": 42, "y": 23},
"id": "abc",
}
response = client.post("/api/", json=payload)
assert response.json() == {"jsonrpc": "2.0", "id": "abc", "result": 19}
def test_id_as_null():
payload = {
"jsonrpc": "2.0",
"method": "subtract",
"params": {"x": 42, "y": 23},
"id": None,
}
response = client.post("/api/", json=payload)
assert response.json() == {"jsonrpc": "2.0", "id": None, "result": 19}
def test_empty_id():
payload = {
"jsonrpc": "2.0",
"method": "subtract",
"params": {"x": 42, "y": 23},
"id": "",
}
response = client.post("/api/", json=payload)
assert response.json() == {"jsonrpc": "2.0", "id": None, "result": 19}
def test_notification():
"""
Notification
"""
payload = {"jsonrpc": "2.0", "method": "subtract", "params": {"x": 42, "y": 23}}
response = client.post("/api/", json=payload)
assert response.json() == {}
# JSONRPC
def test_jsonrpc_as_integer():
payload = {
"jsonrpc": 2,
"method": "subtract",
"params": {"x": 42, "y": 23},
"id": "1",
}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {
"code": -32602,
"message": "Invalid params.",
"data": {"jsonrpc": "Must be a string."},
},
}
def test_empty_jsonrpc():
payload = {
"jsonrpc": "",
"method": "subtract",
"params": {"x": 42, "y": 23},
"id": "1",
}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {
"code": -32602,
"message": "Invalid params.",
"data": {"jsonrpc": "Must not be blank."},
},
}
def test_jsonrpc_wrong_value():
payload = {
"jsonrpc": "3.0",
"method": "subtract",
"params": {"x": 42, "y": 23},
"id": "1",
}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {
"code": -32602,
"message": "Invalid params.",
"data": {"jsonrpc": "Must match the pattern /2.0/."},
},
}
def test_without_jsonrpc():
payload = {"method": "subtract", "params": {"x": 42, "y": 23}, "id": "1"}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {
"code": -32602,
"message": "Invalid params.",
"data": {"jsonrpc": "This field is required."},
},
}
# METHOD
def test_not_registered_method():
payload = {
"jsonrpc": "2.0",
"method": "non_existing_method",
"params": {"x": 42, "y": 23},
"id": "1",
}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {"code": -32601, "message": "Method not found.", "data": {}},
}
def test_without_method():
payload = {"jsonrpc": "2.0", "params": {"x": 42, "y": 23}, "id": "1"}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {
"code": -32602,
"message": "Invalid params.",
"data": {"method": "This field is required."},
},
}
def test_with_empty_method():
payload = {"jsonrpc": "2.0", "method": "", "params": {"x": 42, "y": 23}, "id": "1"}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {
"code": -32602,
"message": "Invalid params.",
"data": {"method": "Must not be blank."},
},
}
def test_method_as_integer():
payload = {"jsonrpc": "2.0", "method": 1, "params": {"x": 42, "y": 23}, "id": "1"}
response = client.post("/api/", json=payload)
assert response.json() == {
"jsonrpc": "2.0",
"id": "1",
"error": {
"code": -32602,
"message": "Invalid params.",
"data": {"method": "Must be a string."},
},
}
# def test_with_method_name_starting_with_rpc_period():
# pass
| 25.423977
| 93
| 0.477286
| 919
| 8,695
| 4.424374
| 0.095756
| 0.082637
| 0.090753
| 0.12912
| 0.849238
| 0.807673
| 0.771028
| 0.739056
| 0.705116
| 0.705116
| 0
| 0.047979
| 0.300058
| 8,695
| 341
| 94
| 25.498534
| 0.620112
| 0.012191
| 0
| 0.60223
| 0
| 0
| 0.240458
| 0.002451
| 0
| 0
| 0
| 0
| 0.092937
| 1
| 0.092937
| false
| 0
| 0.003717
| 0
| 0.096654
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f3a2955ed1e3a3bdbe87b69cc25dac7623b16835
| 256,828
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_policy_repository_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_policy_repository_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/_meta/_Cisco_IOS_XR_policy_repository_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import re
import collections
from enum import Enum
from ydk._core._dm_meta_info import _MetaInfoClassMember, _MetaInfoClass, _MetaInfoEnum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk._core._dm_meta_info import ATTRIBUTE, REFERENCE_CLASS, REFERENCE_LIST, REFERENCE_LEAFLIST, REFERENCE_IDENTITY_CLASS, REFERENCE_ENUM_CLASS, REFERENCE_BITS, REFERENCE_UNION
from ydk.errors import YPYError, YPYModelError
from ydk.providers._importer import _yang_ns
_meta_table = {
'GroupEnum' : _MetaInfoEnum('GroupEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper',
{
'address-family-group':'address_family_group',
'session-group':'session_group',
'neighbor-group':'neighbor_group',
'neighbor':'neighbor',
'error-group':'error_group',
}, 'Cisco-IOS-XR-policy-repository-oper', _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper']),
'AttachPointDirectionEnum' : _MetaInfoEnum('AttachPointDirectionEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper',
{
'in':'in_',
'out':'out',
}, 'Cisco-IOS-XR-policy-repository-oper', _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper']),
'SubAddressFamilyEnum' : _MetaInfoEnum('SubAddressFamilyEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper',
{
'unicast':'unicast',
'multicast':'multicast',
'label':'label',
'tunnel':'tunnel',
'vpn':'vpn',
'mdt':'mdt',
'vpls':'vpls',
'rt-constraint':'rt_constraint',
'mvpn':'mvpn',
'flow':'flow',
'vpn-mcast':'vpn_mcast',
'saf-none':'saf_none',
'saf-unknown':'saf_unknown',
}, 'Cisco-IOS-XR-policy-repository-oper', _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper']),
'AddressFamilyEnum' : _MetaInfoEnum('AddressFamilyEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper',
{
'ipv4':'ipv4',
'ipv6':'ipv6',
'l2vpn':'l2vpn',
'ls':'ls',
'af-none':'af_none',
'af-unknown':'af_unknown',
}, 'Cisco-IOS-XR-policy-repository-oper', _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper']),
'ObjectStatusEnum' : _MetaInfoEnum('ObjectStatusEnum', 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper',
{
'active':'active',
'inactive':'inactive',
'unused':'unused',
}, 'Cisco-IOS-XR-policy-repository-oper', _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper']),
'RoutingPolicy.Limits' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Limits',
False,
[
_MetaInfoClassMember('compiled-policies-length', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' The total compiled length of all policies
''',
'compiled_policies_length',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('current-lines-of-policy-limit', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of lines of configuration for
policies/sets currently allowed
''',
'current_lines_of_policy_limit',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('current-lines-of-policy-used', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Current number of lines configured for all
policies and sets
''',
'current_lines_of_policy_used',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('current-number-of-policies-limit', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Number of policies currently allowed
''',
'current_number_of_policies_limit',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('current-number-of-policies-used', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Current number of policies configured
''',
'current_number_of_policies_used',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('maximum-lines-of-policy', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Maximum lines of configuration allowable for all
policies and sets
''',
'maximum_lines_of_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('maximum-number-of-policies', ATTRIBUTE, 'int' , None, None,
[('0', '4294967295')], [],
''' Maximum number of policies allowable
''',
'maximum_number_of_policies',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'limits',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedPolicies' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedPolicies',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'directly-used-policies',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedSets.Sets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedSets.Sets',
False,
[
_MetaInfoClassMember('set-domain', ATTRIBUTE, 'str' , None, None,
[], [],
''' Domain of sets
''',
'set_domain',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('set-name', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Names of sets in this domain
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedSets',
False,
[
_MetaInfoClassMember('sets', REFERENCE_LIST, 'Sets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedSets.Sets',
[], [],
''' List of sets in several domains
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'all-used-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedSets.Sets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedSets.Sets',
False,
[
_MetaInfoClassMember('set-domain', ATTRIBUTE, 'str' , None, None,
[], [],
''' Domain of sets
''',
'set_domain',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('set-name', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Names of sets in this domain
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedSets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedSets',
False,
[
_MetaInfoClassMember('sets', REFERENCE_LIST, 'Sets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedSets.Sets',
[], [],
''' List of sets in several domains
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'directly-used-sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedPolicies' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedPolicies',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'all-used-policies',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses',
False,
[
_MetaInfoClassMember('all-used-policies', REFERENCE_CLASS, 'AllUsedPolicies' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedPolicies',
[], [],
''' Policies used by this policy, or by policies
that it uses
''',
'all_used_policies',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('all-used-sets', REFERENCE_CLASS, 'AllUsedSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedSets',
[], [],
''' Sets used by this policy, or by policies
that it uses
''',
'all_used_sets',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('directly-used-policies', REFERENCE_CLASS, 'DirectlyUsedPolicies' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedPolicies',
[], [],
''' Policies that this policy uses directly
''',
'directly_used_policies',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('directly-used-sets', REFERENCE_CLASS, 'DirectlyUsedSets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedSets',
[], [],
''' Sets that this policy uses directly
''',
'directly_used_sets',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'policy-uses',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies.RoutePolicy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies.RoutePolicy',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Route policy name
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('policy-uses', REFERENCE_CLASS, 'PolicyUses' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses',
[], [],
''' Information about which policies and sets
this policy uses
''',
'policy_uses',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'route-policy',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.RoutePolicies' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.RoutePolicies',
False,
[
_MetaInfoClassMember('route-policy', REFERENCE_LIST, 'RoutePolicy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies.RoutePolicy',
[], [],
''' Information about an individual policy
''',
'route_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'route-policies',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Policies' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Policies',
False,
[
_MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.Active',
[], [],
''' All objects of a given type that are attached to
a protocol
''',
'active',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.Inactive',
[], [],
''' All objects of a given type that are not
attached to a protocol
''',
'inactive',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policies', REFERENCE_CLASS, 'RoutePolicies' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.RoutePolicies',
[], [],
''' Information about individual policies
''',
'route_policies',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies.Unused',
[], [],
''' All objects of a given type that are not
referenced at all
''',
'unused',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'policies',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.OspfArea.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfArea.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.OspfArea.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfArea.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.OspfArea.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.OspfArea.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfArea.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.OspfArea.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfArea.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.OspfArea.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.OspfArea.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfArea.Sets_.Set',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.OspfArea.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.OspfArea.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.OspfArea.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfArea.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.OspfArea.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.OspfArea.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfArea.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.OspfArea.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfArea.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.OspfArea.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfArea.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.OspfArea' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.OspfArea',
False,
[
_MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.OspfArea.Active',
[], [],
''' All objects of a given type that are attached to
a protocol
''',
'active',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.OspfArea.Inactive',
[], [],
''' All objects of a given type that are not
attached to a protocol
''',
'inactive',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.OspfArea.Sets_',
[], [],
''' Information about individual sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.OspfArea.Unused',
[], [],
''' All objects of a given type that are not
referenced at all
''',
'unused',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'ospf-area',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaque.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaque.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaque.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaque.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaque.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaque.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityOpaque' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityOpaque',
False,
[
_MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityOpaque.Active',
[], [],
''' All objects of a given type that are attached to
a protocol
''',
'active',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityOpaque.Inactive',
[], [],
''' All objects of a given type that are not
attached to a protocol
''',
'inactive',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_',
[], [],
''' Information about individual sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityOpaque.Unused',
[], [],
''' All objects of a given type that are not
referenced at all
''',
'unused',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'extended-community-opaque',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for the 'attached' container of one extended-community-seg-nh set:
# holds the list of attach-point bindings where the set is in use.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.Attached',
False,
[
# 'binding': list of attach-point records (see ...Attached.Binding entry).
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for one named extended-community-seg-nh set ('set' list entry,
# keyed by set-name) and its parent 'sets' container.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set',
False,
[
# Key leaf: last positional arg True marks it as a list key.
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for the 'sets' container wrapping the list of seg-nh sets.
'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for the three leaf-list containers of the seg-nh set type:
# 'unused' (never referenced), 'inactive' (referenced but not attached),
# 'active' (attached to a protocol). Each holds a string leaf-list 'object'.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.ExtendedCommunitySegNh.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNh.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySegNh.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNh.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySegNh.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNh.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for the top-level 'extended-community-seg-nh' container,
# aggregating active/inactive/unused summaries and the per-set details.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.ExtendedCommunitySegNh' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySegNh',
False,
[
_MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySegNh.Active',
[], [],
''' All objects of a given type that are attached to
a protocol
''',
'active',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySegNh.Inactive',
[], [],
''' All objects of a given type that are not
attached to a protocol
''',
'inactive',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_',
[], [],
''' Information about individual sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySegNh.Unused',
[], [],
''' All objects of a given type that are not
referenced at all
''',
'unused',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'extended-community-seg-nh',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for 'used-by' of one extended-community-soo set: the list of
# policies ('reference' entries) that use the set, with usage status.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for one attach-point binding of an extended-community-soo set:
# identifies where (protocol, vrf, interface, neighbor, area, ...) the set
# is attached. All leaves are optional operational data.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
# NOTE(review): 'propogate' spelling below mirrors the YANG leaf names
# ('propogate-from'/'propogate-to') in the model — do not "correct" it.
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for the extended-community-soo 'attached' container, the per-set
# 'set' list entry (keyed by set-name), and the wrapping 'sets' container.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set',
False,
[
# Key leaf: last positional arg True marks it as a list key.
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for the three leaf-list containers of the soo set type:
# 'unused', 'inactive', 'active' — each a string leaf-list 'object'.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.ExtendedCommunitySoo.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySoo.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySoo.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySoo.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunitySoo.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySoo.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for the top-level 'extended-community-soo' container,
# aggregating active/inactive/unused summaries and the per-set details.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.ExtendedCommunitySoo' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunitySoo',
False,
[
_MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySoo.Active',
[], [],
''' All objects of a given type that are attached to
a protocol
''',
'active',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySoo.Inactive',
[], [],
''' All objects of a given type that are not
attached to a protocol
''',
'inactive',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_',
[], [],
''' Information about individual sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySoo.Unused',
[], [],
''' All objects of a given type that are not
referenced at all
''',
'unused',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'extended-community-soo',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for 'used-by' of one tag set: the list of policies
# ('reference' entries) that use the set, with usage status.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.Tag.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Tag.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Tag.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Tag.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Tag.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for one attach-point binding of a tag set: identifies where
# (protocol, vrf, interface, neighbor, area, ...) the set is attached.
# All leaves are optional operational data.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.Tag.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Tag.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
# NOTE(review): 'propogate' spelling below mirrors the YANG leaf names
# ('propogate-from'/'propogate-to') in the model — do not "correct" it.
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for the tag-set 'attached' container, the per-set 'set' list
# entry (keyed by set-name), and the wrapping 'sets' container.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.Tag.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Tag.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Tag.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Tag.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Tag.Sets_.Set',
False,
[
# Key leaf: last positional arg True marks it as a list key.
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Tag.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Tag.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Tag.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Tag.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Tag.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for the three leaf-list containers of the tag set type:
# 'unused', 'inactive', 'active' — each a string leaf-list 'object'.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.Tag.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Tag.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Tag.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Tag.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Tag.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Tag.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for the top-level 'tag' set container, aggregating
# active/inactive/unused summaries and the per-set details.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.Tag' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Tag',
False,
[
_MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Tag.Active',
[], [],
''' All objects of a given type that are attached to
a protocol
''',
'active',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Tag.Inactive',
[], [],
''' All objects of a given type that are not
attached to a protocol
''',
'inactive',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Tag.Sets_',
[], [],
''' Information about individual sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Tag.Unused',
[], [],
''' All objects of a given type that are not
referenced at all
''',
'unused',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'tag',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Meta-info for 'used-by' of one prefix set: the list of policies
# ('reference' entries) that use the set, with usage status.
# NOTE(review): auto-generated YDK binding data — do not edit by hand.
'RoutingPolicy.Sets.Prefix.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Prefix.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Prefix.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Prefix.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Prefix.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Prefix.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Prefix.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Prefix.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Prefix.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Prefix.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Prefix.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Prefix.Sets_.Set',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Prefix.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Prefix.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Prefix.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Prefix.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Prefix.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Prefix.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Prefix.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Prefix.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Prefix.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Prefix.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Prefix.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Prefix' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Prefix',
False,
[
_MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Prefix.Active',
[], [],
''' All objects of a given type that are attached to
a protocol
''',
'active',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Prefix.Inactive',
[], [],
''' All objects of a given type that are not
attached to a protocol
''',
'inactive',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Prefix.Sets_',
[], [],
''' Information about individual sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Prefix.Unused',
[], [],
''' All objects of a given type that are not
referenced at all
''',
'unused',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'prefix',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# ---------------------------------------------------------------------------
# Generated YDK metadata for the "community" set type.  Structurally
# identical to the prefix-set entries: per-set used-by references,
# attach-point bindings, and active/inactive/unused summaries.
# ---------------------------------------------------------------------------

# One 'reference' list entry: a policy that refers to this community set,
# with its status and whether the use is direct.
'RoutingPolicy.Sets.Community.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Community.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'used-by' container of a community set: list of policies referencing the set.
'RoutingPolicy.Sets.Community.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Community.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Community.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# One 'binding' list entry: a protocol attach point where a policy that uses
# this community set is attached.
'RoutingPolicy.Sets.Community.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Community.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
# NOTE: 'propogate' spelling follows the YANG leaf names; do not correct here.
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'attached' container: wraps the list of attach-point bindings above.
'RoutingPolicy.Sets.Community.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Community.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Community.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# One community 'set' list entry, keyed by 'set-name' (key flag True).
'RoutingPolicy.Sets.Community.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Community.Sets_.Set',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Community.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Community.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'sets' container: list of individual community sets.
'RoutingPolicy.Sets.Community.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Community.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Community.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'unused' container: leaf-list of community-set names referenced by nothing.
'RoutingPolicy.Sets.Community.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Community.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'inactive' container: leaf-list of community-set names not attached to a protocol.
'RoutingPolicy.Sets.Community.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Community.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'active' container: leaf-list of community-set names attached to a protocol.
'RoutingPolicy.Sets.Community.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Community.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'community' container: groups the active/inactive/unused summaries and the
# per-set details for community sets.
'RoutingPolicy.Sets.Community' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Community',
False,
[
_MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Community.Active',
[], [],
''' All objects of a given type that are attached to
a protocol
''',
'active',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Community.Inactive',
[], [],
''' All objects of a given type that are not
attached to a protocol
''',
'inactive',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Community.Sets_',
[], [],
''' Information about individual sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Community.Unused',
[], [],
''' All objects of a given type that are not
referenced at all
''',
'unused',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'community',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# ---------------------------------------------------------------------------
# Generated YDK metadata for the "as-path" set type.  Same structure as the
# prefix- and community-set entries above.
# ---------------------------------------------------------------------------

# One 'reference' list entry: a policy that refers to this AS-path set,
# with its status and whether the use is direct.
'RoutingPolicy.Sets.AsPath.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPath.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'used-by' container of an AS-path set: list of policies referencing the set.
'RoutingPolicy.Sets.AsPath.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPath.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.AsPath.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# One 'binding' list entry: a protocol attach point where a policy that uses
# this AS-path set is attached.
'RoutingPolicy.Sets.AsPath.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPath.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
# NOTE: 'propogate' spelling follows the YANG leaf names; do not correct here.
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'attached' container: wraps the list of attach-point bindings above.
'RoutingPolicy.Sets.AsPath.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPath.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.AsPath.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# One AS-path 'set' list entry, keyed by 'set-name' (key flag True).
'RoutingPolicy.Sets.AsPath.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPath.Sets_.Set',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.AsPath.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.AsPath.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'sets' container: list of individual AS-path sets.
'RoutingPolicy.Sets.AsPath.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPath.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.AsPath.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'unused' container: leaf-list of AS-path-set names referenced by nothing.
'RoutingPolicy.Sets.AsPath.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPath.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'inactive' container: leaf-list of AS-path-set names not attached to a protocol.
'RoutingPolicy.Sets.AsPath.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPath.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'active' container: leaf-list of AS-path-set names attached to a protocol.
'RoutingPolicy.Sets.AsPath.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPath.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'as-path' container: groups the active/inactive/unused summaries and the
# per-set details for AS-path sets.
'RoutingPolicy.Sets.AsPath' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.AsPath',
False,
[
_MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.AsPath.Active',
[], [],
''' All objects of a given type that are attached to
a protocol
''',
'active',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.AsPath.Inactive',
[], [],
''' All objects of a given type that are not
attached to a protocol
''',
'inactive',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.AsPath.Sets_',
[], [],
''' Information about individual sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.AsPath.Unused',
[], [],
''' All objects of a given type that are not
referenced at all
''',
'unused',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'as-path',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# ---------------------------------------------------------------------------
# Generated YDK metadata for the "extended-community-bandwidth" set type
# (continues past this point in the file).
# ---------------------------------------------------------------------------

# One 'reference' list entry: a policy that refers to this extended-community
# bandwidth set, with its status and whether the use is direct.
'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# 'used-by' container of an extended-community bandwidth set: list of
# policies referencing the set.
'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
    # Meta entry for leaf-list 'object' under 'unused': names of
    # extended-community-bandwidth sets that no policy references.
    # Auto-generated YDK meta-table entry -- do not edit by hand.
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Unused' : {
        'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidth.Unused',
            False,
            [
            _MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
                [], [],
                ''' Policy objects
                ''',
                'object',
                'Cisco-IOS-XR-policy-repository-oper', False),
            ],
            'Cisco-IOS-XR-policy-repository-oper',
            'unused',
            _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
            'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
        ),
    },
    # Meta entry for leaf-list 'object' under 'inactive': names of
    # extended-community-bandwidth sets not attached to any protocol.
    # Auto-generated YDK meta-table entry -- do not edit by hand.
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Inactive' : {
        'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidth.Inactive',
            False,
            [
            _MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
                [], [],
                ''' Policy objects
                ''',
                'object',
                'Cisco-IOS-XR-policy-repository-oper', False),
            ],
            'Cisco-IOS-XR-policy-repository-oper',
            'inactive',
            _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
            'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
        ),
    },
    # Meta entry for YANG container 'extended-community-bandwidth'.
    # NOTE(review): unlike the sibling set types (as-path,
    # extended-community-rt, rd) this entry has no 'active' child --
    # this mirrors the generated model; confirm against the YANG source.
    # Auto-generated YDK meta-table entry -- do not edit by hand.
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth' : {
        'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityBandwidth',
            False,
            [
            _MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Inactive',
                [], [],
                ''' All objects of a given type that are not
                attached to a protocol
                ''',
                'inactive',
                'Cisco-IOS-XR-policy-repository-oper', False),
            _MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_',
                [], [],
                ''' Information about individual sets
                ''',
                'sets',
                'Cisco-IOS-XR-policy-repository-oper', False),
            _MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Unused',
                [], [],
                ''' All objects of a given type that are not
                referenced at all
                ''',
                'unused',
                'Cisco-IOS-XR-policy-repository-oper', False),
            ],
            'Cisco-IOS-XR-policy-repository-oper',
            'extended-community-bandwidth',
            _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
            'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
        ),
    },
'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRt.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityRt.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRt.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.ExtendedCommunityRt.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRt.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
    # Meta entry for leaf-list 'object' under 'active': names of
    # extended-community-rt sets that are attached to a protocol.
    # Auto-generated YDK meta-table entry -- do not edit by hand.
    'RoutingPolicy.Sets.ExtendedCommunityRt.Active' : {
        'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRt.Active',
            False,
            [
            _MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
                [], [],
                ''' Policy objects
                ''',
                'object',
                'Cisco-IOS-XR-policy-repository-oper', False),
            ],
            'Cisco-IOS-XR-policy-repository-oper',
            'active',
            _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
            'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
        ),
    },
    # Meta entry for YANG container 'extended-community-rt': RT
    # extended-community set information, grouped into active / inactive /
    # unused object lists plus per-set detail under 'sets'.
    # Auto-generated YDK meta-table entry -- do not edit by hand.
    'RoutingPolicy.Sets.ExtendedCommunityRt' : {
        'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityRt',
            False,
            [
            _MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityRt.Active',
                [], [],
                ''' All objects of a given type that are attached to
                a protocol
                ''',
                'active',
                'Cisco-IOS-XR-policy-repository-oper', False),
            _MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityRt.Inactive',
                [], [],
                ''' All objects of a given type that are not
                attached to a protocol
                ''',
                'inactive',
                'Cisco-IOS-XR-policy-repository-oper', False),
            _MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_',
                [], [],
                ''' Information about individual sets
                ''',
                'sets',
                'Cisco-IOS-XR-policy-repository-oper', False),
            _MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityRt.Unused',
                [], [],
                ''' All objects of a given type that are not
                referenced at all
                ''',
                'unused',
                'Cisco-IOS-XR-policy-repository-oper', False),
            ],
            'Cisco-IOS-XR-policy-repository-oper',
            'extended-community-rt',
            _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
            'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
        ),
    },
'RoutingPolicy.Sets.Rd.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Rd.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Rd.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Rd.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Rd.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Rd.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Rd.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Rd.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Rd.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Rd.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Rd.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Rd.Sets_.Set',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Rd.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Rd.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Rd.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Rd.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Rd.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Rd.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Rd.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Rd.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Rd.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
'RoutingPolicy.Sets.Rd.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Rd.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
    # Meta entry for YANG container 'rd': route-distinguisher set
    # information, grouped into active / inactive / unused object lists
    # plus per-set detail under 'sets'.
    # Auto-generated YDK meta-table entry -- do not edit by hand.
    'RoutingPolicy.Sets.Rd' : {
        'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.Rd',
            False,
            [
            _MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Rd.Active',
                [], [],
                ''' All objects of a given type that are attached to
                a protocol
                ''',
                'active',
                'Cisco-IOS-XR-policy-repository-oper', False),
            _MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Rd.Inactive',
                [], [],
                ''' All objects of a given type that are not
                attached to a protocol
                ''',
                'inactive',
                'Cisco-IOS-XR-policy-repository-oper', False),
            _MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Rd.Sets_',
                [], [],
                ''' Information about individual sets
                ''',
                'sets',
                'Cisco-IOS-XR-policy-repository-oper', False),
            _MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Rd.Unused',
                [], [],
                ''' All objects of a given type that are not
                referenced at all
                ''',
                'unused',
                'Cisco-IOS-XR-policy-repository-oper', False),
            ],
            'Cisco-IOS-XR-policy-repository-oper',
            'rd',
            _yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
            'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
        ),
    },
# Schema metadata for one 'reference' record: a policy that uses an
# extended-community-cost set (policy name, status enum, direct/indirect flag).
'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.UsedBy.Reference' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.UsedBy.Reference',
False,
[
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of policy
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('status', REFERENCE_ENUM_CLASS, 'ObjectStatusEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'ObjectStatusEnum',
[], [],
''' Active, Inactive, or Unused
''',
'status',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-directly', ATTRIBUTE, 'bool' , None, None,
[], [],
''' Whether the policy uses this object directly or
indirectly
''',
'used_directly',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'reference',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for the 'used-by' container: a list of Reference
# records describing policies referring to this set.
'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.UsedBy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.UsedBy',
False,
[
_MetaInfoClassMember('reference', REFERENCE_LIST, 'Reference' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.UsedBy.Reference',
[], [],
''' Information about policies referring to this
object
''',
'reference',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'used-by',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for one 'binding' record: where a policy using this
# set is attached (protocol, attach point, direction, neighbor, etc.).
# Each member mirrors one YANG leaf; none is a list key (final arg False).
'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.Attached.Binding' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.Attached.Binding',
False,
[
_MetaInfoClassMember('af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Address Family Identifier
''',
'af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('aggregate-network-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Aggregate IP address or Network IP Address
in IPv4 or IPv6 Format
''',
'aggregate_network_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('area-id', ATTRIBUTE, 'str' , None, None,
[], [],
''' OSPF Area ID in Decimal Integer Format
''',
'area_id',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attach-point', ATTRIBUTE, 'str' , None, None,
[], [],
''' Name of attach point where policy is attached
''',
'attach_point',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('attached-policy', ATTRIBUTE, 'str' , None, None,
[], [],
''' The attached policy that (maybe indirectly) uses
the object in question
''',
'attached_policy',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('direction', REFERENCE_ENUM_CLASS, 'AttachPointDirectionEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AttachPointDirectionEnum',
[], [],
''' Direction In or Out
''',
'direction',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group', REFERENCE_ENUM_CLASS, 'GroupEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'GroupEnum',
[], [],
''' Neighbor Group
''',
'group',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('group-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor Group Name
''',
'group_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Instance
''',
'instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('interface-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Interface Name
''',
'interface_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-address', ATTRIBUTE, 'str' , None, None,
[], [],
''' Neighbor IP Address
''',
'neighbor_address',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('neighbor-af-name', REFERENCE_ENUM_CLASS, 'AddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'AddressFamilyEnum',
[], [],
''' Neighbor IP Address Family
''',
'neighbor_af_name',
'Cisco-IOS-XR-policy-repository-oper', False),
# 'propogate' spelling below comes from the YANG model itself; keep as-is.
_MetaInfoClassMember('propogate-from', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate From Level
''',
'propogate_from',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('propogate-to', ATTRIBUTE, 'int' , None, None,
[('-2147483648', '2147483647')], [],
''' ISIS Propogate To Level
''',
'propogate_to',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('proto-instance', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol instance
''',
'proto_instance',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Protocol to which policy attached
''',
'protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('route-policy-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' Policy that uses object in question
''',
'route_policy_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('saf-name', REFERENCE_ENUM_CLASS, 'SubAddressFamilyEnum' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'SubAddressFamilyEnum',
[], [],
''' Subsequent Address Family Identifier
''',
'saf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('source-protocol', ATTRIBUTE, 'str' , None, None,
[], [],
''' Source Protocol to redistribute,
Source Protocol can be one of the following
values {all,
connected, local, static, bgp, rip, isis, ospf,
ospfv3, eigrp, unknown }
''',
'source_protocol',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('vrf-name', ATTRIBUTE, 'str' , None, None,
[], [],
''' VRF name
''',
'vrf_name',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'binding',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for the 'attached' container: a list of Binding
# records (attach points for this set).
'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.Attached' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.Attached',
False,
[
_MetaInfoClassMember('binding', REFERENCE_LIST, 'Binding' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.Attached.Binding',
[], [],
''' bindings list
''',
'binding',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'attached',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for one 'set' list entry. 'set-name' is the list key
# (final member arg True) and is pattern-restricted; 'attached' and
# 'used-by' are child containers.
'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set',
False,
[
_MetaInfoClassMember('set-name', ATTRIBUTE, 'str' , None, None,
[], ['[\\w\\-\\.:,_@#%$\\+=\\|;]+'],
''' Set name
''',
'set_name',
'Cisco-IOS-XR-policy-repository-oper', True),
_MetaInfoClassMember('attached', REFERENCE_CLASS, 'Attached' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.Attached',
[], [],
''' Information about where this policy or set is
attached
''',
'attached',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('used-by', REFERENCE_CLASS, 'UsedBy' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.UsedBy',
[], [],
''' Policies that use this object, directly or
indirectly
''',
'used_by',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'set',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for the 'sets' container: a list of Set entries.
# The Python class is named Sets_ (trailing underscore) to avoid
# clashing with the outer RoutingPolicy.Sets class.
'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCost.Sets_',
False,
[
_MetaInfoClassMember('set', REFERENCE_LIST, 'Set' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set',
[], [],
''' Information about an individual set
''',
'set',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for the 'unused' container: string leaf-list 'object'
# of extended-community-cost objects that are not referenced at all.
'RoutingPolicy.Sets.ExtendedCommunityCost.Unused' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCost.Unused',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'unused',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for the 'inactive' container: string leaf-list
# 'object' of objects not attached to a protocol.
'RoutingPolicy.Sets.ExtendedCommunityCost.Inactive' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCost.Inactive',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'inactive',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for the 'active' container: string leaf-list 'object'
# of objects attached to a protocol.
'RoutingPolicy.Sets.ExtendedCommunityCost.Active' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCost.Active',
False,
[
_MetaInfoClassMember('object', REFERENCE_LEAFLIST, 'str' , None, None,
[], [],
''' Policy objects
''',
'object',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'active',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for the 'extended-community-cost' sets container,
# wiring its four child containers (active/inactive/sets/unused).
'RoutingPolicy.Sets.ExtendedCommunityCost' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets.ExtendedCommunityCost',
False,
[
_MetaInfoClassMember('active', REFERENCE_CLASS, 'Active' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityCost.Active',
[], [],
''' All objects of a given type that are attached to
a protocol
''',
'active',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('inactive', REFERENCE_CLASS, 'Inactive' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityCost.Inactive',
[], [],
''' All objects of a given type that are not
attached to a protocol
''',
'inactive',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets_' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityCost.Sets_',
[], [],
''' Information about individual sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('unused', REFERENCE_CLASS, 'Unused' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityCost.Unused',
[], [],
''' All objects of a given type that are not
referenced at all
''',
'unused',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'extended-community-cost',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for the top-level 'sets' container: one child
# container per set type (as-path, community, the extended-community
# variants, ospf-area, prefix, rd, tag).
'RoutingPolicy.Sets' : {
'meta_info' : _MetaInfoClass('RoutingPolicy.Sets',
False,
[
_MetaInfoClassMember('as-path', REFERENCE_CLASS, 'AsPath' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.AsPath',
[], [],
''' Information about AS Path sets
''',
'as_path',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('community', REFERENCE_CLASS, 'Community' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Community',
[], [],
''' Information about Community sets
''',
'community',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('extended-community-bandwidth', REFERENCE_CLASS, 'ExtendedCommunityBandwidth' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityBandwidth',
[], [],
''' Information about Extended Community Bandwidth
sets
''',
'extended_community_bandwidth',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('extended-community-cost', REFERENCE_CLASS, 'ExtendedCommunityCost' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityCost',
[], [],
''' Information about Extended Community Cost sets
''',
'extended_community_cost',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('extended-community-opaque', REFERENCE_CLASS, 'ExtendedCommunityOpaque' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityOpaque',
[], [],
''' Information about Extended Community Opaque
sets
''',
'extended_community_opaque',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('extended-community-rt', REFERENCE_CLASS, 'ExtendedCommunityRt' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunityRt',
[], [],
''' Information about Extended Community RT sets
''',
'extended_community_rt',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('extended-community-seg-nh', REFERENCE_CLASS, 'ExtendedCommunitySegNh' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySegNh',
[], [],
''' Information about Extended Community SegNH sets
''',
'extended_community_seg_nh',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('extended-community-soo', REFERENCE_CLASS, 'ExtendedCommunitySoo' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.ExtendedCommunitySoo',
[], [],
''' Information about Extended Community SOO sets
''',
'extended_community_soo',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('ospf-area', REFERENCE_CLASS, 'OspfArea' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.OspfArea',
[], [],
''' Information about OSPF Area sets
''',
'ospf_area',
'Cisco-IOS-XR-policy-repository-oper', False),
# NOTE(review): the 'prefix' description below reads "AS Path sets";
# it is copied verbatim from the generated model text.
_MetaInfoClassMember('prefix', REFERENCE_CLASS, 'Prefix' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Prefix',
[], [],
''' Information about AS Path sets
''',
'prefix',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('rd', REFERENCE_CLASS, 'Rd' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Rd',
[], [],
''' Information about RD sets
''',
'rd',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('tag', REFERENCE_CLASS, 'Tag' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets.Tag',
[], [],
''' Information about Tag sets
''',
'tag',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'sets',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
# Schema metadata for the root 'routing-policy' container: three child
# containers (limits, policies, sets).
'RoutingPolicy' : {
'meta_info' : _MetaInfoClass('RoutingPolicy',
False,
[
_MetaInfoClassMember('limits', REFERENCE_CLASS, 'Limits' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Limits',
[], [],
''' Information about configured limits and the
current values
''',
'limits',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('policies', REFERENCE_CLASS, 'Policies' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Policies',
[], [],
''' Information about configured route policies
''',
'policies',
'Cisco-IOS-XR-policy-repository-oper', False),
_MetaInfoClassMember('sets', REFERENCE_CLASS, 'Sets' , 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper', 'RoutingPolicy.Sets',
[], [],
''' Information about configured sets
''',
'sets',
'Cisco-IOS-XR-policy-repository-oper', False),
],
'Cisco-IOS-XR-policy-repository-oper',
'routing-policy',
_yang_ns._namespaces['Cisco-IOS-XR-policy-repository-oper'],
'ydk.models.cisco_ios_xr.Cisco_IOS_XR_policy_repository_oper'
),
},
}
# Wire each nested meta-info entry to its parent's meta-info.
# Every parent key in this span is exactly the child key with its last
# dotted segment removed, so the parent is derived with rsplit instead
# of spelling out each (child, parent) pair. Assignment order matches
# the original statement order.
for _child_key in (
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedSets.Sets',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedSets.Sets',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedPolicies',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedSets',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.DirectlyUsedSets',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses.AllUsedPolicies',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.UsedBy.Reference',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.Attached.Binding',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.PolicyUses',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.UsedBy',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy.Attached',
    'RoutingPolicy.Policies.RoutePolicies.RoutePolicy',
    'RoutingPolicy.Policies.RoutePolicies',
    'RoutingPolicy.Policies.Unused',
    'RoutingPolicy.Policies.Inactive',
    'RoutingPolicy.Policies.Active',
    'RoutingPolicy.Sets.OspfArea.Sets_.Set.UsedBy.Reference',
    'RoutingPolicy.Sets.OspfArea.Sets_.Set.Attached.Binding',
    'RoutingPolicy.Sets.OspfArea.Sets_.Set.UsedBy',
    'RoutingPolicy.Sets.OspfArea.Sets_.Set.Attached',
    'RoutingPolicy.Sets.OspfArea.Sets_.Set',
    'RoutingPolicy.Sets.OspfArea.Sets_',
    'RoutingPolicy.Sets.OspfArea.Unused',
    'RoutingPolicy.Sets.OspfArea.Inactive',
    'RoutingPolicy.Sets.OspfArea.Active',
    'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.UsedBy.Reference',
    'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.Attached.Binding',
    'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.UsedBy',
    'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set.Attached',
    'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_.Set',
    'RoutingPolicy.Sets.ExtendedCommunityOpaque.Sets_',
    'RoutingPolicy.Sets.ExtendedCommunityOpaque.Unused',
    'RoutingPolicy.Sets.ExtendedCommunityOpaque.Inactive',
    'RoutingPolicy.Sets.ExtendedCommunityOpaque.Active',
    'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.UsedBy.Reference',
    'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.Attached.Binding',
    'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.UsedBy',
    'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set.Attached',
    'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_.Set',
    'RoutingPolicy.Sets.ExtendedCommunitySegNh.Sets_',
    'RoutingPolicy.Sets.ExtendedCommunitySegNh.Unused',
    'RoutingPolicy.Sets.ExtendedCommunitySegNh.Inactive',
    'RoutingPolicy.Sets.ExtendedCommunitySegNh.Active',
    'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.UsedBy.Reference',
    'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.Attached.Binding',
    'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.UsedBy',
    'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set.Attached',
    'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_.Set',
    'RoutingPolicy.Sets.ExtendedCommunitySoo.Sets_',
    'RoutingPolicy.Sets.ExtendedCommunitySoo.Unused',
    'RoutingPolicy.Sets.ExtendedCommunitySoo.Inactive',
    'RoutingPolicy.Sets.ExtendedCommunitySoo.Active',
    'RoutingPolicy.Sets.Tag.Sets_.Set.UsedBy.Reference',
    'RoutingPolicy.Sets.Tag.Sets_.Set.Attached.Binding',
    'RoutingPolicy.Sets.Tag.Sets_.Set.UsedBy',
    'RoutingPolicy.Sets.Tag.Sets_.Set.Attached',
    'RoutingPolicy.Sets.Tag.Sets_.Set',
    'RoutingPolicy.Sets.Tag.Sets_',
    'RoutingPolicy.Sets.Tag.Unused',
    'RoutingPolicy.Sets.Tag.Inactive',
    'RoutingPolicy.Sets.Tag.Active',
    'RoutingPolicy.Sets.Prefix.Sets_.Set.UsedBy.Reference',
    'RoutingPolicy.Sets.Prefix.Sets_.Set.Attached.Binding',
    'RoutingPolicy.Sets.Prefix.Sets_.Set.UsedBy',
    'RoutingPolicy.Sets.Prefix.Sets_.Set.Attached',
    'RoutingPolicy.Sets.Prefix.Sets_.Set',
    'RoutingPolicy.Sets.Prefix.Sets_',
    'RoutingPolicy.Sets.Prefix.Unused',
    'RoutingPolicy.Sets.Prefix.Inactive',
    'RoutingPolicy.Sets.Prefix.Active',
    'RoutingPolicy.Sets.Community.Sets_.Set.UsedBy.Reference',
    'RoutingPolicy.Sets.Community.Sets_.Set.Attached.Binding',
    'RoutingPolicy.Sets.Community.Sets_.Set.UsedBy',
    'RoutingPolicy.Sets.Community.Sets_.Set.Attached',
    'RoutingPolicy.Sets.Community.Sets_.Set',
    'RoutingPolicy.Sets.Community.Sets_',
    'RoutingPolicy.Sets.Community.Unused',
    'RoutingPolicy.Sets.Community.Inactive',
    'RoutingPolicy.Sets.Community.Active',
    'RoutingPolicy.Sets.AsPath.Sets_.Set.UsedBy.Reference',
    'RoutingPolicy.Sets.AsPath.Sets_.Set.Attached.Binding',
    'RoutingPolicy.Sets.AsPath.Sets_.Set.UsedBy',
    'RoutingPolicy.Sets.AsPath.Sets_.Set.Attached',
    'RoutingPolicy.Sets.AsPath.Sets_.Set',
    'RoutingPolicy.Sets.AsPath.Sets_',
    'RoutingPolicy.Sets.AsPath.Unused',
    'RoutingPolicy.Sets.AsPath.Inactive',
    'RoutingPolicy.Sets.AsPath.Active',
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.UsedBy.Reference',
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.Attached.Binding',
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.UsedBy',
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set.Attached',
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_.Set',
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Sets_',
    # NOTE: ExtendedCommunityBandwidth has no 'Active' entry wired here,
    # matching the original statement list.
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Unused',
    'RoutingPolicy.Sets.ExtendedCommunityBandwidth.Inactive',
    'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.UsedBy.Reference',
    'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.Attached.Binding',
    'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.UsedBy',
    'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set.Attached',
    'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_.Set',
    'RoutingPolicy.Sets.ExtendedCommunityRt.Sets_',
    'RoutingPolicy.Sets.ExtendedCommunityRt.Unused',
    'RoutingPolicy.Sets.ExtendedCommunityRt.Inactive',
    'RoutingPolicy.Sets.ExtendedCommunityRt.Active',
    'RoutingPolicy.Sets.Rd.Sets_.Set.UsedBy.Reference',
):
    _parent_key = _child_key.rsplit('.', 1)[0]
    _meta_table[_child_key]['meta_info'].parent = _meta_table[_parent_key]['meta_info']
# Keep the module namespace identical to the original flat statements.
del _child_key, _parent_key
_meta_table['RoutingPolicy.Sets.Rd.Sets_.Set.Attached.Binding']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.Rd.Sets_.Set.Attached']['meta_info']
_meta_table['RoutingPolicy.Sets.Rd.Sets_.Set.UsedBy']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.Rd.Sets_.Set']['meta_info']
_meta_table['RoutingPolicy.Sets.Rd.Sets_.Set.Attached']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.Rd.Sets_.Set']['meta_info']
_meta_table['RoutingPolicy.Sets.Rd.Sets_.Set']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.Rd.Sets_']['meta_info']
_meta_table['RoutingPolicy.Sets.Rd.Sets_']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.Rd']['meta_info']
_meta_table['RoutingPolicy.Sets.Rd.Unused']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.Rd']['meta_info']
_meta_table['RoutingPolicy.Sets.Rd.Inactive']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.Rd']['meta_info']
_meta_table['RoutingPolicy.Sets.Rd.Active']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.Rd']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.UsedBy.Reference']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.UsedBy']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.Attached.Binding']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.Attached']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.UsedBy']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set.Attached']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_.Set']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Sets_']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Unused']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Inactive']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost.Active']['meta_info'].parent =_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost']['meta_info']
_meta_table['RoutingPolicy.Sets.OspfArea']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityOpaque']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunitySegNh']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunitySoo']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.Tag']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.Prefix']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.Community']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.AsPath']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityBandwidth']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityRt']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.Rd']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Sets.ExtendedCommunityCost']['meta_info'].parent =_meta_table['RoutingPolicy.Sets']['meta_info']
_meta_table['RoutingPolicy.Limits']['meta_info'].parent =_meta_table['RoutingPolicy']['meta_info']
_meta_table['RoutingPolicy.Policies']['meta_info'].parent =_meta_table['RoutingPolicy']['meta_info']
_meta_table['RoutingPolicy.Sets']['meta_info'].parent =_meta_table['RoutingPolicy']['meta_info']
| 52.285831
| 248
| 0.541425
| 22,234
| 256,828
| 5.998741
| 0.009985
| 0.090511
| 0.113139
| 0.137836
| 0.978279
| 0.97567
| 0.956266
| 0.948304
| 0.928443
| 0.896173
| 0
| 0.003751
| 0.331424
| 256,828
| 4,911
| 249
| 52.296477
| 0.773005
| 0
| 0
| 0.662328
| 0
| 0
| 0.468629
| 0.379844
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001893
| 0
| 0.001893
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f3bb22af2c50821be904b82b6a8a0cc02e26020b
| 80
|
py
|
Python
|
quickstart-jython/src/main/java/org/quickstart/jython/calculator_func.py
|
youngzil/quickstart-framework
|
5252ab4ffe089461969ed54420d3f9f8980baa03
|
[
"Apache-2.0"
] | 6
|
2019-01-02T11:02:38.000Z
|
2021-01-30T16:35:20.000Z
|
quickstart-jython/src/main/java/org/quickstart/jython/calculator_func.py
|
youngzil/quickstart-framework
|
5252ab4ffe089461969ed54420d3f9f8980baa03
|
[
"Apache-2.0"
] | 31
|
2019-11-13T02:06:18.000Z
|
2022-03-31T20:51:49.000Z
|
quickstart-jython/src/main/java/org/quickstart/jython/calculator_func.py
|
youngzil/quickstart-framework
|
5252ab4ffe089461969ed54420d3f9f8980baa03
|
[
"Apache-2.0"
] | 3
|
2018-07-10T15:08:02.000Z
|
2020-09-02T06:48:07.000Z
|
# coding=utf-8
import math
# 面向函数式编程
def power(x, y):
return math.pow(x, y)
| 13.333333
| 25
| 0.65
| 15
| 80
| 3.466667
| 0.8
| 0.076923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015625
| 0.2
| 80
| 6
| 25
| 13.333333
| 0.796875
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
340e49a5242b798707ff87f4d142cff5a72e6478
| 64
|
py
|
Python
|
pydatamailbox/__init__.py
|
optimdata/pydatamailbox
|
6db76131663e6eea519e5c422f7de2b39f5d9c0c
|
[
"MIT"
] | 1
|
2021-12-15T14:16:25.000Z
|
2021-12-15T14:16:25.000Z
|
pydatamailbox/__init__.py
|
optimdata/pydatamailbox
|
6db76131663e6eea519e5c422f7de2b39f5d9c0c
|
[
"MIT"
] | 3
|
2021-07-16T15:01:53.000Z
|
2021-09-23T15:54:06.000Z
|
pydatamailbox/__init__.py
|
optimdata/pydatamailbox
|
6db76131663e6eea519e5c422f7de2b39f5d9c0c
|
[
"MIT"
] | null | null | null |
from .client import * # NOQA
from .exceptions import * # NOQA
| 21.333333
| 33
| 0.6875
| 8
| 64
| 5.5
| 0.625
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.21875
| 64
| 2
| 34
| 32
| 0.88
| 0.140625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3434aafb017363afc7fce26333958da08575b1e2
| 54
|
py
|
Python
|
gym_game_nim/envs/__init__.py
|
hfwittmann/gym_game_nim
|
a2a4512db216e1883aba7c718418488e890c03e0
|
[
"MIT"
] | 1
|
2018-06-07T10:38:36.000Z
|
2018-06-07T10:38:36.000Z
|
gym_game_nim/envs/__init__.py
|
hfwittmann/gym_game_nim
|
a2a4512db216e1883aba7c718418488e890c03e0
|
[
"MIT"
] | null | null | null |
gym_game_nim/envs/__init__.py
|
hfwittmann/gym_game_nim
|
a2a4512db216e1883aba7c718418488e890c03e0
|
[
"MIT"
] | null | null | null |
from gym_game_nim.envs.game_nim_env import GameNimEnv
| 27
| 53
| 0.888889
| 10
| 54
| 4.4
| 0.8
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 54
| 1
| 54
| 54
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
346c04d7ca1cca2f95af3afea1d857ba92bbcae1
| 1,985
|
py
|
Python
|
kkbox_developer_sdk/feature_playlist_fetcher.py
|
garyckhsu/django-REST
|
94cc2c1009f6eb1124136a92bb92e93d490f2271
|
[
"Apache-2.0"
] | 71
|
2017-10-28T03:51:29.000Z
|
2021-08-03T14:44:31.000Z
|
kkbox_developer_sdk/feature_playlist_fetcher.py
|
garyckhsu/django-REST
|
94cc2c1009f6eb1124136a92bb92e93d490f2271
|
[
"Apache-2.0"
] | 2
|
2017-11-27T10:09:08.000Z
|
2018-01-31T10:12:16.000Z
|
kkbox_developer_sdk/feature_playlist_fetcher.py
|
garyckhsu/django-REST
|
94cc2c1009f6eb1124136a92bb92e93d490f2271
|
[
"Apache-2.0"
] | 15
|
2017-11-07T08:49:54.000Z
|
2022-02-04T04:39:53.000Z
|
#!/usr/bin/env python
# encoding: utf-8
from .fetcher import *
from .territory import *
class KKBOXFeaturePlaylistFetcher(Fetcher):
'''
List all featured playlists metadata.
See `https://docs-en.kkbox.codes/v1.1/reference#featured-playlists`.
'''
@assert_access_token
def fetch_all_feature_playlists(self, terr=KKBOXTerritory.TAIWAN):
'''
Fetches featured playlists.
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#featuredplaylists`.
'''
url = 'https://api.kkbox.com/v1.1/featured-playlists'
url += '?' + url_parse.urlencode({'territory': terr})
return self.http._post_data(url, None, self.http._headers_with_access_token())
@assert_access_token
def fetch_feature_playlist(self, playlist_id, terr=KKBOXTerritory.TAIWAN):
'''
Fetches featured playlists.
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#featuredplaylists-playlist_id`.
'''
url = 'https://api.kkbox.com/v1.1/featured-playlists/%s' % playlist_id
url += '?' + url_parse.urlencode({'territory': terr})
return self.http._post_data(url, None, self.http._headers_with_access_token())
@assert_access_token
def fetch_feature_playlist_tracks(self, playlist_id, terr=KKBOXTerritory.TAIWAN):
'''
Fetches featured playlists.
:param terr: the current territory.
:return: API response.
:rtype: dict
See `https://docs-en.kkbox.codes/v1.1/reference#featuredplaylists-playlist_id-tracks`.
'''
url = 'https://api.kkbox.com/v1.1/featured-playlists/%s/tracks' % playlist_id
url += '?' + url_parse.urlencode({'territory': terr})
return self.http._post_data(url, None, self.http._headers_with_access_token())
| 35.446429
| 94
| 0.655919
| 236
| 1,985
| 5.34322
| 0.258475
| 0.107851
| 0.038065
| 0.044409
| 0.84774
| 0.827914
| 0.827914
| 0.827914
| 0.827914
| 0.768438
| 0
| 0.009597
| 0.212594
| 1,985
| 55
| 95
| 36.090909
| 0.797185
| 0.345088
| 0
| 0.5
| 0
| 0
| 0.159785
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 1
| 0.166667
| false
| 0
| 0.111111
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
caa317d7f910510d4220e3b6a827b925a6204e3f
| 950
|
py
|
Python
|
saleor/plugins/sendgrid/__init__.py
|
greentornado/saleor
|
7f58917957a23c4dd90b47214a4500c91c735dee
|
[
"CC-BY-4.0"
] | 3
|
2021-06-22T12:38:18.000Z
|
2021-07-11T15:01:57.000Z
|
saleor/plugins/sendgrid/__init__.py
|
greentornado/saleor
|
7f58917957a23c4dd90b47214a4500c91c735dee
|
[
"CC-BY-4.0"
] | 111
|
2021-07-19T04:19:30.000Z
|
2022-03-28T04:32:37.000Z
|
saleor/plugins/sendgrid/__init__.py
|
aminziadna/saleor
|
2e78fb5bcf8b83a6278af02551a104cfa555a1fb
|
[
"CC-BY-4.0"
] | 6
|
2021-11-08T16:43:05.000Z
|
2022-03-22T17:31:16.000Z
|
from dataclasses import dataclass
from typing import Optional
@dataclass
class SendgridConfiguration:
api_key: Optional[str]
sender_name: Optional[str]
sender_address: Optional[str]
account_confirmation_template_id: Optional[str]
account_set_customer_password_template_id: Optional[str]
account_delete_template_id: Optional[str]
account_change_email_confirm_template_id: Optional[str]
account_change_email_request_template_id: Optional[str]
account_password_reset_template_id: Optional[str]
invoice_ready_template_id: Optional[str]
order_confirmation_template_id: Optional[str]
order_confirmed_template_id: Optional[str]
order_fulfillment_confirmation_template_id: Optional[str]
order_fulfillment_update_template_id: Optional[str]
order_payment_confirmation_template_id: Optional[str]
order_canceled_template_id: Optional[str]
order_refund_confirmation_template_id: Optional[str]
| 39.583333
| 61
| 0.823158
| 118
| 950
| 6.152542
| 0.305085
| 0.257576
| 0.347107
| 0.404959
| 0.615702
| 0.330579
| 0.107438
| 0
| 0
| 0
| 0
| 0
| 0.121053
| 950
| 23
| 62
| 41.304348
| 0.869461
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.095238
| 0.095238
| 0
| 0.952381
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
caca13293534ee564979b77470d054247898360a
| 67
|
py
|
Python
|
tests/java/org/python/indexer/data/mod2.py
|
jeff5/jython-whinchat
|
65d8e5268189f8197295ff2d91be3decb1ee0081
|
[
"CNRI-Jython"
] | 577
|
2020-06-04T16:34:44.000Z
|
2022-03-31T11:46:07.000Z
|
tests/java/org/python/indexer/data/mod2.py
|
jeff5/jython-whinchat
|
65d8e5268189f8197295ff2d91be3decb1ee0081
|
[
"CNRI-Jython"
] | 174
|
2015-01-08T20:37:09.000Z
|
2020-06-03T16:48:59.000Z
|
tests/java/org/python/indexer/data/mod2.py
|
jeff5/jython-whinchat
|
65d8e5268189f8197295ff2d91be3decb1ee0081
|
[
"CNRI-Jython"
] | 162
|
2015-02-07T02:14:38.000Z
|
2020-05-30T16:42:03.000Z
|
import distutils.command
def mod2test():
return dir(distutils)
| 11.166667
| 24
| 0.761194
| 8
| 67
| 6.375
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.149254
| 67
| 5
| 25
| 13.4
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 6
|
1b532ffb5cc0a8c9af7ba88e889e70cdbeab352a
| 482
|
py
|
Python
|
turtle2.py
|
agam21-meet/meet2019y1lab1
|
62babfd255c415d00e354708c0ae293fd8a2a93e
|
[
"MIT"
] | null | null | null |
turtle2.py
|
agam21-meet/meet2019y1lab1
|
62babfd255c415d00e354708c0ae293fd8a2a93e
|
[
"MIT"
] | null | null | null |
turtle2.py
|
agam21-meet/meet2019y1lab1
|
62babfd255c415d00e354708c0ae293fd8a2a93e
|
[
"MIT"
] | null | null | null |
import turtle
turtle.penup()
turtle.goto(-200,-100)
turtle.pendown()
turtle.goto(-200,-100+200)
turtle.goto(-200+50,-100)
turtle.goto(-200+100,-100+200)
turtle.goto(-200+100,-100)
turtle.penup()
turtle.goto(-200+150,-100+200)
turtle.pendown()
turtle.goto(-200+150,-100)
turtle.goto(-200+250,-100)
turtle.penup()
turtle.goto(-200+250,0)
turtle.pendown()
turtle.goto(-200+150,0)
turtle.penup()
turtle.goto(-200+250,100)
turtle.pendown()
turtle.goto(-200+150,100)
turtle.mainloop()
| 20.083333
| 30
| 0.721992
| 81
| 482
| 4.296296
| 0.148148
| 0.344828
| 0.448276
| 0.241379
| 0.913793
| 0.663793
| 0.218391
| 0.218391
| 0
| 0
| 0
| 0.232456
| 0.053942
| 482
| 23
| 31
| 20.956522
| 0.530702
| 0
| 0
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.045455
| 0
| 0.045455
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1b5766871b9a4b303f670ba9fcb2ac22295bb426
| 30
|
py
|
Python
|
tests/__init__.py
|
francois-vincent/docker_orchestrator
|
b0207f2abeab574c015a79d1064702fc5bc0089e
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
francois-vincent/docker_orchestrator
|
b0207f2abeab574c015a79d1064702fc5bc0089e
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
francois-vincent/docker_orchestrator
|
b0207f2abeab574c015a79d1064702fc5bc0089e
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
import sys
| 7.5
| 17
| 0.7
| 5
| 30
| 4.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 0.2
| 30
| 3
| 18
| 10
| 0.833333
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1babc1405790461b51af0a299a62e5d8473bc5a9
| 20,920
|
py
|
Python
|
test_case_4_3.py
|
Alekseybykov126/autotesting
|
7febd5717f7d677ce7c1fc2d1c8a3b9c8e0ed902
|
[
"Apache-2.0"
] | null | null | null |
test_case_4_3.py
|
Alekseybykov126/autotesting
|
7febd5717f7d677ce7c1fc2d1c8a3b9c8e0ed902
|
[
"Apache-2.0"
] | null | null | null |
test_case_4_3.py
|
Alekseybykov126/autotesting
|
7febd5717f7d677ce7c1fc2d1c8a3b9c8e0ed902
|
[
"Apache-2.0"
] | null | null | null |
from Regress_web.page import *
time.sleep(1)
def case_4_3(self, full_screen):
    """Test case No. 4_3 (tvweb_new-4_3): "Смотреть все" buttons on the main page.

    Walks through each movie compilation on the main page and, per section:
    scrolls the compilation into view, checks that free/subscription
    compilations do not show the wrong price badges ("Покупка" / "Подписка" /
    "Бесплатно"), clicks its "Смотреть все" ("Watch all") link, and verifies
    the target catalog page via its <h1> heading. Some sections additionally
    exercise the SEO-description expand/collapse ("rollup") toggle.

    :param full_screen: unused in this method — presumably part of the shared
        test-runner signature; TODO confirm against the caller.
    """
    self.page.loger('\n Запуск Тест кейс № 4_3 tvweb_new-4_3: Проверка работоспособности кнопок "Смотреть все" с главной страницы \n')
    time.sleep(2)
    ## "Free movies" section
    self.page.loger('Шаг 1. Проверка наличия Покупки и Подписки в списке бесплатных фильмов')
    #self.page.waitForElementVisible('.//div[@id="compilation-216"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-12"]')
    # NOTE: reading this Selenium property scrolls the element into view
    target.location_once_scrolled_into_view
    self.page.waitForElementVisible('.//div[@id="compilation-216"]', 10) # container with the movie tiles
    status_txt = str(self.result.find_link("div", "compilation-216")) # text used to check "Покупка"/"Подписка" badges are absent
    assert('Покупка') not in status_txt
    assert('Подписка') not in status_txt
    self.driver.find_elements_by_xpath('.//button[@data-related-compilation-id="216"]')[1].click() # click the right scroll arrow
    time.sleep(2)
    self.page.waitForElementVisible('.//div[@id="compilation-216"]', 10)
    status_txt = str(self.result.find_link("div", "compilation-216"))
    assert('Покупка') not in status_txt
    assert('Подписка') not in status_txt
    self.driver.find_element_by_xpath('.//button[@data-related-compilation-id="216"]').click() # click the left scroll arrow
    time.sleep(1)
    self.driver.find_element_by_xpath('.//button[@data-related-compilation-id="216"]').click() # click the left scroll arrow
    self.page.waitForElementVisible('.//div[@id="compilation-216"]', 10)
    status_txt = str(self.result.find_link("div", "compilation-216"))
    assert('Покупка') not in status_txt
    assert('Подписка') not in status_txt
    self.driver.find_element_by_xpath('.//button[@data-related-compilation-id="216"]').click() # click the left scroll arrow
    self.page.waitForElementVisible('.//div[@id="compilation-216"]', 10)
    status_txt = str(self.result.find_link("div", "compilation-216"))
    assert('Покупка') not in status_txt
    assert('Подписка') not in status_txt
    self.page.loger('В разделе "Бесплатные фильмы" отсутствуют платные и фильмы по подписке')
    time.sleep(2)
    self.page.loger('Шаг 2. Проверка перехода в раздел "Бесплатные фильмы" через кнопку "Смотреть все"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/besplatnye-filmy/"]').click() # click "Смотреть все" ("Watch all")
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Бесплатные фильмы') in head_txt
    self.page.loger('Переход в раздел "Бесплатные фильмы" подтвержден')
    self.driver.back()
    time.sleep(3)
    ### "New releases" section
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-60"]')
    target.location_once_scrolled_into_view
    self.page.loger('Шаг 3. Проверка раздела "Новинки"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/novinki/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Новинки') in head_txt
    self.page.loger('Переход в раздел "Новинки" подтвержден')
    self.driver.back()
    time.sleep(3)
    ## "Halloween movies" section
    self.page.loger('Шаг 4. Проверка раздела "Фильмы на Хэллоуин"')
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-suggestions-base"]')
    target.location_once_scrolled_into_view
    self.driver.find_element_by_xpath('.//a[@href="/catalog/helloween/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Фильмы на Хэллоуин') in head_txt
    self.page.loger('Переход в раздел "Фильмы на Хэллоуин" подтвержден')
    self.driver.back()
    time.sleep(3)
    ## "Watch by subscription" section
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-216"]')
    target.location_once_scrolled_into_view
    self.page.waitForElementVisible('.//div[@id="compilation-60"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-60"]') # scroll to the Halloween compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.waitForElementVisible('.//div[@id="compilation-260"]', 10) # container with the movie tiles
    status_txt = str(self.result.find_link("div", "compilation-260")) # text used to check "Покупка"/"Бесплатно" badges are absent
    assert('Покупка') not in status_txt
    assert('Бесплатно') not in status_txt
    self.driver.find_elements_by_xpath('.//button[@data-related-compilation-id="260"]')[1].click() # click the right scroll arrow
    time.sleep(2)
    self.page.waitForElementVisible('.//div[@id="compilation-260"]', 10)
    status_txt = str(self.result.find_link("div", "compilation-260"))
    assert('Покупка') not in status_txt
    assert('Бесплатно') not in status_txt
    self.driver.find_element_by_xpath('.//button[@data-related-compilation-id="260"]').click() # click the left scroll arrow
    time.sleep(1)
    self.driver.find_element_by_xpath('.//button[@data-related-compilation-id="260"]').click() # click the left scroll arrow
    self.page.waitForElementVisible('.//div[@id="compilation-216"]', 10)
    status_txt = str(self.result.find_link("div", "compilation-216"))
    assert('Покупка') not in status_txt
    assert('Бесплатно') not in status_txt
    self.driver.find_element_by_xpath('.//button[@data-related-compilation-id="260"]').click() # click the left scroll arrow
    self.page.waitForElementVisible('.//div[@id="compilation-216"]', 10)
    status_txt = str(self.result.find_link("div", "compilation-216"))
    assert('Покупка') not in status_txt
    assert('Бесплатно') not in status_txt
    self.page.loger('В разделе "Смотреть по подписке" отсутствуют платные и бесплатные фильмы')
    time.sleep(2)
    self.page.loger('Шаг 5. Проверка перехода в раздел "Смотреть по подписке" через кнопку "Смотреть все"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/smotret-po-podpiske/"]').click() # click "Смотреть все" ("Watch all")
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Смотреть по подписке') in head_txt
    self.page.loger('Переход в раздел "Смотреть по подписке" подтвержден')
    self.driver.back()
    time.sleep(3)
    ## "Best Paramount movies" section
    self.page.waitForElementVisible('.//div[@id="compilation-260"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-260"]') # scroll to the "Watch by subscription" compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 6. Проверка раздела "Лучшие фильмы киностудии Paramount"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/luchshie-filmy-kinostudii-paramount/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Лучшие фильмы киностудии Paramount') in head_txt
    self.page.loger('Переход в раздел "Лучшие фильмы киностудии Paramount" подтвержден')
    self.driver.back()
    time.sleep(3)
    ## "Horror movies" section
    self.page.waitForElementVisible('.//div[@id="compilation-228"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-228"]') # scroll to the "Watch by subscription" compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 7. Проверка раздела "Фильмы ужасов"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/filmy-uzhasov/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Фильмы ужасов') in head_txt
    self.page.loger('Переход в раздел "Фильмы ужасов" подтвержден')
    time.sleep(2)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "expand"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow rollup_open"]', 10)
    time.sleep(1)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "collapse"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow"]', 10)
    time.sleep(1)
    self.page.loger('Кнопка Свернуть/Развернуть работает')
    self.driver.back()
    time.sleep(3)
    ### "MoviesChain by tvzavr" section
    self.page.waitForElementVisible('.//div[@id="compilation-23"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-23"]') # scroll to the compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 8. Проверка раздела "MoviesChain by tvzavr"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/movieschain/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('MoviesChain by tvzavr') in head_txt
    self.page.loger('Переход в раздел "MoviesChain by tvzavr" подтвержден')
    self.driver.back()
    time.sleep(3)
    ### "Modern cartoons" section
    self.page.waitForElementVisible('.//div[@id="compilation-230"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-230"]') # scroll to the compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 9. Проверка раздела "Современные мультфильмы"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/modern-cartoons/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Современные мультфильмы') in head_txt
    self.page.loger('Переход в раздел "Современные мультфильмы" подтвержден')
    time.sleep(2)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "expand"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow rollup_open"]', 10)
    time.sleep(1)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "collapse"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow"]', 10)
    time.sleep(1)
    self.page.loger('Кнопка Свернуть/Развернуть работает')
    self.driver.back()
    time.sleep(3)
    ## "Best remakes" section
    self.page.waitForElementVisible('.//div[@id="compilation-6"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-6"]') # scroll to the "Watch by subscription" compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 10. Проверка раздела "Лучшие ремейки"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/luchshie-remeyki/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Лучшие ремейки') in head_txt
    self.page.loger('Переход в раздел "Лучшие ремейки" подтвержден')
    self.driver.back()
    time.sleep(3)
    ## "Teen comedies" section
    self.page.waitForElementVisible('.//div[@id="compilation-279"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-279"]') # scroll to the compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 11. Проверка раздела "Молодёжные комедии"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/molodezhnye-komedii/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Молодёжные комедии') in head_txt
    self.page.loger('Переход в раздел "Молодёжные комедии" подтвержден')
    time.sleep(2)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "expand"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow rollup_open"]', 10)
    time.sleep(1)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "collapse"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow"]', 10)
    time.sleep(1)
    self.page.loger('Кнопка Свернуть/Развернуть работает')
    self.driver.back()
    time.sleep(3)
    ## "French movies" section
    self.page.waitForElementVisible('.//div[@id="compilation-139"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-139"]') # scroll to the compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 12. Проверка раздела "Французские фильмы"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/frantsuzskie-filmy/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Французские фильмы') in head_txt
    self.page.loger('Переход в раздел "Французские фильмы" подтвержден')
    time.sleep(2)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "expand"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow rollup_open"]', 10)
    time.sleep(1)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "collapse"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow"]', 10)
    time.sleep(1)
    self.page.loger('Кнопка Свернуть/Развернуть работает')
    self.driver.back()
    time.sleep(3)
    ## "Psychological thrillers" section
    self.page.waitForElementVisible('.//div[@id="compilation-18"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-18"]') # scroll to the "Watch by subscription" compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 13. Проверка раздела "Психологические триллеры"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/psihologicheskie-trillery/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Психологические триллеры') in head_txt
    self.page.loger('Переход в раздел "Психологические триллеры" подтвержден')
    self.driver.back()
    time.sleep(3)
    ### "Soviet cartoons" section
    self.page.waitForElementVisible('.//div[@id="compilation-91"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-91"]') # scroll to the compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 14. Проверка раздела "Советские мультфильмы"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/sovetskie-multfilmy/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Советские мультфильмы') in head_txt
    self.page.loger('Переход в раздел "Советские мультфильмы" подтвержден')
    time.sleep(2)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "expand"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow rollup_open"]', 10)
    time.sleep(1)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "collapse"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow"]', 10)
    time.sleep(1)
    self.page.loger('Кнопка Свернуть/Развернуть работает')
    self.driver.back()
    time.sleep(3)
    ### "High-budget movies" section
    self.page.waitForElementVisible('.//div[@id="compilation-17"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-17"]') # scroll to the "Watch by subscription" compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 15. Проверка раздела "Высокобюджетные фильмы"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/vysokobyudzhetnye-filmy/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Высокобюджетные фильмы') in head_txt
    self.page.loger('Переход в раздел "Высокобюджетные фильмы" подтвержден')
    self.driver.back()
    time.sleep(3)
    ### "Top-250 KinoPoisk movies" section
    self.page.waitForElementVisible('.//div[@id="compilation-41"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-41"]') # scroll to the "Watch by subscription" compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 16. Проверка раздела "Фильмы из Топ-250 КиноПоиска"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/filmy-iz-top-250-kinopoiska/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Фильмы из Топ-250 КиноПоиска') in head_txt
    self.page.loger('Переход в раздел "Фильмы из Топ-250 КиноПоиска" подтвержден')
    self.driver.back()
    time.sleep(3)
    ### "Based on true events" section
    self.page.waitForElementVisible('.//div[@id="compilation-48"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-48"]') # scroll to the "Watch by subscription" compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 17. Проверка раздела "Фильмы, основанные на реальных событиях"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/filmy-osnovannye-na-realnyh-sobytiyah/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Фильмы, основанные на реальных событиях') in head_txt
    self.page.loger('Переход в раздел "Фильмы, основанные на реальных событиях" подтвержден')
    self.driver.back()
    time.sleep(3)
    ### "Biographical movies" section
    self.page.waitForElementVisible('.//div[@id="compilation-26"]', 10)
    target = self.driver.find_element_by_xpath('.//div[@id="compilation-26"]') # scroll to the compilation
    target.location_once_scrolled_into_view # scroll into view
    time.sleep(2)
    self.page.loger('Шаг 18. Проверка раздела "Биографические фильмы"')
    self.driver.find_element_by_xpath('.//a[@href="/catalog/biograficheskie-filmy/"]').click()
    time.sleep(3)
    self.page.waitForElementVisible('.//h1[@class="page__heading superheading-1"]', 10)
    head_txt = str(self.result.find_link("h1", "page__heading superheading-1"))
    assert('Биографические фильмы') in head_txt
    self.page.loger('Переход в раздел "Биографические фильмы" подтвержден')
    time.sleep(2)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "expand"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow rollup_open"]', 10)
    time.sleep(1)
    self.driver.find_element_by_xpath('.//button[@class="rollup__toggle js-rollup-toggle"]').click() # click "collapse"
    self.page.waitForElementVisible('.//div[@class="page__description seo-info rollup js-rollup rollup_overflow"]', 10)
    time.sleep(1)
    self.page.loger('Кнопка Свернуть/Развернуть работает')
    self.driver.back()
    time.sleep(3)
    self.driver.quit()
| 54.764398
| 134
| 0.716061
| 2,779
| 20,920
| 5.220943
| 0.074127
| 0.052933
| 0.053071
| 0.076711
| 0.873113
| 0.859949
| 0.83107
| 0.791578
| 0.766903
| 0.738369
| 0
| 0.024244
| 0.128537
| 20,920
| 382
| 135
| 54.764398
| 0.771488
| 0.07739
| 0
| 0.66875
| 0
| 0.003125
| 0.418246
| 0.181004
| 0
| 0
| 0
| 0
| 0.103125
| 1
| 0.003125
| false
| 0
| 0.003125
| 0
| 0.00625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1bb0362389ed192c54b4fb71339af83a9cde411e
| 25,554
|
py
|
Python
|
tests/api/ils/records_relations/test_records_relations_siblings.py
|
topless/invenio-app-ils
|
38f5a6b61cdeaf5fa5776613073fa46af28737a9
|
[
"MIT"
] | null | null | null |
tests/api/ils/records_relations/test_records_relations_siblings.py
|
topless/invenio-app-ils
|
38f5a6b61cdeaf5fa5776613073fa46af28737a9
|
[
"MIT"
] | 21
|
2018-11-02T14:19:53.000Z
|
2021-06-25T15:16:42.000Z
|
tests/api/ils/records_relations/test_records_relations_siblings.py
|
topless/invenio-app-ils
|
38f5a6b61cdeaf5fa5776613073fa46af28737a9
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# invenio-app-ils is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Test records relations siblings."""
import json
from flask import url_for
from invenio_app_ils.documents.api import Document
from tests.helpers import get_test_record, user_login
from .helpers import (recrel_assert_record_relations,
recrel_choose_endpoints_and_do_request)
def _test_sibl_language_relation(client, json_headers):
    """Test creation/deletion of siblings "language" relations.

    Links docid-1 <--language--> docid-2 and docid-6 via the REST API and
    asserts that each of the three documents exposes the relation (including
    the related record's denormalized metadata) from its own side.
    """
    first_pid_value = "docid-1"
    first_pid_type = "docid"
    second_pid_value = "docid-2"
    second_pid_type = "docid"
    third_pid_value = "docid-6"
    third_pid_type = "docid"
    relation_type = "language"
    # one request creates both relations: first->second and first->third
    payload = [
        {
            "pid_value": second_pid_value,
            "pid_type": second_pid_type,
            "relation_type": relation_type,
        },
        {
            "pid_value": third_pid_value,
            "pid_type": third_pid_type,
            "relation_type": relation_type,
        },
    ]
    def _test_create():
        """Test relation creation."""
        rec1, rec2 = recrel_choose_endpoints_and_do_request(
            (client, json_headers, "POST"),
            (
                first_pid_value,
                first_pid_type,
                second_pid_value,
                second_pid_type,
            ),
            payload,
        )
        # the third record is not returned by the request helper: fetch it
        rec3 = Document.get_record_by_pid(third_pid_value)
        rec3 = rec3.replace_refs()
        recrel_assert_record_relations(
            rec1,
            expected={
                "relations": {
                    "language": [
                        {
                            "pid_value": second_pid_value,
                            "pid_type": second_pid_type,
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec2["title"],
                                "languages": rec2["languages"],
                                "document_type": rec2["document_type"],
                                "publication_year": rec2["publication_year"],
                            },
                        },
                        {
                            "pid_value": third_pid_value,
                            "pid_type": third_pid_type,
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec3["title"],
                                "document_type": rec3["document_type"],
                                "languages": rec3["languages"],
                                "publication_year": rec3["publication_year"],
                            },
                        },
                    ]
                }
            },
        )
        recrel_assert_record_relations(
            rec2,
            expected={
                "relations": {
                    "language": [
                        {
                            "pid_value": first_pid_value,
                            "pid_type": first_pid_type,
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec1["title"],
                                "languages": rec1["languages"],
                                "edition": rec1["edition"],
                                "document_type": rec1["document_type"],
                                "publication_year": rec1["publication_year"],
                            },
                        },
                        {
                            "pid_value": third_pid_value,
                            "pid_type": third_pid_type,
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec3["title"],
                                "languages": rec3["languages"],
                                "document_type": rec3["document_type"],
                                "publication_year": rec3["publication_year"],
                            },
                        },
                    ]
                }
            },
        )
        recrel_assert_record_relations(
            rec3,
            expected={
                "relations": {
                    "language": [
                        {
                            "pid_value": first_pid_value,
                            "pid_type": first_pid_type,
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec1["title"],
                                "languages": rec1["languages"],
                                "edition": rec1["edition"],
                                "document_type": rec1["document_type"],
                                "publication_year": rec1["publication_year"],
                            },
                        },
                        {
                            "pid_value": second_pid_value,
                            "pid_type": second_pid_type,
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec2["title"],
                                "languages": rec2["languages"],
                                "document_type": rec2["document_type"],
                                "publication_year": rec2["publication_year"],
                            },
                        },
                    ]
                }
            },
        )
    def _test_delete():
        """Test relation deletion."""
        rec1, rec2 = recrel_choose_endpoints_and_do_request(
            (client, json_headers, "DELETE"),
            (
                first_pid_value,
                first_pid_type,
                second_pid_value,
                second_pid_type,
            ),
            payload,
        )
        rec3 = Document.get_record_by_pid(third_pid_value)
        rec3 = rec3.replace_refs()
        # deleting both relations leaves all three records unrelated
        recrel_assert_record_relations(rec1, expected={"relations": {}})
        recrel_assert_record_relations(rec2, expected={"relations": {}})
        recrel_assert_record_relations(rec3, expected={"relations": {}})
    _test_create()
    _test_delete()
    # recreate for the next one, to have some more valuable test data
    _test_create()
def _test_sibl_edition_relation(client, json_headers, testdata):
    """Test creation/deletion of siblings "edition" relations.

    Links docid-3 <--edition--> docid-1. docid-1 already carries the
    "language" relations created by the previous test, so those must survive
    unchanged alongside the new edition relation. Also checks that creating
    an edition relation between records without an ``edition`` field fails.
    """
    first_pid_value = "docid-3"
    first_pid_type = "docid"
    second_pid_value = "docid-1"
    second_pid_type = "docid"
    relation_type = "edition"
    payload = {
        "pid_value": second_pid_value,
        "pid_type": second_pid_type,
        "relation_type": relation_type,
    }
    def _test_create():
        """Test relation creation."""
        rec1, rec2 = recrel_choose_endpoints_and_do_request(
            (client, json_headers, "POST"),
            (
                first_pid_value,
                first_pid_type,
                second_pid_value,
                second_pid_type,
            ),
            payload,
        )
        recrel_assert_record_relations(
            rec1,
            expected={
                "relations": {
                    "edition": [
                        {
                            "pid_value": second_pid_value,
                            "pid_type": second_pid_type,
                            "relation_type": "edition",
                            "record_metadata": {
                                "title": rec2["title"],
                                "edition": rec2["edition"],
                                "languages": rec2["languages"],
                                "document_type": rec2["document_type"],
                                "publication_year": rec2["publication_year"],
                            },
                        }
                    ]
                }
            },
        )
        rec_docid_2 = get_test_record(testdata, "documents", "docid-2")
        rec_docid_6 = get_test_record(testdata, "documents", "docid-6")
        # docid-1 keeps its earlier "language" relations next to the new one
        recrel_assert_record_relations(
            rec2,
            expected={
                "relations": {
                    "edition": [
                        {
                            "pid_value": first_pid_value,
                            "pid_type": first_pid_type,
                            "relation_type": "edition",
                            "record_metadata": {
                                "title": rec1["title"],
                                "edition": rec1["edition"],
                                "document_type": rec1["document_type"],
                                "publication_year": rec1["publication_year"],
                            },
                        }
                    ],
                    "language": [
                        {
                            "pid_value": rec_docid_2["pid"],
                            "pid_type": "docid",
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec_docid_2["title"],
                                "languages": rec_docid_2["languages"],
                                "document_type": rec_docid_2["document_type"],
                                "publication_year": rec_docid_2[
                                    "publication_year"
                                ],
                            },
                        },
                        {
                            "pid_value": rec_docid_6["pid"],
                            "pid_type": "docid",
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec_docid_6["title"],
                                "document_type": rec_docid_6["document_type"],
                                "languages": rec_docid_6['languages'],
                                "publication_year": rec_docid_6[
                                    "publication_year"
                                ],
                            },
                        },
                    ],
                }
            },
        )
    def _test_delete():
        """Test relation deletion."""
        rec1, rec2 = recrel_choose_endpoints_and_do_request(
            (client, json_headers, "DELETE"),
            (
                first_pid_value,
                first_pid_type,
                second_pid_value,
                second_pid_type,
            ),
            payload,
        )
        recrel_assert_record_relations(rec1, expected={"relations": {}})
        rec_docid_2 = get_test_record(testdata, "documents", "docid-2")
        rec_docid_6 = get_test_record(testdata, "documents", "docid-6")
        # only the edition relation is removed; "language" relations remain
        recrel_assert_record_relations(
            rec2,
            expected={
                "relations": {
                    "language": [
                        {
                            "pid_value": rec_docid_2["pid"],
                            "pid_type": "docid",
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec_docid_2["title"],
                                "languages": rec_docid_2["languages"],
                                "document_type": rec_docid_2["document_type"],
                                "publication_year": rec_docid_2[
                                    "publication_year"
                                ],
                            },
                        },
                        {
                            "pid_value": rec_docid_6["pid"],
                            "pid_type": "docid",
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec_docid_6["title"],
                                "document_type": rec_docid_6["document_type"],
                                "languages": rec_docid_6["languages"],
                                "publication_year": rec_docid_6[
                                    "publication_year"
                                ],
                            },
                        },
                    ]
                }
            },
        )
    def _test_empty_edition_field():
        """Creating an edition relation between records without an
        ``edition`` field must fail with HTTP 400."""
        edition_first_pid_value = "docid-11"
        edition_first_pid_type = "docid"
        edition_second_pid_value = "docid-12"
        edition_second_pid_type = "docid"
        create_payload = {
            "pid_value": edition_second_pid_value,
            "pid_type": edition_second_pid_type,
            "relation_type": relation_type,
        }
        # return values unused: only the expected 400 status matters here
        rec1, rec2 = recrel_choose_endpoints_and_do_request(
            (client, json_headers, "POST"),
            (
                edition_first_pid_value,
                edition_first_pid_type,
                edition_second_pid_value,
                edition_second_pid_type,
            ),
            create_payload,
            expect_status_code=400
        )
    _test_create()
    _test_delete()
    # recreate for the next one, to have some more valuable test data
    _test_create()
    _test_empty_edition_field()
def _test_sibl_other_relation(client, json_headers, testdata):
    """Test creation/deletion of siblings "other" relations.

    Links docid-2 <--other--> docid-3 with an extra ``note`` payload field.
    The note must appear both in ``relations_extra_metadata`` and inside the
    relation objects; relations created by the earlier language/edition
    tests must survive unchanged.
    """
    first_pid_value = "docid-2"
    first_pid_type = "docid"
    second_pid_value = "docid-3"
    second_pid_type = "docid"
    relation_type = "other"
    payload = {
        "pid_value": second_pid_value,
        "pid_type": second_pid_type,
        "relation_type": relation_type,
        "note": "exercise",
    }
    def _test_create():
        """Test relation creation."""
        rec1, rec2 = recrel_choose_endpoints_and_do_request(
            (client, json_headers, "POST"),
            (
                first_pid_value,
                first_pid_type,
                second_pid_value,
                second_pid_type,
            ),
            payload,
        )
        rec_docid_1 = get_test_record(testdata, "documents", "docid-1")
        rec_docid_6 = get_test_record(testdata, "documents", "docid-6")
        recrel_assert_record_relations(
            rec1,
            expected={
                "relations_extra_metadata": {
                    "other": [
                        {
                            "pid_value": second_pid_value,
                            "pid_type": second_pid_type,
                            "note": "exercise",
                        }
                    ]
                },
                "relations": {
                    "language": [
                        {
                            "pid_value": rec_docid_1["pid"],
                            "pid_type": "docid",
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec_docid_1["title"],
                                "edition": rec_docid_1["edition"],
                                "languages": rec_docid_1["languages"],
                                "document_type": rec_docid_1["document_type"],
                                "publication_year": rec_docid_1[
                                    "publication_year"
                                ],
                            },
                        },
                        {
                            "pid_value": rec_docid_6["pid"],
                            "pid_type": "docid",
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec_docid_6["title"],
                                "document_type": rec_docid_6["document_type"],
                                "languages": rec_docid_6["languages"],
                                "publication_year": rec_docid_6[
                                    "publication_year"
                                ],
                            },
                        },
                    ],
                    "other": [
                        {
                            "pid_value": second_pid_value,
                            "pid_type": second_pid_type,
                            "note": "exercise",
                            "relation_type": "other",
                            "record_metadata": {
                                "title": rec2["title"],
                                "edition": rec2["edition"],
                                "document_type": rec2["document_type"],
                                "publication_year": rec2["publication_year"],
                            },
                        }
                    ],
                },
            },
        )
        recrel_assert_record_relations(
            rec2,
            expected={
                "relations": {
                    "edition": [
                        {
                            "pid_value": rec_docid_1["pid"],
                            "pid_type": "docid",
                            "relation_type": "edition",
                            "record_metadata": {
                                "title": rec_docid_1["title"],
                                "edition": rec_docid_1["edition"],
                                "languages": rec_docid_1["languages"],
                                "document_type": rec_docid_1["document_type"],
                                "publication_year": rec_docid_1[
                                    "publication_year"
                                ],
                            },
                        }
                    ],
                    "other": [
                        {
                            "pid_value": first_pid_value,
                            "pid_type": first_pid_type,
                            "note": "exercise",
                            "relation_type": "other",
                            "record_metadata": {
                                "title": rec1["title"],
                                "languages": rec1["languages"],
                                "document_type": rec1["document_type"],
                                "publication_year": rec1["publication_year"],
                            },
                        }
                    ],
                }
            },
        )
    def _test_delete():
        """Test relation deletion."""
        rec1, rec2 = recrel_choose_endpoints_and_do_request(
            (client, json_headers, "DELETE"),
            (
                first_pid_value,
                first_pid_type,
                second_pid_value,
                second_pid_type,
            ),
            payload,
        )
        rec_docid_1 = get_test_record(testdata, "documents", "docid-1")
        rec_docid_6 = get_test_record(testdata, "documents", "docid-6")
        # only the "other" relation is removed; earlier relations remain
        recrel_assert_record_relations(
            rec1,
            expected={
                "relations": {
                    "language": [
                        {
                            "pid_value": rec_docid_1["pid"],
                            "pid_type": "docid",
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec_docid_1["title"],
                                "edition": rec_docid_1["edition"],
                                "languages": rec_docid_1["languages"],
                                "document_type": rec_docid_1["document_type"],
                                "publication_year": rec_docid_1[
                                    "publication_year"
                                ],
                            },
                        },
                        {
                            "pid_value": rec_docid_6["pid"],
                            "pid_type": "docid",
                            "relation_type": "language",
                            "record_metadata": {
                                "title": rec_docid_6["title"],
                                "document_type": rec_docid_6["document_type"],
                                "languages": rec_docid_6["languages"],
                                "publication_year": rec_docid_6[
                                    "publication_year"
                                ],
                            },
                        },
                    ]
                }
            },
        )
        recrel_assert_record_relations(
            rec2,
            expected={
                "relations": {
                    "edition": [
                        {
                            "pid_value": rec_docid_1["pid"],
                            "pid_type": "docid",
                            "relation_type": "edition",
                            "record_metadata": {
                                "title": rec_docid_1["title"],
                                "edition": rec_docid_1["edition"],
                                "languages": rec_docid_1["languages"],
                                "document_type": rec_docid_1["document_type"],
                                "publication_year": rec_docid_1[
                                    "publication_year"
                                ],
                            },
                        }
                    ]
                }
            },
        )
    _test_create()
    _test_delete()
    # recreate for the next one, to have some more valuable test data
    _test_create()
def _test_sibl_invalid_relations_should_fail(
    client, json_headers, invalids, status_code=400
):
    """Assert that creating each invalid sibling relation is rejected.

    For every entry in ``invalids`` a relation-creation request is POSTed
    to the endpoint matching the first PID's type, and the response status
    is compared against ``status_code``. When a 400 is expected, the error
    message must mention both PIDs involved.
    """
    doc_endpoint = "invenio_app_ils_relations.docid_relations"
    ser_endpoint = "invenio_app_ils_relations.serid_relations"
    for entry in invalids:
        pid1, type1 = entry["first_pid_value"], entry["first_pid_type"]
        pid2, type2 = entry["second_pid_value"], entry["second_pid_type"]
        # Documents and series expose separate relation endpoints.
        endpoint = doc_endpoint if type1 == "docid" else ser_endpoint
        url = url_for(endpoint, pid_value=pid1)
        body = json.dumps(
            {
                "pid_value": pid2,
                "pid_type": type2,
                "relation_type": entry["relation_type"],
            }
        )
        res = client.post(url, headers=json_headers, data=body)
        assert res.status_code == status_code
        if status_code == 400:
            error = json.loads(res.data.decode("utf-8"))
            assert "message" in error
            assert pid1 in error["message"]
            assert pid2 in error["message"]
def test_siblings_relations(client, json_headers, testdata, users):
    """Test siblings relations."""
    # Single test function on purpose: recreating testdata for each test is
    # slow, and the sub-tests below share (and mutate) the same data, so the
    # execution order must not change.
    def _invalid(first, second, relation):
        # Build one invalid-relation payload from (pid_value, pid_type) pairs.
        first_value, first_type = first
        second_value, second_type = second
        return {
            "first_pid_value": first_value,
            "first_pid_type": first_type,
            "second_pid_value": second_value,
            "second_pid_type": second_type,
            "relation_type": relation,
        }

    # Anonymous users must be rejected with 401 before any login happens.
    _test_sibl_invalid_relations_should_fail(
        client,
        json_headers,
        [_invalid(("docid-1", "docid"), ("docid-2", "docid"), "language")],
        status_code=401,
    )
    user_login(client, "librarian", users)
    # docid-1 --language--> docid-2 and docid-6
    _test_sibl_language_relation(client, json_headers)
    # docid-3 --edition--> docid-1
    _test_sibl_edition_relation(client, json_headers, testdata)
    # docid-2 --other--> docid-3
    _test_sibl_other_relation(client, json_headers, testdata)
    # All of these combinations are invalid and must come back as 400.
    invalids = [
        # different pid type
        _invalid(("docid-1", "docid"), ("serid-1", "serid"), "language"),
        # invalid edition: document with serial
        _invalid(("serid-3", "serid"), ("docid-5", "docid"), "edition"),
        # different pid type
        _invalid(("serid-1", "serid"), ("docid-1", "docid"), "other"),
        # same record
        _invalid(("docid-6", "docid"), ("docid-6", "docid"), "language"),
    ]
    _test_sibl_invalid_relations_should_fail(client, json_headers, invalids)
| 37.745938
| 78
| 0.417195
| 1,932
| 25,554
| 5.101449
| 0.080745
| 0.070617
| 0.041193
| 0.034497
| 0.825487
| 0.793324
| 0.774249
| 0.737622
| 0.703734
| 0.67015
| 0
| 0.014768
| 0.488573
| 25,554
| 676
| 79
| 37.801775
| 0.739383
| 0.047312
| 0
| 0.626891
| 0
| 0
| 0.183062
| 0.004368
| 0
| 0
| 0
| 0
| 0.031933
| 1
| 0.020168
| false
| 0
| 0.008403
| 0
| 0.028571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1bb3de19b15c71c2b5d90d23fe98c66d7e890433
| 359
|
py
|
Python
|
mlrun/api/crud/__init__.py
|
katyakats/mlrun
|
e0cacad73113f616eeb5c0297628b87244957ab0
|
[
"Apache-2.0"
] | null | null | null |
mlrun/api/crud/__init__.py
|
katyakats/mlrun
|
e0cacad73113f616eeb5c0297628b87244957ab0
|
[
"Apache-2.0"
] | null | null | null |
mlrun/api/crud/__init__.py
|
katyakats/mlrun
|
e0cacad73113f616eeb5c0297628b87244957ab0
|
[
"Apache-2.0"
] | null | null | null |
from .artifacts import Artifacts # noqa: F401
from .feature_store import FeatureStore # noqa: F401
from .functions import Functions # noqa: F401
from .logs import Logs # noqa: F401
from .pipelines import Pipelines # noqa: F401
from .projects import Projects # noqa: F401
from .runs import Runs # noqa: F401
from .runtimes import Runtimes # noqa: F401
| 39.888889
| 53
| 0.754875
| 49
| 359
| 5.510204
| 0.285714
| 0.237037
| 0.311111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081356
| 0.178273
| 359
| 8
| 54
| 44.875
| 0.833898
| 0.24234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
943e9bdaff4f33acbca425549ac0c52afcf8d5cd
| 20,291
|
py
|
Python
|
handlers/users/change_datas.py
|
KARTASAR/DatingBot
|
fa20bb9188e6d9744ecdb35854ea54fb7ea8fd55
|
[
"MIT"
] | 12
|
2021-10-20T16:05:02.000Z
|
2022-03-22T14:20:30.000Z
|
handlers/users/change_datas.py
|
KARTASAR/DatingBot
|
fa20bb9188e6d9744ecdb35854ea54fb7ea8fd55
|
[
"MIT"
] | 17
|
2021-11-02T20:33:14.000Z
|
2022-03-17T11:45:24.000Z
|
handlers/users/change_datas.py
|
KARTASAR/DatingBot
|
fa20bb9188e6d9744ecdb35854ea54fb7ea8fd55
|
[
"MIT"
] | 7
|
2021-09-25T20:19:17.000Z
|
2022-02-20T09:31:41.000Z
|
from keyboards.inline.lifestyle_choice_inline import lifestyle_inline_kb
from keyboards.inline.change_profile_inline import change_profile_kb
from aiogram.utils.exceptions import MessageToReplyNotFound
from aiogram.types import CallbackQuery, ContentType
from keyboards.inline.main_menu import inline_start
from states.new_data_state import NewData
from aiogram.dispatcher import FSMContext
from loader import dp, bot, db
from aiogram import types
@dp.callback_query_handler(text='change_profile')
async def start_change_data(call: CallbackQuery):
    """Entry point of the profile-editing flow: show the edit keyboard."""
    # Acknowledge the callback so the Telegram client stops its spinner;
    # the answer is cached client-side for 60 seconds.
    await call.answer(cache_time=60)
    await bot.send_message(call.from_user.id, 'Выберите, что вы хотите изменить: ',
                           reply_markup=change_profile_kb)
@dp.message_handler(text='Имя')
async def change_name(message: types.Message):
    """Ask the user for a new name and enter the NewData.name state."""
    await message.reply('Введите новое имя')
    await NewData.name.set()


@dp.message_handler(state=NewData.name)
async def change_name(message: types.Message, state: FSMContext):
    """Persist the new name, confirm it, and return to the main menu."""
    try:
        await db.update_user_varname(varname=message.text, telegram_id=message.from_user.id)
        await message.reply(f'Ваше новое имя: <b>{message.text}</b>', reply_markup=types.ReplyKeyboardRemove())
        await state.reset_state()
    except MessageToReplyNotFound:
        await message.reply('Произошла неизвестная ошибка')
        await state.reset_state()
    # NOTE(review): this extra reset is redundant (both branches above already
    # reset), but it is kept to preserve the original behaviour exactly.
    await state.reset_state()
    await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                         "<b>🤝 Сотрудничество: </b>\n"
                         "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                         "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Возраст')
async def change_age(message: types.Message):
    """Ask the user for a new age and enter the NewData.age state."""
    await message.reply('Введите новый возраст')
    await NewData.age.set()


@dp.message_handler(state=NewData.age)
async def change_age(message: types.Message, state: FSMContext):
    """Persist the new age, confirm it, and return to the main menu."""
    try:
        await db.update_user_age(age=message.text, telegram_id=message.from_user.id)
        await message.reply(f'Ваш новый возраст: <b>{message.text}</b>', reply_markup=types.ReplyKeyboardRemove())
        await state.reset_state()
    except MessageToReplyNotFound:
        await message.reply('Произошла неизвестная ошибка')
        await state.reset_state()
    # NOTE(review): redundant extra reset kept to match the original behaviour.
    await state.reset_state()
    await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                         "<b>🤝 Сотрудничество: </b>\n"
                         "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                         "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Национальность')
async def change_nationality(message: types.Message):
    """Ask the user for a new nationality and enter NewData.nationality."""
    await message.reply('Введите новую национальность')
    await NewData.nationality.set()


@dp.message_handler(state=NewData.nationality)
async def change_nationality(message: types.Message, state: FSMContext):
    """Persist the new nationality, confirm it, and return to the main menu."""
    try:
        await db.update_user_national(national=message.text, telegram_id=message.from_user.id)
        await message.reply(f'Ваша новая национальность: <b>{message.text}</b>',
                            reply_markup=types.ReplyKeyboardRemove())
        await state.reset_state()
    except MessageToReplyNotFound:
        await message.reply('Произошла неизвестная ошибка')
        await state.reset_state()
    # NOTE(review): redundant extra reset kept to match the original behaviour.
    await state.reset_state()
    await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                         "<b>🤝 Сотрудничество: </b>\n"
                         "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                         "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Город')
async def change_city(message: types.Message):
    """Ask the user for a new city and enter the NewData.city state."""
    await message.reply('Введите новый город')
    await NewData.city.set()


@dp.message_handler(state=NewData.city)
async def change_city(message: types.Message, state: FSMContext):
    """Persist the new city, confirm it, and return to the main menu."""
    try:
        await db.update_user_city(city=message.text, telegram_id=message.from_user.id)
        await message.reply(f'Ваш новый город: <b>{message.text}</b>', reply_markup=types.ReplyKeyboardRemove())
        await state.reset_state()
    except MessageToReplyNotFound:
        await message.reply('Произошла неизвестная ошибка')
        await state.reset_state()
    # NOTE(review): redundant extra reset kept to match the original behaviour.
    await state.reset_state()
    await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                         "<b>🤝 Сотрудничество: </b>\n"
                         "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                         "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Пол')
async def change_sex(message: types.Message):
    """Offer a male/female choice and enter the NewData.sex state."""
    kb = types.InlineKeyboardMarkup()
    kb.add(types.InlineKeyboardButton(text='Мужской', callback_data='male'))
    kb.add(types.InlineKeyboardButton(text='Женский', callback_data='female'))
    await message.reply('Выберите новый пол: ', reply_markup=kb)
    await NewData.sex.set()


@dp.callback_query_handler(text='male', state=NewData.sex)
@dp.callback_query_handler(text='female', state=NewData.sex)
async def change_sex(call: CallbackQuery, state: FSMContext):
    """Persist the chosen sex, confirm it, and return to the main menu."""
    await call.answer(cache_time=60)

    async def _save(sex_label, confirmation):
        # Update the DB and confirm; on the known aiogram error report a
        # generic failure instead. Either way the FSM state is reset.
        try:
            await db.update_user_sex(sex=sex_label, telegram_id=call.from_user.id)
            await bot.send_message(call.from_user.id, confirmation,
                                   reply_markup=types.ReplyKeyboardRemove())
            await state.reset_state()
        except MessageToReplyNotFound:
            await bot.send_message(call.from_user.id, 'Произошла неизвестная ошибка')
            await state.reset_state()

    if call.data == 'male':
        await _save('Мужской', 'Ваш новый пол: <b>Мужской</b>')
    if call.data == 'female':
        await _save('Женский', 'Ваш новый пол: <b>Женский</b>')
    # NOTE(review): redundant extra reset kept to match the original behaviour.
    await state.reset_state()
    await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                              "<b>🤝 Сотрудничество: </b>\n"
                              "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                              "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Машина')
async def change_car(message: types.Message):
    """Ask whether the user owns a car and enter the NewData.car state."""
    kb = types.InlineKeyboardMarkup()
    kb.add(types.InlineKeyboardButton(text='Есть', callback_data='true'))
    kb.add(types.InlineKeyboardButton(text='Нет', callback_data='false'))
    await message.reply('Есть ли у Вас машина?: ', reply_markup=kb)
    await NewData.car.set()


@dp.callback_query_handler(text='true', state=NewData.car)
@dp.callback_query_handler(text='false', state=NewData.car)
async def change_car(call: CallbackQuery, state: FSMContext):
    """Persist car ownership, confirm it, and return to the main menu."""
    await call.answer(cache_time=60)

    async def _save(has_car, confirmation):
        # Update the DB and confirm; on the known aiogram error report a
        # generic failure instead. Either way the FSM state is reset.
        try:
            await db.update_user_car(car=has_car, telegram_id=call.from_user.id)
            await bot.send_message(call.from_user.id, confirmation,
                                   reply_markup=types.ReplyKeyboardRemove())
            await state.reset_state()
        except MessageToReplyNotFound:
            await bot.send_message(call.from_user.id, 'Произошла неизвестная ошибка')
            await state.reset_state()

    if call.data == 'true':
        await _save(True, 'Теперь у вас: <b>есть</b> машина')
    if call.data == 'false':
        await _save(False, 'Теперь у вас: <b>нет</b> машины')
    # NOTE(review): redundant extra reset kept to match the original behaviour.
    await state.reset_state()
    await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                              "<b>🤝 Сотрудничество: </b>\n"
                              "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                              "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Дети')
async def change_kids(message: types.Message):
    """Ask whether the user has children and enter the NewData.child state."""
    kb = types.InlineKeyboardMarkup()
    kb.add(types.InlineKeyboardButton(text='Есть', callback_data='true'))
    kb.add(types.InlineKeyboardButton(text='Нет', callback_data='false'))
    await message.reply('Есть ли у Вас дети?: ', reply_markup=kb)
    await NewData.child.set()


@dp.callback_query_handler(text='true', state=NewData.child)
@dp.callback_query_handler(text='false', state=NewData.child)
async def change_children(call: CallbackQuery, state: FSMContext):
    """Persist the children flag, confirm it, and return to the main menu."""
    await call.answer(cache_time=60)

    async def _save(has_kids, confirmation):
        # Update the DB and confirm; on the known aiogram error report a
        # generic failure instead. Either way the FSM state is reset.
        try:
            await db.update_user_kids(kids=has_kids, telegram_id=call.from_user.id)
            await bot.send_message(call.from_user.id, confirmation,
                                   reply_markup=types.ReplyKeyboardRemove())
            await state.reset_state()
        except MessageToReplyNotFound:
            await bot.send_message(call.from_user.id, 'Произошла неизвестная ошибка')
            await state.reset_state()

    if call.data == 'true':
        await _save(True, 'Теперь у вас: <b>есть</b> дети')
    if call.data == 'false':
        await _save(False, 'Теперь у вас: <b>нет</b> детей')
    # NOTE(review): redundant extra reset kept to match the original behaviour.
    await state.reset_state()
    await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                              "<b>🤝 Сотрудничество: </b>\n"
                              "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                              "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Жилье')
async def change_home(message: types.Message):
    """Ask whether the user owns a flat and enter the NewData.own_home state."""
    kb = types.InlineKeyboardMarkup()
    kb.add(types.InlineKeyboardButton(text='Есть', callback_data='true'))
    kb.add(types.InlineKeyboardButton(text='Нет', callback_data='false'))
    await message.reply('Есть ли у Вас квартира: ', reply_markup=kb)
    await NewData.own_home.set()


@dp.callback_query_handler(text='true', state=NewData.own_home)
@dp.callback_query_handler(text='false', state=NewData.own_home)
async def change_home(call: CallbackQuery, state: FSMContext):
    """Persist the apartment flag, confirm it, and return to the main menu."""
    await call.answer(cache_time=60)

    async def _save(has_flat, confirmation):
        # Update the DB and confirm; on the known aiogram error report a
        # generic failure instead. Either way the FSM state is reset.
        try:
            await db.update_user_apartment(apartment=has_flat, telegram_id=call.from_user.id)
            await bot.send_message(call.from_user.id, confirmation,
                                   reply_markup=types.ReplyKeyboardRemove())
            await state.reset_state()
        except MessageToReplyNotFound:
            await bot.send_message(call.from_user.id, 'Произошла неизвестная ошибка')
            await state.reset_state()

    if call.data == 'true':
        await _save(True, 'Теперь у вас: <b>есть</b> квартира')
    if call.data == 'false':
        await _save(False, 'Теперь у вас: <b>нет</b> квартиры')
    # NOTE(review): redundant extra reset kept to match the original behaviour.
    await state.reset_state()
    await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                              "<b>🤝 Сотрудничество: </b>\n"
                              "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                              "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Образование')
async def change_education(message: types.Message):
    """Offer a higher/secondary education choice and enter NewData.education."""
    keyboard = types.InlineKeyboardMarkup()
    btn1 = types.InlineKeyboardButton(text='Высшее', callback_data='higher_edu')
    keyboard.add(btn1)
    btn2 = types.InlineKeyboardButton(text='Среднее', callback_data='secondary_edu')
    keyboard.add(btn2)
    await message.reply(f'Какое у Вас образование: ', reply_markup=keyboard)
    await NewData.education.set()
@dp.callback_query_handler(text='higher_edu', state=NewData.education)
@dp.callback_query_handler(text='secondary_edu', state=NewData.education)
async def change_education(call: CallbackQuery, state: FSMContext):
    """Persist the chosen education level and return to the main menu."""
    await call.answer(cache_time=60)
    if call.data == 'higher_edu':
        try:
            # FIXME(review): this writes the *apartment* flag, not the user's
            # education — looks like a copy/paste from change_home. It should
            # presumably call a dedicated education updater on `db`; confirm
            # against the db layer before changing.
            await db.update_user_apartment(apartment=True, telegram_id=call.from_user.id)
            await bot.send_message(call.from_user.id, f'Теперь у вас: <b>Высшее</b> образование',
                                   reply_markup=types.ReplyKeyboardRemove())
            await state.reset_state()
        except MessageToReplyNotFound:
            await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
            await state.reset_state()
    if call.data == 'secondary_edu':
        try:
            # FIXME(review): same copy/paste issue as above — updates the
            # apartment flag instead of education.
            await db.update_user_apartment(apartment=False, telegram_id=call.from_user.id)
            await bot.send_message(call.from_user.id, f'Теперь у вас: <b>Среднее</b> образование',
                                   reply_markup=types.ReplyKeyboardRemove())
            await state.reset_state()
        except MessageToReplyNotFound:
            await bot.send_message(call.from_user.id, f'Произошла неизвестная ошибка')
            await state.reset_state()
    # Redundant extra reset (both branches already reset); kept as-is.
    await state.reset_state()
    await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                              "<b>🤝 Сотрудничество: </b>\n"
                              "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                              "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Занятие')
async def change_style(message: types.Message):
    """Ask what the user does for a living and enter NewData.hobbies."""
    await message.reply('Чем вы занимаетесь?', reply_markup=lifestyle_inline_kb)
    await NewData.hobbies.set()


@dp.callback_query_handler(state=NewData.hobbies, text_contains=['study_lifestyle'])
@dp.callback_query_handler(state=NewData.hobbies, text_contains=['work_lifestyle'])
@dp.callback_query_handler(state=NewData.hobbies, text_contains=['job_find_lifestyle'])
@dp.callback_query_handler(state=NewData.hobbies, text_contains=['householder_lifestyle'])
async def change_style(call: CallbackQuery, state: FSMContext):
    """Persist the chosen lifestyle, confirm it, and return to the main menu."""
    await call.answer(cache_time=60)
    # Dispatch table replacing the original if/elif chain: callback data ->
    # (value stored in the DB, confirmation message shown to the user).
    outcomes = {
        'study_lifestyle': ('Учусь', 'Теперь вы учитесь!'),
        'work_lifestyle': ('Работаю', 'Теперь вы работаете!'),
        'job_find_lifestyle': ('Ищу работу', 'Теперь вы ищете работу!'),
        'householder_lifestyle': ('Домохозяйка/Домохозяин', 'Теперь вы домохозяин/домохозяйка!'),
    }
    selected = outcomes.get(call.data)
    if selected is not None:
        lifestyle, confirmation = selected
        try:
            await db.update_user_lifestyle(lifestyle=lifestyle, telegram_id=call.from_user.id)
            await bot.send_message(call.from_user.id, confirmation,
                                   reply_markup=types.ReplyKeyboardRemove())
            await state.reset_state()
        except MessageToReplyNotFound:
            await bot.send_message(call.from_user.id, 'Произошла неизвестная ошибка')
            await state.reset_state()
    # NOTE(review): redundant extra reset kept to match the original behaviour.
    await state.reset_state()
    await call.message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                              "<b>🤝 Сотрудничество: </b>\n"
                              "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                              "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='Фото')
async def new_photo(message: types.Message):
    """Ask for a new profile photo and enter the NewData.photo state."""
    await message.reply('Отправьте мне новую фотографию')
    await NewData.photo.set()


@dp.message_handler(content_types=ContentType.PHOTO, state=NewData.photo)
async def update_photo_complete(message: types.Message, state: FSMContext):
    """Store the file id of the received photo and return to the main menu."""
    # NOTE(review): photo[0] is the first (smallest) size Telegram sends —
    # presumably intentional; confirm if the full-size photo was wanted.
    file_id = message.photo[0].file_id
    try:
        await db.update_user_photo_id(photo_id=file_id, telegram_id=message.from_user.id)
        await message.reply('Фото принято!',
                            reply_markup=types.ReplyKeyboardRemove())
        await state.reset_state()
    except MessageToReplyNotFound:
        await message.reply('Произошла ошибка! Попробуйте еще раз либо отправьте другую фотографию. \n'
                            'Если ошибка осталась, напишите системному администратору.')
        await state.reset_state()
    await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                         "<b>🤝 Сотрудничество: </b>\n"
                         "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                         "@DRomanovizc", reply_markup=inline_start)
@dp.message_handler(text='О себе')
async def new_comment(message: types.Message):
    """Ask for a new profile description and enter NewData.commentary."""
    await message.reply('Отправьте мне новое описание анкеты: ')
    await NewData.commentary.set()


@dp.message_handler(state=NewData.commentary)
async def update_comment_complete(message: types.Message, state: FSMContext):
    """Store the new profile description and return to the main menu."""
    try:
        await db.update_user_commentary(commentary=message.text, telegram_id=message.from_user.id)
        await message.reply('Комментарий принят!',
                            reply_markup=types.ReplyKeyboardRemove())
        await state.reset_state()
    except MessageToReplyNotFound:
        await message.reply('Произошла ошибка! Попробуйте еще раз изменить описание. '
                            'Возможно, Ваше сообщение слишком большое\n'
                            'Если ошибка осталась, напишите системному администратору.')
        await state.reset_state()
    await message.answer("<b>❤️️ DATE_BOT</b> - платформа для поиска новых знакомств\n\n"
                         "<b>🤝 Сотрудничество: </b>\n"
                         "Если у вас есть предложение о сотрудничестве, пишите сюда - "
                         "@DRomanovizc", reply_markup=inline_start)
| 47.631455
| 116
| 0.658469
| 2,455
| 20,291
| 5.303055
| 0.079022
| 0.038405
| 0.057608
| 0.076811
| 0.841232
| 0.821568
| 0.79937
| 0.765957
| 0.734388
| 0.702281
| 0
| 0.002256
| 0.235572
| 20,291
| 425
| 117
| 47.743529
| 0.833989
| 0
| 0
| 0.601108
| 0
| 0
| 0.205313
| 0.008427
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.024931
| 0
| 0.024931
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9486bc815f43da55aa4f93243c2813ab26f8585b
| 15,741
|
py
|
Python
|
test/server/test_mailbox.py
|
BoniLindsley/pymap
|
b3190d20799a6d342888e51bfc55cdfcbfe3ed26
|
[
"MIT"
] | 18
|
2015-06-04T21:09:37.000Z
|
2022-03-04T08:14:31.000Z
|
test/server/test_mailbox.py
|
BoniLindsley/pymap
|
b3190d20799a6d342888e51bfc55cdfcbfe3ed26
|
[
"MIT"
] | 114
|
2018-10-17T23:11:00.000Z
|
2022-03-19T16:59:16.000Z
|
test/server/test_mailbox.py
|
BoniLindsley/pymap
|
b3190d20799a6d342888e51bfc55cdfcbfe3ed26
|
[
"MIT"
] | 8
|
2015-02-03T19:30:52.000Z
|
2021-11-20T12:47:03.000Z
|
from textwrap import dedent
import pytest
from .base import TestBase
pytestmark = pytest.mark.asyncio
class TestMailbox(TestBase):
async def test_list_sep(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'list1 LIST "" ""\r\n')
transport.push_write(
b'* LIST (\\Noselect) "/" ""\r\n'
b'list1 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_list(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'list1 LIST "" *\r\n')
transport.push_write(
b'* LIST (\\HasNoChildren) "/" INBOX\r\n'
b'* LIST (\\HasNoChildren) "/" Sent\r\n'
b'* LIST (\\HasNoChildren) "/" Trash\r\n'
b'list1 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_create(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'create1 CREATE "test mailbox"\r\n')
transport.push_write(
b'create1 OK [MAILBOXID (', (br'F[a-f0-9]+', ), b')]'
b' CREATE completed.\r\n')
transport.push_readline(
b'list1 LIST "" *\r\n')
transport.push_write(
b'* LIST (\\HasNoChildren) "/" INBOX\r\n'
b'* LIST (\\HasNoChildren) "/" Sent\r\n'
b'* LIST (\\HasNoChildren) "/" Trash\r\n'
b'* LIST (\\HasNoChildren) "/" "test mailbox"\r\n'
b'list1 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_create_inferior(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'create1 CREATE "Trash/test mailbox"\r\n')
transport.push_write(
b'create1 OK [MAILBOXID (', (br'F[a-f0-9]+', ), b')]'
b' CREATE completed.\r\n')
transport.push_readline(
b'list1 LIST "Trash" *\r\n')
transport.push_write(
b'* LIST (\\HasChildren) "/" Trash\r\n'
b'* LIST (\\HasNoChildren) "/" "Trash/test mailbox"\r\n'
b'list1 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_delete(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'delete1 DELETE Sent\r\n')
transport.push_write(
b'delete1 OK DELETE completed.\r\n')
transport.push_readline(
b'list2 LIST "" *\r\n')
transport.push_write(
b'* LIST (\\HasNoChildren) "/" INBOX\r\n'
b'* LIST (\\HasNoChildren) "/" Trash\r\n'
b'list2 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_delete_superior(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'create1 CREATE "Trash/test mailbox"\r\n')
transport.push_write(
b'create1 OK [MAILBOXID (', (br'F[a-f0-9]+', ), b')]'
b' CREATE completed.\r\n')
transport.push_readline(
b'delete1 DELETE Trash\r\n')
transport.push_write(
b'delete1 OK DELETE completed.\r\n')
transport.push_readline(
b'list1 LIST "Trash" *\r\n')
transport.push_write(
b'* LIST (\\Noselect \\HasChildren) "/" Trash\r\n'
b'* LIST (\\HasNoChildren) "/" "Trash/test mailbox"\r\n'
b'list1 OK LIST completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_delete_selected(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_select(b'Sent')
transport.push_readline(
b'delete1 DELETE Sent\r\n')
transport.push_write(
b'* BYE Selected mailbox no longer exists.\r\n'
b'delete1 OK DELETE completed.\r\n')
await self.run(transport)
async def test_lsub(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'lsub1 LSUB "" *\r\n')
transport.push_write(
b'* LSUB (\\HasNoChildren) "/" INBOX\r\n'
b'lsub1 OK LSUB completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_subscribe_unsubscribe(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'subscribe1 SUBSCRIBE "Sent"\r\n')
transport.push_write(
b'subscribe1 OK SUBSCRIBE completed.\r\n')
transport.push_readline(
b'subscribe2 SUBSCRIBE "Trash"\r\n')
transport.push_write(
b'subscribe2 OK SUBSCRIBE completed.\r\n')
transport.push_readline(
b'unsubscribe1 UNSUBSCRIBE "Trash"\r\n')
transport.push_write(
b'unsubscribe1 OK UNSUBSCRIBE completed.\r\n')
transport.push_readline(
b'lsub1 LSUB "" *\r\n')
transport.push_write(
b'* LSUB (\\HasNoChildren) "/" INBOX\r\n'
b'* LSUB (\\HasNoChildren) "/" Sent\r\n'
b'lsub1 OK LSUB completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_status(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'status1 STATUS INBOX '
b'(MESSAGES RECENT UIDNEXT UIDVALIDITY UNSEEN MAILBOXID)\r\n')
transport.push_write(
b'* STATUS INBOX (MESSAGES 4 RECENT 1 UIDNEXT 105 '
b'UIDVALIDITY ', (br'\d+', b'uidval1'), b' UNSEEN 2 '
b'MAILBOXID (', (br'F[a-f0-9]+', b'mbxid'), b'))\r\n'
b'status1 OK STATUS completed.\r\n')
transport.push_select(b'INBOX', 4, 1, 105, 3)
transport.push_readline(
b'status2 STATUS INBOX '
b'(MESSAGES RECENT UIDNEXT UIDVALIDITY UNSEEN)\r\n')
transport.push_write(
b'* STATUS INBOX (MESSAGES 4 RECENT 1 UIDNEXT 105 '
b'UIDVALIDITY ', (br'\d+', b'uidval2'), b' UNSEEN 2)\r\n'
b'status2 OK STATUS completed.\r\n')
transport.push_readline(
b'close1 CLOSE\r\n')
transport.push_write(
b'close1 OK CLOSE completed.\r\n')
transport.push_readline(
b'status3 STATUS INBOX '
b'(MESSAGES RECENT UIDNEXT UIDVALIDITY UNSEEN)\r\n')
transport.push_write(
b'* STATUS INBOX (MESSAGES 4 RECENT 0 UIDNEXT 105 '
b'UIDVALIDITY ', (br'\d+', b'uidval2'), b' UNSEEN 2)\r\n'
b'status3 OK STATUS completed.\r\n')
transport.push_logout()
await self.run(transport)
assert self.matches['uidval1'] == self.matches['uidval2']
assert self.matches['mbxid1'] == self.matches['mbxid']
async def test_append(self, imap_server):
transport = self.new_transport(imap_server)
message = b'test message\r\n'
transport.push_login()
transport.push_readline(
b'append1 APPEND INBOX (\\Seen) {%i}\r\n' % len(message))
transport.push_write(
b'+ Literal string\r\n')
transport.push_readexactly(message)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append1 OK [APPENDUID ', (br'\d+', ), b' 105]'
b' APPEND completed.\r\n')
transport.push_select(b'INBOX', 5, 2, 106, 3)
transport.push_logout()
await self.run(transport)
async def test_append_empty(self, imap_server):
transport = self.new_transport(imap_server)
transport.push_login()
transport.push_readline(
b'append1 APPEND INBOX {0}\r\n')
transport.push_write(
b'+ Literal string\r\n')
transport.push_readexactly(
b'')
transport.push_readline(
b'\r\n')
transport.push_write(
b'append1 NO APPEND cancelled.\r\n')
transport.push_select(b'INBOX', 4, 1, 105, 3)
transport.push_logout()
await self.run(transport)
async def test_append_multi(self, imap_server):
transport = self.new_transport(imap_server)
message_1 = b'test message\r\n'
message_2 = b'other test message\r\n'
transport.push_login()
transport.push_readline(
b'append1 APPEND INBOX (\\Seen) {%i}\r\n' % len(message_1))
transport.push_write(
b'+ Literal string\r\n')
transport.push_readexactly(message_1)
transport.push_readline(
b' {%i}\r\n' % len(message_2))
transport.push_write(
b'+ Literal string\r\n')
transport.push_readexactly(message_2)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append1 OK [APPENDUID ', (br'\d+', ), b' 105:106]'
b' APPEND completed.\r\n')
transport.push_select(b'INBOX', 6, 3, 107, 3)
transport.push_logout()
await self.run(transport)
async def test_append_selected(self, imap_server):
transport = self.new_transport(imap_server)
message = b'test message\r\n'
transport.push_login()
transport.push_select(b'INBOX', 4, 1, 105, 3)
transport.push_readline(
b'append1 APPEND INBOX (\\Seen) {%i}\r\n' % len(message))
transport.push_write(
b'+ Literal string\r\n')
transport.push_readexactly(message)
transport.push_readline(
b'\r\n')
transport.push_write(
b'* 5 EXISTS\r\n'
b'* 2 RECENT\r\n'
b'* 5 FETCH (FLAGS (\\Recent \\Seen))\r\n'
b'append1 OK [APPENDUID ', (br'\d+', ), b' 105]'
b' APPEND completed.\r\n')
transport.push_readline(
b'status1 STATUS INBOX (RECENT)\r\n')
transport.push_write(
b'* STATUS INBOX (RECENT 2)\r\n'
b'status1 OK STATUS completed.\r\n')
transport.push_logout()
await self.run(transport)
async def test_append_email_id(self, imap_server):
transport = self.new_transport(imap_server)
message_1 = b'test message\r\n'
message_2 = b'other test message\r\n'
message_3 = message_1
transport.push_login()
transport.push_readline(
b'append1 APPEND INBOX {%i+}\r\n' % len(message_1))
transport.push_readexactly(message_1)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append1 OK [APPENDUID ', (br'\d+', ), b' 105]'
b' APPEND completed.\r\n')
transport.push_readline(
b'append2 APPEND INBOX {%i+}\r\n' % len(message_2))
transport.push_readexactly(message_2)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append2 OK [APPENDUID ', (br'\d+', ), b' 106]'
b' APPEND completed.\r\n')
transport.push_readline(
b'append3 APPEND INBOX {%i+}\r\n' % len(message_3))
transport.push_readexactly(message_3)
transport.push_readline(
b'\r\n')
transport.push_write(
b'append3 OK [APPENDUID ', (br'\d+', ), b' 107]'
b' APPEND completed.\r\n')
transport.push_select(b'INBOX')
transport.push_readline(
b'fetch1 UID FETCH 105:107 (EMAILID)\r\n')
transport.push_write(
b'* 5 FETCH (EMAILID (', (br'M[a-f0-9]+', b'id1'), b') '
b'UID 105)\r\n'
b'* 6 FETCH (EMAILID (', (br'M[a-f0-9]+', b'id2'), b') '
b'UID 106)\r\n'
b'* 7 FETCH (EMAILID (', (br'M[a-f0-9]+', b'id3'), b') '
b'UID 107)\r\n'
b'fetch1 OK UID FETCH completed.\r\n')
transport.push_logout()
await self.run(transport)
assert self.matches['id1'] != self.matches['id2']
assert self.matches['id1'] == self.matches['id3']
async def test_append_thread_id(self, imap_server):
    """APPEND related messages and check the server's THREADID grouping.

    Messages 1-2 share ``Message-Id: <one>`` and (modulo the ``Fwd:``
    prefix) a matching subject, so they must share a thread; message 3
    reuses that Message-Id with an unrelated subject, so it must not.
    Messages 4-7 are linked through ``In-Reply-To``/``References`` headers
    and must all resolve to one second thread.
    """
    messages = [
        dedent("""\
            Message-Id: <one>
            Subject: thread one
            """).encode('ascii'),
        dedent("""\
            Message-Id: <one>
            Subject: Fwd: thread one
            """).encode('ascii'),
        dedent("""\
            Message-Id: <one>
            Subject: unrelated to thread one
            """).encode('ascii'),
        dedent("""\
            Message-Id: <two>
            Subject: thread two
            """).encode('ascii'),
        dedent("""\
            Message-Id: <three>
            In-Reply-To: <two>
            Subject: Re: thread two
            """).encode('ascii'),
        dedent("""\
            Message-Id: <four>
            References: <two> <five>
            Subject: [a list] thread two
            """).encode('ascii'),
        dedent("""\
            Message-Id: <five>
            Subject: thread two
            """).encode('ascii')]
    transport = self.new_transport(imap_server)
    transport.push_login()
    # Fix: the original unpacked enumerate() here but never used the index.
    for message in messages:
        transport.push_readline(
            b'append1 APPEND INBOX {%i+}\r\n' % len(message))
        transport.push_readexactly(message)
        transport.push_readline(
            b'\r\n')
        transport.push_write(
            b'append1 OK [APPENDUID ', (br'\d+ \d+', ), b']'
            b' APPEND completed.\r\n')
    transport.push_select(b'INBOX')
    transport.push_readline(
        b'fetch1 UID FETCH 105:* (THREADID)\r\n')
    transport.push_write(
        b'* 5 FETCH (THREADID (', (br'T[a-f0-9]+', b'id1'), b') '
        b'UID 105)\r\n'
        b'* 6 FETCH (THREADID (', (br'T[a-f0-9]+', b'id2'), b') '
        b'UID 106)\r\n'
        b'* 7 FETCH (THREADID (', (br'T[a-f0-9]+', b'id3'), b') '
        b'UID 107)\r\n'
        b'* 8 FETCH (THREADID (', (br'T[a-f0-9]+', b'id4'), b') '
        b'UID 108)\r\n'
        b'* 9 FETCH (THREADID (', (br'T[a-f0-9]+', b'id5'), b') '
        b'UID 109)\r\n'
        b'* 10 FETCH (THREADID (', (br'T[a-f0-9]+', b'id6'), b') '
        b'UID 110)\r\n'
        b'* 11 FETCH (THREADID (', (br'T[a-f0-9]+', b'id7'), b') '
        b'UID 111)\r\n'
        b'fetch1 OK UID FETCH completed.\r\n')
    transport.push_logout()
    await self.run(transport)
    # Same Message-Id + related subject -> same thread; otherwise distinct.
    assert self.matches['id1'] == self.matches['id2']
    assert self.matches['id1'] != self.matches['id3']
    assert self.matches['id1'] != self.matches['id4']
    # Reference-based threading groups messages 4-7 together.
    assert self.matches['id4'] == self.matches['id5']
    assert self.matches['id4'] == self.matches['id6']
    assert self.matches['id4'] == self.matches['id7']
| 39.550251
| 74
| 0.540309
| 1,902
| 15,741
| 4.356993
| 0.081493
| 0.197659
| 0.095571
| 0.130325
| 0.8885
| 0.874744
| 0.853385
| 0.798842
| 0.758417
| 0.718113
| 0
| 0.024845
| 0.322406
| 15,741
| 397
| 75
| 39.649874
| 0.75211
| 0
| 0
| 0.692935
| 0
| 0
| 0.333164
| 0
| 0
| 0
| 0
| 0
| 0.027174
| 1
| 0
| false
| 0
| 0.008152
| 0
| 0.01087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
84902710b82dfced01f7fd7a95db3148f03bdd03
| 5,771
|
py
|
Python
|
tests/test_parameter.py
|
lukasz-migas/SimpleParam
|
a796df44c675d67d82a73ff5cac689fd626ebfde
|
[
"MIT"
] | null | null | null |
tests/test_parameter.py
|
lukasz-migas/SimpleParam
|
a796df44c675d67d82a73ff5cac689fd626ebfde
|
[
"MIT"
] | 16
|
2019-10-28T07:43:59.000Z
|
2021-10-19T02:40:28.000Z
|
tests/test_parameter.py
|
lukasz-migas/SimpleParam
|
a796df44c675d67d82a73ff5cac689fd626ebfde
|
[
"MIT"
] | null | null | null |
"""Test Parameter class"""
import operator
import pytest
import simpleparam as param
class TestParameterSetup(object):
    """Construction-time behaviour of ``simpleparam.Parameter``."""

    @staticmethod
    def test_creation_float():
        """A float given at construction is stored on ``value``."""
        initial = 1.0
        parameter = param.Parameter(value=initial)
        assert parameter.value == initial

    @staticmethod
    def test_creation_doc():
        """The ``doc`` string is kept alongside the value."""
        initial = 42.01
        note = "I am a parameter"
        parameter = param.Parameter(value=initial, doc=note)
        assert parameter.value == initial
        assert parameter.doc == note

    @staticmethod
    def test_allow_none():
        """``None`` is accepted as a value when ``allow_None`` is enabled."""
        parameter = param.Parameter(value=None, allow_None=True)
        assert parameter.value is None

    @staticmethod
    def test_kind():
        """A plain Parameter reports its kind as "Parameter"."""
        parameter = param.Parameter(value=11.01474)
        assert parameter.kind == "Parameter"

    @staticmethod
    def test_set_kind():
        """``kind`` can be reassigned after construction."""
        parameter = param.Parameter(value=11.01474)
        parameter.kind = "Number"
        assert parameter.kind == "Number"

    @staticmethod
    def test_setting_wrong():
        """A non-boolean ``allow_None`` argument raises ValueError."""
        with pytest.raises(ValueError):
            param.Parameter(value=11.01474, allow_None="False")
class TestParameterOperations(object):
    """Arithmetic/comparison dunder operations of ``simpleparam.Parameter``.

    Each test builds a Parameter, applies one dunder operator, and checks
    the result against the plain-Python equivalent on the raw value.
    (The original file reused the docstring "correct initilization" for
    every test; these now describe what each test actually exercises.)
    """

    @staticmethod
    def test_add():
        """``__add__`` returns value + other."""
        value = 42.01
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        new_value = value + 1
        num_a.value = num_a.__add__(1)
        assert num_a.value == new_value

    @staticmethod
    def test_sub():
        """``__sub__`` returns value - other."""
        value = 42.01
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        new_value = value - 1
        num_a.value = num_a.__sub__(1)
        assert num_a.value == new_value

    @staticmethod
    def test_div():
        """``__truediv__`` returns value / other."""
        value = 42.0
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        new_value = value / 2
        num_a.value = num_a.__truediv__(2)
        assert num_a.value == new_value

    @staticmethod
    def test_mul():
        """``__mul__`` returns value * other."""
        value = 42.01
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        new_value = value * 2
        num_a.value = num_a.__mul__(2)
        assert num_a.value == new_value

    @staticmethod
    def test_pow():
        """``__pow__`` returns value ** other."""
        value = 42.01
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        new_value = value ** 2
        num_a.value = num_a.__pow__(2)
        assert num_a.value == new_value

    @staticmethod
    def test_floordiv():
        """``__floordiv__`` returns value // other."""
        value = 42.01
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        new_value = value // 2
        num_a.value = num_a.__floordiv__(2)
        assert num_a.value == new_value

    @staticmethod
    def test_mod():
        """``__mod__`` returns value % other."""
        value = 42.01
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        new_value = value % 2
        num_a.value = num_a.__mod__(2)
        assert num_a.value == new_value

    @staticmethod
    def test_rshift():
        """``__rshift__`` matches operator.rshift on an int value."""
        value = 42
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        new_value = operator.rshift(value, 1)
        num_a.value = num_a.__rshift__(1)
        assert num_a.value == new_value

    @staticmethod
    def test_lshift():
        """``__lshift__`` matches operator.lshift on an int value."""
        value = 42
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        new_value = operator.lshift(value, 1)
        num_a.value = num_a.__lshift__(1)
        assert num_a.value == new_value

    @staticmethod
    def test_lt():
        """The stored value compares less-than a larger number."""
        value = 42.01
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        # Note: this exercises the float's __lt__, not the Parameter's.
        assert num_a.value.__lt__(100)

    @staticmethod
    def test_gt():
        """The stored value compares greater-than a smaller number."""
        value = 42.01
        num_a = param.Parameter(value=value)
        assert num_a.value == value
        # Note: this exercises the float's __gt__, not the Parameter's.
        assert num_a.value.__gt__(1)

    @staticmethod
    def test_abs():
        """``__abs__`` mirrors abs() on the stored value."""
        value = -42.01
        num_a = param.Parameter(value=value)
        assert num_a.__abs__() == abs(value)

    @staticmethod
    def test_neg():
        """``__neg__`` mirrors unary minus on the stored value."""
        value = -42.01
        num_a = param.Parameter(value=value)
        assert num_a.__neg__() == -value

    @staticmethod
    def test_pos():
        """``__pos__`` mirrors unary plus on the stored value."""
        value = -42.01
        num_a = param.Parameter(value=value)
        assert num_a.__pos__() == +value

    @staticmethod
    def test_setting_wrong():
        """A non-boolean ``allow_None`` argument raises ValueError.

        Fix: the original version also contained ``del num_a.value`` after
        the raising constructor call — that statement was unreachable (the
        constructor raises first, so ``num_a`` was never bound) and has
        been removed.
        """
        with pytest.raises(ValueError):
            param.Parameter(value=11.01474, allow_None="False")
| 27.480952
| 68
| 0.599203
| 688
| 5,771
| 4.764535
| 0.09157
| 0.087858
| 0.096095
| 0.114399
| 0.816657
| 0.793472
| 0.772727
| 0.748322
| 0.710189
| 0.707139
| 0
| 0.026303
| 0.295096
| 5,771
| 209
| 69
| 27.61244
| 0.779499
| 0.154566
| 0
| 0.594406
| 0
| 0
| 0.009878
| 0
| 0
| 0
| 0
| 0
| 0.216783
| 1
| 0.146853
| false
| 0
| 0.020979
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8492fd3f1f271356c44ebfc81f6321f5668b3208
| 54,720
|
py
|
Python
|
joelib/physics/jethead.py
|
Joefdez/joelib
|
bd0a7b623f0cd7f5f7e459a8b6ac3df6035e6083
|
[
"MIT"
] | 1
|
2019-10-20T22:36:34.000Z
|
2019-10-20T22:36:34.000Z
|
joelib/physics/jethead.py
|
Joefdez/joelib
|
bd0a7b623f0cd7f5f7e459a8b6ac3df6035e6083
|
[
"MIT"
] | null | null | null |
joelib/physics/jethead.py
|
Joefdez/joelib
|
bd0a7b623f0cd7f5f7e459a8b6ac3df6035e6083
|
[
"MIT"
] | null | null | null |
from numpy import *
import joelib.constants.constants as cts
from joelib.physics.synchrotron_afterglow import *
from scipy.stats import binned_statistic
from scipy.interpolate import interp1d
from tqdm import tqdm
class jetHeadUD(adiabatic_afterglow):
    """Uniform ("top-hat") jet afterglow modelled as a head of discrete cells.

    The jet head is partitioned into ``nlayers`` concentric layers; layer
    ``ii`` (0-based) holds ``2*ii + 1`` cells.  Light curves and sky maps
    are built by summing the Doppler-weighted emission of every cell,
    together with the mirror cells of the counter jet.
    """

    ###########################################################################
    # Methods for initializing the cells in the jet head
    ###########################################################################

    def __init__(self, EE, Gam0, nn, epE, epB, pp, DD, steps, evolution,
                 nlayers, joAngle, shell_type='thin', Rb=1.):
        self.nlayers = nlayers                    # number of layers in the partition
        self.__totalCells()                       # sets self.ncells
        self.joAngle = joAngle                    # jet opening angle
        self.angExt = 2.*pi*(1.-cos(joAngle))     # solid angle covered by the jet head
        self.cellSize = self.angExt/self.ncells   # solid angle of each cell
        self.__makeCells()                        # angular positions of the cells
        adiabatic_afterglow.__init__(self, EE, Gam0, nn, epE, epB, pp, DD,
                                     steps, evolution, shell_type, Rb)
        self.ee = EE/self.ncells                  # energy per cell

    def __makeCells(self):
        """Populate the per-cell angular coordinates.

        Fills ``thetas``/``phis`` (cell edges), ``cthetas``/``cphis``
        (cell centres) and ``layer`` (1-based layer index of each cell).
        """
        self.layer = array([])
        self.thetas = array([])
        self.phis = array([])
        self.cthetas = array([])
        self.cphis = array([])
        # Polar angles of the layer edges, measured from the jet axis.
        fac1 = arange(0, self.nlayers+1)/float(self.nlayers)
        self.thetas = 2.*arcsin(fac1*sin(self.joAngle/4.))
        for ii in range(self.nlayers):            # populate the arrays layer by layer
            num = self.cellsInLayer(ii)
            self.phis = append(self.phis, arange(0, num+1)*2.*pi/num)      # phi values of the edges
            self.layer = append(self.layer, ones(num)*(ii+1))              # layer of each cell
            self.cthetas = append(self.cthetas,
                                  ones(num)*0.5*(self.thetas[ii]+self.thetas[ii+1]))  # central thetas
            self.cphis = append(self.cphis, (arange(0, num)+0.5)*2.*pi/num)           # central phis

    def __totalCells(self):
        """Set ``self.ncells`` to the total cell count over all layers."""
        tot = 0
        for ii in range(0, self.nlayers):
            tot = tot + self.cellsInLayer(ii)
        self.ncells = tot

    ###########################################################################
    # Methods used by initializers and for getting different physics
    ###########################################################################

    def cellsInLayer(self, ii):
        """Return the number of cells in layer ``ii`` (0-based)."""
        return (2*ii+1)

    def obsangle(self, theta_obs):
        """Cosine of the angle between each jet-head cell and the observer.

        The observer lies in the yz plane, at angle ``theta_obs`` from the
        jet axis (the z axis).
        """
        u_obs_y, u_obs_z = sin(theta_obs), cos(theta_obs)
        seg_y = sin(self.cthetas)*sin(self.cphis)
        seg_z = cos(self.cthetas)
        return u_obs_y*seg_y + u_obs_z*seg_z

    def obsangle_cj(self, theta_obs):
        """Same as :meth:`obsangle`, for counter-jet cells (theta -> pi-theta)."""
        u_obs_y, u_obs_z = sin(theta_obs), cos(theta_obs)
        seg_y = sin(pi-self.cthetas)*sin(self.cphis)
        seg_z = cos(pi-self.cthetas)
        return u_obs_y*seg_y + u_obs_z*seg_z

    def dopplerFactor(self, cosa, beta):
        """Doppler factor of a segment with speed ``beta`` seen at cos-angle ``cosa``."""
        return (1.-beta)/(1.-beta*cosa)

    def light_curve_adiabatic(self, theta_obs, obsFreqs, tt0, ttf, num, Rb):
        """Forward-shock, reverse-shock and counter-jet light curves
        (adiabatic dynamics).

        Returns ``(tts, light_curve, light_curve_RS, light_curve_CJ)`` where
        the curves are arrays of shape ``(len(obsFreqs), num)`` on the
        observer-frame timeline ``tts`` (seconds, log-spaced from ``tt0`` to
        ``ttf`` days).
        """
        if type(obsFreqs) == float:
            obsFreqs = array([obsFreqs])
        calpha = self.obsangle(theta_obs)
        alpha = arccos(calpha)
        calpha_cj = self.obsangle_cj(theta_obs)
        alpha_cj = arccos(calpha_cj)
        # Latest observable time for the worst-placed cell; clip ttf to it.
        if self.evolution == 'adiabatic':
            max_Tobs = max(obsTime_offAxis_UR(self.RRs, self.TTs, self.Betas, max(alpha)))/cts.sTd
            max_Tobs_cj = max(obsTime_offAxis_UR(self.RRs, self.TTs, self.Betas, max(alpha_cj)))/cts.sTd
        elif self.evolution == 'peer':
            max_Tobs = max(obsTime_offAxis_General(self.RRs, self.TTs, max(alpha)))/cts.sTd
            max_Tobs_cj = max(obsTime_offAxis_General(self.RRs, self.TTs, max(alpha_cj)))/cts.sTd
        if (ttf > max_Tobs or ttf > max_Tobs_cj):
            print("ttf larger than maximum observable time. Adjusting value.")
            ttf = min(max_Tobs, max_Tobs_cj)
        lt0 = log10(tt0*cts.sTd)        # convert to seconds, then use logspace
        ltf = log10(ttf*cts.sTd)        # convert to seconds, then use logspace
        tts = logspace(lt0, ltf+(ltf-lt0)/num, num)   # timeline on which the flux is evaluated
        light_curve = zeros([len(obsFreqs), num])
        light_curve_RS = zeros([len(obsFreqs), num])
        light_curve_CJ = zeros([len(obsFreqs), num])
        for ii in tqdm(range(self.ncells)):
            onAxisTint = interp1d(self.RRs, self.TTs)
            ttobs = obsTime_offAxis_UR(self.RRs, self.TTs, self.Betas, alpha[ii])
            ttobs_cj = obsTime_offAxis_UR(self.RRs, self.TTs, self.Betas, alpha_cj[ii])
            # Timeline samples for which this cell (and its mirror) is observable.
            filTM = where(tts <= max(ttobs))[0]
            filTm = where(tts[filTM] >= min(ttobs))[0]
            filTM_cj = where(tts <= max(ttobs_cj))[0]
            filTm_cj = where(tts[filTM_cj] >= min(ttobs_cj))[0]
            Rint = interp1d(ttobs, self.RRs)
            Robs = Rint(tts[filTM][filTm])
            GamObs = self.GamInt(Robs)
            BetaObs = sqrt(1.-GamObs**(-2.))
            onAxisTobs = onAxisTint(Robs)
            # Forward shock quantities.
            Bfield = sqrt(32.*pi*self.nn*self.epB*cts.mp)*cts.cc*GamObs
            gamMobs, nuMobs = minGam(GamObs, self.epE, self.epB, self.nn, self.pp, Bfield)
            gamCobs, nuCobs = critGam(GamObs, self.epE, self.epB, self.nn, self.pp, Bfield, onAxisTobs)
            Fnuobs = fluxMax(Robs, GamObs, self.nn, Bfield, self.DD)
            # Reverse shock quantities.
            nuM_RS, nuC_RS, Fnu_RS = params_tt_RS(self, onAxisTobs, Rb)
            # Counter-jet quantities.
            Rint_cj = interp1d(ttobs_cj, self.RRs)
            Robs_cj = Rint_cj(tts[filTM_cj][filTm_cj])
            GamObs_cj = self.GamInt(Robs_cj)
            BetaObs_cj = sqrt(1.-GamObs_cj**(-2.))
            onAxisTobs_cj = onAxisTint(Robs_cj)
            Bfield_cj = sqrt(32.*pi*self.nn*self.epB*cts.mp)*cts.cc*GamObs_cj
            gamMobs_cj, nuMobs_cj = minGam(GamObs_cj, self.epE, self.epB, self.nn, self.pp, Bfield_cj)
            gamCobs_cj, nuCobs_cj = critGam(GamObs_cj, self.epE, self.epB, self.nn, self.pp, Bfield_cj, onAxisTobs_cj)
            Fnuobs_cj = fluxMax(Robs_cj, GamObs_cj, self.nn, Bfield_cj, self.DD)
            dopFacs = self.dopplerFactor(calpha[ii], BetaObs)
            afac = self.cellSize/maximum(self.cellSize*ones(num)[filTM][filTm], 2.*pi*(1.-cos(1./GamObs)))
            dopFacs_cj = self.dopplerFactor(calpha_cj[ii], BetaObs_cj)
            afac_cj = self.cellSize/maximum(self.cellSize*ones(num)[filTM_cj][filTm_cj], 2.*pi*(1.-cos(1./GamObs_cj)))
            for freq in obsFreqs:
                # Slow-cooling samples only (nuM <= nuC); the fast-cooling
                # branch was already disabled (commented out) upstream.
                fil1 = where(gamMobs <= gamCobs)[0]
                fil3 = where(nuM_RS <= nuC_RS)[0]
                fil5 = where(nuMobs_cj <= nuCobs_cj)[0]
                freqs = freq/dopFacs        # rest-frame frequencies seen as `freq`
                freqs_cj = freq/dopFacs_cj
                light_curve[obsFreqs == freq, filTM[filTm][fil1]] = light_curve[obsFreqs == freq, filTM[filTm][fil1]] + (
                    afac[fil1] * dopFacs[fil1]**3. * FluxNuSC_arr(self, nuMobs[fil1], nuCobs[fil1], Fnuobs[fil1], freqs[fil1]))*calpha[ii]
                light_curve_RS[obsFreqs == freq, filTM[filTm][fil3]] = light_curve_RS[obsFreqs == freq, filTM[filTm][fil3]] + (
                    afac[fil3] * dopFacs[fil3]**3. * FluxNuSC_arr(self, nuM_RS[fil3], nuC_RS[fil3], Fnu_RS[fil3], freqs[fil3]))*calpha[ii]
                light_curve_CJ[obsFreqs == freq, filTM_cj[filTm_cj][fil5]] = light_curve_CJ[obsFreqs == freq, filTM_cj[filTm_cj][fil5]] + (
                    afac_cj[fil5] * dopFacs_cj[fil5]**3. * FluxNuSC_arr(self, nuMobs_cj[fil5], nuCobs_cj[fil5], Fnuobs_cj[fil5], freqs_cj[fil5]))*calpha_cj[ii]
        return tts, light_curve, light_curve_RS, light_curve_CJ

    def light_curve_peer(self, theta_obs, obsFreqs, tt0, ttf, num, Rb):
        """Same observables as :meth:`light_curve_adiabatic`, using the
        Pe'er (2012) dynamics and the "modified" microphysics helpers.

        Bug fixes relative to the original (all confirmed against the
        parallel, correct code in ``light_curve_adiabatic``):
        counter-jet time masks now use ``ttobs_cj``; ``Robs_cj`` now uses
        the counter-jet interpolator ``Rint_cj``; the counter-jet flux is
        evaluated at ``freqs_cj[fil5]`` (was ``fil3``).
        """
        if type(obsFreqs) == float:
            obsFreqs = array([obsFreqs])
        calpha = self.obsangle(theta_obs)
        alpha = arccos(calpha)
        calpha_cj = self.obsangle_cj(theta_obs)
        alpha_cj = arccos(calpha_cj)
        if self.evolution == 'adiabatic':
            max_Tobs = max(obsTime_offAxis_UR(self.RRs, self.TTs, self.Betas, max(alpha)))/cts.sTd
            max_Tobs_cj = max(obsTime_offAxis_UR(self.RRs, self.TTs, self.Betas, max(alpha_cj)))/cts.sTd
        elif self.evolution == 'peer':
            max_Tobs = max(obsTime_offAxis_General(self.RRs, self.TTs, max(alpha)))/cts.sTd
            max_Tobs_cj = max(obsTime_offAxis_General(self.RRs, self.TTs, max(alpha_cj)))/cts.sTd
        if (ttf > max_Tobs or ttf > max_Tobs_cj):
            print("ttf larger than maximum observable time. Adjusting value. ")
            ttf = min(max_Tobs, max_Tobs_cj)
        lt0 = log10(tt0*cts.sTd)        # convert to seconds, then use logspace
        ltf = log10(ttf*cts.sTd)        # convert to seconds, then use logspace
        tts = logspace(lt0, ltf+(ltf-lt0)/num, num)   # timeline on which the flux is evaluated
        light_curve = zeros([len(obsFreqs), num])
        light_curve_RS = zeros([len(obsFreqs), num])
        light_curve_CJ = zeros([len(obsFreqs), num])
        for ii in tqdm(range(self.ncells)):
            onAxisTint = interp1d(self.RRs, self.TTs)
            ttobs = obsTime_offAxis_General(self.RRs, self.TTs, alpha[ii])
            ttobs_cj = obsTime_offAxis_General(self.RRs, self.TTs, alpha_cj[ii])
            filTM = where(tts <= max(ttobs))[0]
            filTm = where(tts[filTM] >= min(ttobs))[0]
            # BUG FIX: these masks previously used `ttobs` instead of `ttobs_cj`.
            filTM_cj = where(tts <= max(ttobs_cj))[0]
            filTm_cj = where(tts[filTM_cj] >= min(ttobs_cj))[0]
            Rint = interp1d(ttobs, self.RRs)
            Robs = Rint(tts[filTM][filTm])
            GamObs = self.GamInt(Robs)
            BetaObs = sqrt(1.-GamObs**(-2.))
            onAxisTobs = onAxisTint(Robs)
            Rint_cj = interp1d(ttobs_cj, self.RRs)
            # BUG FIX: Robs_cj was interpolated with the forward-jet `Rint`.
            Robs_cj = Rint_cj(tts[filTM_cj][filTm_cj])
            GamObs_cj = self.GamInt(Robs_cj)
            BetaObs_cj = sqrt(1.-GamObs_cj**(-2.))
            onAxisTobs_cj = onAxisTint(Robs_cj)
            # Forward shock quantities (modified, Pe'er-style microphysics).
            Bfield = Bfield_modified(GamObs, BetaObs, self.nn, self.epB)
            gamMobs, nuMobs = minGam_modified(GamObs, self.epE, self.epB, self.nn, self.pp, Bfield, self.Xp)
            gamCobs, nuCobs = critGam_modified(GamObs, self.epE, self.epB, self.nn, self.pp, Bfield, onAxisTobs)
            Fnuobs = fluxMax_modified(Robs, GamObs, self.nn, Bfield, self.DD, self.PhiP)
            # Counter-jet quantities.
            Bfield_cj = Bfield_modified(GamObs_cj, BetaObs_cj, self.nn, self.epB)
            gamMobs_cj, nuMobs_cj = minGam_modified(GamObs_cj, self.epE, self.epB, self.nn, self.pp, Bfield_cj, self.Xp)
            gamCobs_cj, nuCobs_cj = critGam_modified(GamObs_cj, self.epE, self.epB, self.nn, self.pp, Bfield_cj, onAxisTobs_cj)
            Fnuobs_cj = fluxMax_modified(Robs_cj, GamObs_cj, self.nn, Bfield_cj, self.DD, self.PhiP)
            # Reverse shock quantities.
            nuM_RS, nuC_RS, Fnu_RS = params_tt_RS(self, onAxisTobs, Rb)
            # Note: this method applies the analytic (Gam*(1-Beta*cosa))**-3
            # beaming directly, so no `afac` correction factor is needed here
            # (the original computed one and immediately clobbered it).
            dopFacs = self.dopplerFactor(calpha[ii], BetaObs)
            dopFacs_cj = self.dopplerFactor(calpha_cj[ii], BetaObs_cj)
            for freq in obsFreqs:
                # Slow-cooling samples only (fast-cooling branch disabled upstream).
                fil1 = where(gamMobs <= gamCobs)[0]
                fil3 = where(nuM_RS <= nuC_RS)[0]
                fil5 = where(nuMobs_cj <= nuCobs_cj)[0]
                freqs = freq/dopFacs        # rest-frame frequencies seen as `freq`
                freqs_cj = freq/dopFacs_cj
                light_curve[obsFreqs == freq, filTM[filTm][fil1]] = light_curve[obsFreqs == freq, filTM[filTm][fil1]] + (
                    self.cellSize * (GamObs[fil1]*(1.-BetaObs[fil1]*calpha[ii]))**(-3.) * FluxNuSC_arr(self, nuMobs[fil1], nuCobs[fil1], Fnuobs[fil1], freqs[fil1]))
                light_curve_RS[obsFreqs == freq, filTM[filTm][fil3]] = light_curve_RS[obsFreqs == freq, filTM[filTm][fil3]] + (
                    self.cellSize * (GamObs[fil3]*(1.-BetaObs[fil3]*calpha[ii]))**(-3.) * FluxNuSC_arr(self, nuM_RS[fil3], nuC_RS[fil3], Fnu_RS[fil3], freqs[fil3]))
                # BUG FIX: the counter-jet flux was evaluated at freqs_cj[fil3].
                light_curve_CJ[obsFreqs == freq, filTM_cj[filTm_cj][fil5]] = light_curve_CJ[obsFreqs == freq, filTM_cj[filTm_cj][fil5]] + (
                    self.cellSize * (GamObs_cj[fil5]*(1.-BetaObs_cj[fil5]*calpha_cj[ii]))**(-3.) * FluxNuSC_arr(self, nuMobs_cj[fil5], nuCobs_cj[fil5], Fnuobs_cj[fil5], freqs_cj[fil5]))
        return tts, light_curve, light_curve_RS, light_curve_CJ

    def lightCurve_interp(self, theta_obs, obsFreqs, tt0, ttf, num, Rb):
        """Dispatch to the light-curve routine matching ``self.evolution``."""
        if self.evolution == "adiabatic":
            tts, light_curve, light_curve_RS, light_curve_CJ = self.light_curve_adiabatic(theta_obs, obsFreqs, tt0, ttf, num, Rb)
        elif self.evolution == "peer":
            tts, light_curve, light_curve_RS, light_curve_CJ = self.light_curve_peer(theta_obs, obsFreqs, tt0, ttf, num, Rb)
        else:
            # Previously fell through to a NameError on `tts`; fail clearly.
            raise ValueError("Unknown evolution type: %s" % self.evolution)
        return tts, light_curve, light_curve_RS, light_curve_CJ

    def skymap(self, theta_obs, tt_obs, freq, nx, ny, xx0, yy0):
        """Sky-plane positions and fluxes of jet + counter jet at ``tt_obs``.

        Returns ``(im_xxs, im_yys, fluxes, RRs, Gams, calpha, TTs)`` over the
        2*ncells cells (forward jet first, counter jet second).  ``nx, ny,
        xx0, yy0`` are accepted for interface compatibility but unused here.
        """
        calpha = zeros([2*self.ncells])
        alpha = zeros([2*self.ncells])
        calpha[:self.ncells] = self.obsangle(theta_obs)
        calpha[self.ncells:] = self.obsangle_cj(theta_obs)
        alpha = arccos(calpha)
        TTs, RRs, Gams, Betas = zeros(2*self.ncells), zeros(2*self.ncells), zeros(2*self.ncells), zeros(2*self.ncells)
        fluxes = zeros(2*self.ncells)
        # Unit-sphere image-plane coordinates of each cell (scaled by R below).
        im_xxs, im_yys = zeros(2*self.ncells), zeros(2*self.ncells)
        im_xxs[:self.ncells] = -1.*cos(theta_obs)*sin(self.cthetas)*sin(self.cphis) + sin(theta_obs)*cos(self.cthetas)
        im_yys[:self.ncells] = sin(self.cthetas)*cos(self.cphis)
        im_xxs[self.ncells:] = -1.*cos(theta_obs)*sin(pi-self.cthetas)*sin(self.cphis) + sin(theta_obs)*cos(pi-self.cthetas)
        im_yys[self.ncells:] = sin(pi-self.cthetas)*cos(self.cphis)
        indices = where(im_yys > 0)
        if self.evolution == 'adiabatic':
            Tint = interp1d(self.RRs, self.TTs)
            # NOTE(review): `indices` is the tuple returned by where(), so this
            # loop runs once with ii equal to the whole index array — possibly
            # `indices[0]` or `range(self.ncells)` was intended; confirm before
            # relying on the adiabatic sky map.
            for ii in tqdm(indices):
                ttobs = obsTime_offAxis_UR(self.RRs, self.TTs, self.Betas, alpha[ii])
                ttobs_cj = obsTime_offAxis_UR(self.RRs, self.TTs, self.Betas, alpha[ii+self.ncells])
                Rint = interp1d(ttobs, self.RRs)
                Rint_cj = interp1d(ttobs_cj, self.RRs)
                RRs[ii] = Rint(tt_obs)
                RRs[ii+self.ncells] = Rint_cj(tt_obs)
                TTs[ii], TTs[ii+self.ncells] = Tint(RRs[ii]), Tint(RRs[ii+self.ncells])
                Gams[ii], Gams[ii+self.ncells] = self.GamInt(RRs[ii]), self.GamInt(RRs[ii+self.ncells])
            Betas = sqrt(1.-Gams**(-2.))
            Bf = (32.*pi*self.nn*self.epB*cts.mp)**(1./2.) * Gams*cts.cc
            gamM, nuM = minGam(Gams, self.epE, self.epB, self.nn, self.pp, Bf)
            gamC, nuC = critGam(Gams, self.epE, self.epB, self.nn, self.pp, Bf, TTs)
            fMax = fluxMax(RRs, Gams, self.nn, Bf, self.DD)
            dopFacs = self.dopplerFactor(calpha, sqrt(1.-Gams**(-2)))
            afac = self.cellSize/maximum(self.cellSize*ones(len(Gams)), 2.*pi*(1.-cos(1./Gams)))
            obsFreqs = freq/dopFacs
            fluxes = (self.DD**2./(calpha*self.cellSize*RRs**2.)) * afac * dopFacs**3. * FluxNuSC_arr(self, nuM, nuC, fMax, obsFreqs)*1./calpha
        elif self.evolution == 'peer':
            Tint = interp1d(self.RRs, self.TTs)
            for ii in tqdm(range(self.ncells)):
                ttobs = obsTime_offAxis_General(self.RRs, self.TTs, alpha[ii])
                ttobs_cj = obsTime_offAxis_General(self.RRs, self.TTs, alpha[ii+self.ncells])
                Rint, Rint_cj = interp1d(ttobs, self.RRs), interp1d(ttobs_cj, self.RRs)
                RRs[ii], RRs[ii+self.ncells] = Rint(tt_obs), Rint_cj(tt_obs)
                TTs[ii], TTs[ii+self.ncells] = Tint(RRs[ii]), Tint(RRs[ii+self.ncells])
                Gams[ii], Gams[ii+self.ncells] = self.GamInt(RRs[ii]), self.GamInt(RRs[ii+self.ncells])
            Betas = sqrt(1.-Gams**(-2.))
            Bf = Bfield_modified(Gams, Betas, self.nn, self.epB)
            gamM, nuM = minGam_modified(Gams, self.epE, self.epB, self.nn, self.pp, Bf, self.Xp)
            gamC, nuC = critGam_modified(Gams, self.epE, self.epB, self.nn, self.pp, Bf, TTs)
            fMax = fluxMax_modified(RRs, Gams, self.nn, Bf, self.DD, self.PhiP)
            dopFacs = self.dopplerFactor(calpha, sqrt(1.-Gams**(-2)))
            obsFreqs = freq/dopFacs
            fluxes = (self.DD**2./(calpha*self.cellSize*RRs**2.)) * self.cellSize * (Gams*(1.-Betas*calpha))**(-3.) * FluxNuSC_arr(self, nuM, nuC, fMax, obsFreqs)
        # Scale the unit-sphere coordinates by radius -> image-plane positions.
        im_xxs = RRs*im_xxs
        im_yys = RRs*im_yys
        return im_xxs, im_yys, fluxes, RRs, Gams, calpha, TTs
class jetHeadGauss(jetHeadUD):
def __init__(self, EE, Gam0, nn, epE, epB, pp, DD, steps, evolution, nlayers, joAngle, coAngle, shell_type='thin', Rb=1.):
    """Structured (Gaussian) jet head.

    Unlike ``jetHeadUD``, ``EE`` is the total energy and ``Gam0`` the
    central (on-axis) Lorentz factor; per-cell energies and Lorentz
    factors fall off with angle (see ``__energies_and_LF``).  ``coAngle``
    is the core angle of the structure profile.
    """
    # In this case, EE refers to the total energy and Gam0 to the central Gam0 value
    self.coAngle = coAngle
    jetHeadUD.__init__(self, EE, Gam0, nn, epE, epB, pp, DD, steps, evolution, nlayers, joAngle, shell_type, Rb)
    self.__energies_and_LF()
    # Per-cell deceleration radius and time.
    # NOTE(review): both branches below are currently identical — confirm
    # whether the 'adiabatic' case was meant to use one of the commented
    # alternative Td expressions instead.
    if self.evolution == 'adiabatic':
        self.cell_Rds = (3./(4.*pi) * 1./(cts.cc**2.*cts.mp) *
                         self.cell_EEs/(self.nn*self.cell_Gam0s**2.))**(1./3.)
        self.cell_Tds = self.cell_Rds/(cts.cc*self.cell_Beta0s) * (1.-self.cell_Beta0s)
        #self.cell_Tds = self.cell_Rds/(2.*cts.cc*self.cell_Gam0s**2.)
        #self.Rd/(2.*self.Gam0**2 * cts.cc)
    elif self.evolution == 'peer':
        self.cell_Rds = (3./(4.*pi) * 1./(cts.cc**2.*cts.mp) *
                         self.cell_EEs/(self.nn*self.cell_Gam0s**2.))**(1./3.)
        self.cell_Tds = self.cell_Rds/(cts.cc*self.cell_Beta0s) * (1.-self.cell_Beta0s)
    print("Calculating dynamical evolution")
    self.__evolve()
    print("Calculating reverse shock parmeters")
    self.__peakParamsRS_struc()
def __energies_and_LF(self):
    """Assign per-cell energies and initial Lorentz factors / velocities.

    Gamma follows a Gaussian profile exp(-theta^2/(2*coAngle^2)), while the
    energy profile below uses exp(-theta^2/coAngle^2) (no factor of 2) —
    the original comment marks it as provisional ("Just for texting", i.e.
    testing); confirm the asymmetry before production use.
    """
    #AngFacs = exp(-1.*self.cthetas**2./(2.*self.coAngle**2.))
    self.cell_EEs = self.EE * exp(-1.*self.cthetas**2./(self.coAngle**2.))  # Just for texting
    self.cell_Gam0s = 1.+(self.Gam0-1)*exp(-1.*self.cthetas**2./(2.*self.coAngle**2.))
    self.cell_Beta0s = sqrt(1.-(self.cell_Gam0s)**(-2.))
def __evolve(self):
    """Run the dynamical evolution and cache R, Gamma, beta, T and B-field.

    Selects the structured-jet evolver matching ``self.evolution`` and
    stores the results on ``self.RRs/Gams/Betas/TTs/Bfield``.
    """
    if self.evolution == 'peer':
        self.RRs, self.Gams, self.Betas = self.evolve_relad_struct()
        self.TTs = self.obsTime_onAxis_struct()
        self.Bfield = Bfield_modified(self.Gams, self.Betas, self.nn, self.epB)
    elif self.evolution == 'adiabatic':
        self.RRs, self.Gams, self.Betas = self.evolve_ad_struct()
        self.TTs = self.obsTime_onAxis_struct()
        # Ultra-relativistic field estimate, B proportional to Gamma.
        self.Bfield = (32.*pi*cts.mp*self.epB*self.nn)**(1./2.)*self.Gams*cts.cc
def __peakParamsRS_struc(self):
    """Peak reverse-shock spectral parameters for every cell.

    Evaluates nuM, nuC and Fnu at each cell's deceleration radius/time,
    then stores nuM scaled down by Gam0**2 and Fnu scaled up by Gam0 in
    the ``RSpeak_*_struc`` attributes (nuC is stored unscaled).

    NOTE(review): only ``shell_type == 'thin'`` is handled; any other
    shell type leaves the arrays at zero — confirm that is intended.
    """
    RSpeak_nuM_struc = zeros(self.ncells)
    RSpeak_nuC_struc = zeros(self.ncells)
    RSpeak_Fnu_struc = zeros(self.ncells)
    if self.shell_type == 'thin':
        print("Setting up thin shell")
        for ii in tqdm(range(self.ncells)):
            # Deceleration radius and time of this cell.
            Rd, Td = self.cell_Rds[ii], self.cell_Tds[ii]
            if self.evolution == 'peer':
                # Lorentz factor at the deceleration radius (shared R grid).
                GamsInt = interp1d(self.RRs[:], self.Gams[:, ii])
                Gam0 = GamsInt(Rd)
                Beta0 = sqrt(1.-Gam0**(-2.))
                Bf = Bfield_modified(Gam0, Beta0, self.nn, self.epB)
                gamM, nuM = minGam_modified(Gam0, self.epE, self.epB, self.nn, self.pp, Bf, self.Xp)
                gamC, nuC = critGam_modified(Gam0, self.epE, self.epB, self.nn, self.pp, Bf, Td)
                Fnu = fluxMax_modified(Rd, Gam0, self.nn, Bf, self.DD, self.PhiP)
            elif self.evolution == 'adiabatic':
                # Adiabatic evolver keeps a per-cell R grid.
                GamsInt = interp1d(self.RRs[:, ii], self.Gams[:, ii])
                Gam0 = GamsInt(Rd)
                Bf = (32.*pi*cts.mp*self.epB*self.nn)**(1./2.)*Gam0*cts.cc
                gamM, nuM = minGam(Gam0, self.epE, self.epB, self.nn, self.pp, Bf)
                gamC, nuC = critGam(Gam0, self.epE, self.epB, self.nn, self.pp, Bf, Td)
                Fnu = fluxMax(Rd, Gam0, self.nn, Bf, self.DD)
            # Store RS-frame peak values: nuM / Gam0**2, nuC unchanged, Gam0 * Fnu.
            RSpeak_nuM_struc[ii] = nuM/(Gam0**2)
            RSpeak_nuC_struc[ii] = nuC
            RSpeak_Fnu_struc[ii] = Gam0*Fnu
    self.RSpeak_nuM_struc = RSpeak_nuM_struc    #self.Rb**(1./2.)*RSpeak_nuM_struc
    self.RSpeak_nuC_struc = RSpeak_nuC_struc    #self.Rb**(-3./2.)*RSpeak_nuC_struc
    self.RSpeak_Fnu_struc = RSpeak_Fnu_struc    #self.Rb**(1./2.)*RSpeak_Fnu_struc
def evolve_relad_struct(self):
    """
    Evolution following Pe'er 2012: adiabatic expansion into a cold,
    uniform ISM using conservation of energy in relativistic form.  This
    solution transitions smoothly from the ultra-relativistic to the
    Newtonian regime.  Modified for a structured jet: each layer is
    integrated once and the solution is shared by all cells of that layer.

    Returns:
        RRs   -- 1D radius grid (shared by all cells).
        Gams  -- 2D array (len(RRs), ncells) of Lorentz factors.
        Betas -- 2D array of the corresponding velocities (units of c).

    Fix: removed a bare, side-effect-free expression statement
    (``self.cell_Gam0s[self.layer==ii+1][0]``) that the original computed
    and discarded inside the layer loop.
    """
    Gam0 = self.Gam0
    Rl = self.Rd * Gam0**(2./3.)
    RRs = logspace(log10(self.Rd/1000.), log10(Rl)+3., self.steps+1)  # radius grid
    # Swept-up ISM mass inside radius R.
    MMs = 4./3. * pi*RRs**3. * self.nn * cts.mp
    print("Calculating dynamical evolution for each layer")
    for ii in tqdm(range(self.nlayers)):
        # Initial conditions for the layer (its cells are identical).
        MM0 = self.cell_EEs[self.layer == ii+1][0]/(self.cell_Gam0s[self.layer == ii+1][0]*cts.cc**2.)
        GamEv = zeros([len(RRs)])
        GamEv[0] = self.cell_Gam0s[self.layer == ii+1][0]
        # Integrate dGamma/d(log m) along the mass grid with RK4.
        for jj in range(1, len(GamEv)):
            GamEv[jj] = rk4(dgdm_mod, MM0, log10(MMs[jj-1]), GamEv[jj-1], (log10(MMs[jj])-log10(MMs[jj-1])))
        # Share the layer solution with each of its cells.
        if ii == 0:
            Gams = array([GamEv, ]).T
        else:
            GamEv = array([GamEv]*self.cellsInLayer(ii)).T
            Gams = concatenate([Gams, GamEv], axis=1)
    Betas = sqrt(1.-1./Gams**2.)
    return RRs, Gams, Betas
def evolve_ad_struct(self):
"""
Evolution following simple energy conservation for an adiabatically expanding relativistic shell. Same scaling as
Blanford-Mckee blastwave solution. This calculation is only valid in ultrarelativstic phase.
"""
Gam = self.Gam0
GamSD = 1.021
Rsd = Gam**(2./3.) *self.Rd / GamSD # Radius at Lorentz factor=1.005 -> after this point use Sedov-Taylor scaling
Rl = self.Rd * self.Gam0**(2./3.)
#RRs = logspace(log10(self.Rd/100.), log10(Rl), self.steps+1) #10
RRs = zeros([self.steps+1, self.ncells])
Gams = zeros([self.steps+1, self.ncells])
Betas = zeros([self.steps+1, self.ncells])
Gams[0,:] = self.cell_Gam0s
for ii in range(self.ncells):
RRs[:,ii] = logspace(log10(self.cell_Rds[ii]/100.), log10(0.9999*self.cell_Rds[ii] * self.cell_Gam0s[ii]**(2./3.)), self.steps+1) # All start at same point
Gams[RRs[:,ii]<=self.cell_Rds[ii],ii] = self.cell_Gam0s[ii]
Gams[RRs[:,ii]>self.cell_Rds[ii], ii] = (self.cell_Rds[ii]/RRs[RRs[:,ii]>self.cell_Rds[ii],ii])**(3./2.) * self.cell_Gam0s[ii]
#Gams[RRs>=Rsd] = 1./sqrt( 1.-(Rsd/RRs[RRs>=Rsd])**(6.)*(1.-1./(Gams[(RRs>jet.Rd) & (RRs<Rsd)][-1]**2.)))
#Gams[RRs>=jet.Rd] = odeint(jet.dgdr, jet.Gam0, RRs[RRs>=jet.Rd])[:,0]
#Gams[RRs>=jet.Rd] = odeint(jet.dgdr, jet.Gam0, RRs[RRs>=jet.Rd])[:,0]
Betas[RRs[:,ii]<=self.cell_Rds[ii],ii] = sqrt(1.-(1./self.cell_Gam0s[ii])**2.)
Betas[RRs[:,ii]>self.cell_Rds[ii], ii] = sqrt(1.-(1./Gams[RRs[:,ii]>self.cell_Rds[ii], ii])**2.)
Betas[-1,:] = 0.
#Gams[Gams<=1.] = 1.
return RRs, Gams, Betas
def obsTime_onAxis_struct(self):
    """
    On-axis observer times calculated for each individual cell.

    Times are computed once per layer and replicated across the cells of
    that layer; the result has one column per cell.
    """
    print("Calculating on-axis observerd time for each cell")
    if self.evolution == "adiabatic":
        # Per-cell radial grids: integrate along each layer's own R column.
        for layer in range(self.nlayers):
            if layer==0:
                # NOTE(review): layer 0 uses column 0 directly and is added
                # only once, while later layers are replicated
                # cellsInLayer(layer) times -- assumes the innermost layer
                # holds exactly one cell; confirm against the cell layout.
                TTs = obsTime_onAxis_adiabatic(self.RRs[:, layer],self.Betas[:, layer])
            else:
                layerTime = obsTime_onAxis_adiabatic(self.RRs[:, self.layer==layer+1][:,0], self.Betas[:, self.layer==layer+1][:,0])
                for cell in range(self.cellsInLayer(layer)):
                    TTs = column_stack((TTs, layerTime))
    elif self.evolution == "peer":
        # Shared radial grid self.RRs; per-layer Gamma(R) is integrated.
        for layer in tqdm(range(self.nlayers)):
            if layer==0:
                TTs = obsTime_onAxis_integrated(self.RRs, self.Gams[:, layer], self.Betas[:, layer])
                TTs = array([TTs,]).T
            else:
                layerTime = obsTime_onAxis_integrated(self.RRs, self.Gams[:, self.layer==layer+1][:,0],
                                self.Betas[:, self.layer==layer+1][:,0])
                # Replicate the layer's time column over its cells.
                layerTime = array([layerTime]*self.cellsInLayer(layer)).T
                TTs = concatenate([TTs, layerTime], axis=1)
    return TTs
def params_tt_RS(self, tt, ii, Rb):
    """
    Reverse-shock spectral parameters of cell ii at observer times tt.

    Scales the stored peak values (self.RSpeak_*_struc[ii]) with the
    power laws t^6, t^-2, t^(3/2) before the shock-crossing time
    self.cell_Tds[ii], and t^(-54/35), t^(4/35), t^(-34/35) after it.

    Parameters
    ----------
    tt : scalar or 1-D numpy array of observer times (same units as cell_Tds).
    ii : cell index.
    Rb : reverse-shock magnetization parameter; scales the returned values.

    Returns
    -------
    (nuM, nuC, fluxMax) arrays; flux in Jy (per the original comment).
    """
    # BUG FIX: the original guard was `type(tt) == 'float'`, comparing a
    # type object to a string -- never true, so scalar input crashed below.
    if isinstance(tt, (int, float)):
        tt = array([tt])
    # Indices before / after the shock-crossing time
    fil1, fil2 = where(tt<=self.cell_Tds[ii])[0], where(tt>self.cell_Tds[ii])[0]
    nuM = zeros(len(tt))
    nuC = zeros(len(tt))
    fluxMax = zeros(len(tt))
    # Pre-crossing scalings
    nuM[fil1] = self.RSpeak_nuM_struc[ii]*(tt[fil1]/self.cell_Tds[ii])**(6.)
    nuC[fil1] = self.RSpeak_nuC_struc[ii]*(tt[fil1]/self.cell_Tds[ii])**(-2.)
    fluxMax[fil1] = self.RSpeak_Fnu_struc[ii]*(tt[fil1]/self.cell_Tds[ii])**(3./2.)
    # Post-crossing scalings
    nuM[fil2] = self.RSpeak_nuM_struc[ii]*(tt[fil2]/self.cell_Tds[ii])**(-54./35.)
    nuC[fil2] = self.RSpeak_nuC_struc[ii]*(tt[fil2]/self.cell_Tds[ii])**(4./35.)
    fluxMax[fil2] = self.RSpeak_Fnu_struc[ii]*(tt[fil2]/self.cell_Tds[ii])**(-34./35.)
    return Rb**(1./2.)*nuM, Rb**(-3./2.)*nuC, Rb**(1./2.)*fluxMax
def light_curve_adiabatic(self, theta_obs, obsFreqs, tt0, ttf, num, Rb):
    """
    Light curves for the adiabatic (ultra-relativistic) evolution.

    Sums per-cell forward-shock, reverse-shock and counter-jet contributions
    (slow-cooling spectrum only; the fast-cooling branch was disabled in the
    original code) on a logarithmic observer-time grid.

    Parameters
    ----------
    theta_obs : observer angle (rad).
    obsFreqs : float or array of observed frequencies (Hz).
    tt0, ttf : first/last observer time in days.
    num : number of time samples.
    Rb : reverse-shock magnetization parameter (passed to params_tt_RS).

    Returns
    -------
    (tts, light_curve, light_curve_RS, light_curve_CJ); tts in seconds.
    """
    if type(obsFreqs)==float:
        obsFreqs = array([obsFreqs])
    calpha = self.obsangle(theta_obs)
    alpha = arccos(calpha)
    # Observer angle for the counter-jet
    calpha_cj = self.obsangle_cj(theta_obs)
    alpha_cj = arccos(calpha_cj)
    # Latest observable time, from the last cell's final grid point.
    # NOTE(review): max_Tobs is in seconds while ttf is in days here --
    # the comparison below looks unit-inconsistent; confirm upstream.
    Tfil = self.TTs[:,-1]== max(self.TTs[:,-1])
    max_Tobs = self.RRs[Tfil, -1]/(self.Betas[Tfil,-1]*cts.cc) * (1.-self.Betas[Tfil,-1]*cos(max(alpha)))
    if ttf>max_Tobs:
        print("ttf larger than maximum observable time. Adjusting value. ")
        ttf = max_Tobs
    lt0 = log10(tt0*cts.sTd)        # days -> seconds, then log space
    ltf = log10(ttf*cts.sTd)
    tts = logspace(lt0, ltf+(ltf-lt0)/num, num)     # observer-time grid
    light_curve = zeros([len(obsFreqs), num])
    light_curve_RS = zeros([len(obsFreqs), num])
    light_curve_CJ = zeros([len(obsFreqs), num])
    for ii in tqdm(range(self.ncells)):
        RRs = self.RRs[:,ii]
        # --- Jet side: observable time window and interpolators ---
        ttobs = obsTime_offAxis_UR(self.RRs[:,ii], self.TTs[:,ii], self.Betas[:,ii], alpha[ii])
        filTM = where(tts<=max(ttobs))[0]
        filTm = where(tts[filTM]>=min(ttobs))[0]
        Rint = interp1d(ttobs, RRs)
        Gamint = interp1d(RRs, self.Gams[:,ii])
        Robs = Rint(tts[filTM][filTm])
        GamObs = Gamint(Robs)
        BetaObs = sqrt(1.-GamObs**(-2.))
        dopFacs = self.dopplerFactor(calpha[ii], BetaObs)
        # Beaming correction: cell size vs. visible 1/Gamma patch
        afac = self.cellSize/maximum(self.cellSize*ones(num)[filTM][filTm], 2.*pi*(1.-cos(1./GamObs)))
        onAxisTobs = dopFacs*tts[filTM][filTm]
        # Forward shock spectral parameters
        Bfield = sqrt(32.*pi*self.nn*self.epB*cts.mp)*cts.cc*GamObs
        gamMobs, nuMobs = minGam(GamObs, self.epE, self.epB, self.nn, self.pp, Bfield)
        gamCobs, nuCobs = critGam(GamObs, self.epE, self.epB, self.nn, self.pp, Bfield, onAxisTobs)
        Fnuobs = fluxMax(Robs, GamObs, self.nn, Bfield, self.DD)
        # Reverse shock spectral parameters
        nuM_RS, nuC_RS, Fnu_RS = self.params_tt_RS(onAxisTobs, ii, Rb)
        # --- Counter-jet side ---
        ttobs_cj = obsTime_offAxis_UR(self.RRs[:,ii], self.TTs[:,ii], self.Betas[:,ii], alpha_cj[ii])
        filTM_cj = where(tts<=max(ttobs_cj))[0]
        # BUG FIX: was tts[filTM] (jet-side mask); use the counter-jet mask.
        filTm_cj = where(tts[filTM_cj]>=min(ttobs_cj))[0]
        # BUG FIX: Rint_cj was created but the jet-side Rint was used,
        # evaluating the jet interpolator outside its domain.
        Rint_cj = interp1d(ttobs_cj, RRs)
        Robs_cj = Rint_cj(tts[filTM_cj][filTm_cj])
        GamObs_cj = Gamint(Robs_cj)
        if len(GamObs_cj)==0: continue
        BetaObs_cj = sqrt(1.-GamObs_cj**(-2.))
        dopFacs_cj = self.dopplerFactor(calpha_cj[ii], BetaObs_cj)
        afac_cj = self.cellSize/maximum(self.cellSize*ones(num)[filTM_cj][filTm_cj], 2.*pi*(1.-cos(1./GamObs_cj)))
        onAxisTobs_cj = dopFacs_cj*tts[filTM_cj][filTm_cj]
        Bfield_cj = sqrt(32.*pi*self.nn*self.epB*cts.mp)*cts.cc*GamObs_cj
        gamMobs_cj, nuMobs_cj = minGam(GamObs_cj, self.epE, self.epB, self.nn, self.pp, Bfield_cj)
        gamCobs_cj, nuCobs_cj = critGam(GamObs_cj, self.epE, self.epB, self.nn, self.pp, Bfield_cj, onAxisTobs_cj)
        Fnuobs_cj = fluxMax(Robs_cj, GamObs_cj, self.nn, Bfield_cj, self.DD)
        for freq in obsFreqs:
            # Slow-cooling samples only (fast-cooling branch disabled).
            fil1 = where(gamMobs<=gamCobs)[0]
            fil3 = where(nuM_RS<=nuC_RS)[0]
            fil5 = where(nuMobs_cj<=nuCobs_cj)[0]
            freqs = freq/dopFacs        # rest-frame frequencies
            freqs_cj = freq/dopFacs_cj
            light_curve[obsFreqs==freq, filTM[filTm][fil1]] = light_curve[obsFreqs==freq, filTM[filTm][fil1]] + (
                afac[fil1] * dopFacs[fil1]**3. * FluxNuSC_arr(self, nuMobs[fil1], nuCobs[fil1], Fnuobs[fil1], freqs[fil1]))*calpha[ii]
            light_curve_RS[obsFreqs==freq, filTM[filTm][fil3]] = light_curve_RS[obsFreqs==freq, filTM[filTm][fil3]] + (
                afac[fil3] * dopFacs[fil3]**3. * FluxNuSC_arr(self, nuM_RS[fil3], nuC_RS[fil3], Fnu_RS[fil3], freqs[fil3]))*calpha[ii]
            light_curve_CJ[obsFreqs==freq, filTM_cj[filTm_cj][fil5]] = light_curve_CJ[obsFreqs==freq, filTM_cj[filTm_cj][fil5]] + (
                afac_cj[fil5] * dopFacs_cj[fil5]**3. * FluxNuSC_arr(self, nuMobs_cj[fil5], nuCobs_cj[fil5], Fnuobs_cj[fil5], freqs_cj[fil5]))*calpha_cj[ii]
    return tts, light_curve, light_curve_RS, light_curve_CJ
def light_curve_peer(self, theta_obs, obsFreqs, tt0, ttf, num, Rb):
    """
    Light curves for the Pe'er ('peer') dynamical evolution.

    Sums per-cell forward-shock, reverse-shock and counter-jet contributions
    (slow-cooling spectrum only; the fast-cooling branch was disabled in the
    original code) on a logarithmic observer-time grid.

    Parameters
    ----------
    theta_obs : observer angle (rad).
    obsFreqs : float or array of observed frequencies (Hz).
    tt0, ttf : first/last observer time in days.
    num : number of time samples.
    Rb : reverse-shock magnetization parameter (passed to params_tt_RS).

    Returns
    -------
    (tts, light_curve, light_curve_RS, light_curve_CJ); tts in seconds.
    """
    if type(obsFreqs)==float:
        obsFreqs = array([obsFreqs])
    calpha = self.obsangle(theta_obs)
    alpha = arccos(calpha)
    # Observer angle for the counter-jet
    calpha_cj = self.obsangle_cj(theta_obs)
    alpha_cj = arccos(calpha_cj)
    # Clip the requested final time to the latest observable time
    max_Tobs = max(obsTime_offAxis_General(self.RRs, self.TTs[:,-1], max(alpha)))
    if ttf>max_Tobs:
        print("ttf larger than maximum observable time. Adjusting value.")
        ttf = max_Tobs
    lt0 = log10(tt0*cts.sTd)        # days -> seconds, then log space
    ltf = log10(ttf*cts.sTd)
    tts = logspace(lt0, ltf+(ltf-lt0)/num, num)     # observer-time grid
    light_curve = zeros([len(obsFreqs), num])
    light_curve_RS = zeros([len(obsFreqs), num])
    light_curve_CJ = zeros([len(obsFreqs), num])
    RRs = self.RRs
    for ii in tqdm(range(self.ncells)):
        # --- Jet side: observable time window and interpolators ---
        ttobs = obsTime_offAxis_General(self.RRs, self.TTs[:,ii], alpha[ii])
        filTM = where(tts<=max(ttobs))[0]
        filTm = where(tts[filTM]>=min(ttobs))[0]
        Rint = interp1d(ttobs, RRs)
        Gamint = interp1d(RRs, self.Gams[:,ii])
        Robs = Rint(tts[filTM][filTm])
        GamObs = Gamint(Robs)
        BetaObs = sqrt(1.-GamObs**(-2.))
        if len(GamObs)==0: continue
        onAxisTint = interp1d(RRs, self.TTs[:,ii])
        onAxisTobs = onAxisTint(Robs)
        # Forward shock spectral parameters (modified, Pe'er-style)
        Bfield = Bfield_modified(GamObs, BetaObs, self.nn, self.epB)
        gamMobs, nuMobs = minGam_modified(GamObs, self.epE, self.epB, self.nn, self.pp, Bfield, self.Xp)
        gamCobs, nuCobs = critGam_modified(GamObs, self.epE, self.epB, self.nn, self.pp, Bfield, onAxisTobs)
        Fnuobs = fluxMax_modified(Robs, GamObs, self.nn, Bfield, self.DD, self.PhiP)
        # Reverse shock spectral parameters
        nuM_RS, nuC_RS, Fnu_RS = self.params_tt_RS(onAxisTobs, ii, Rb)
        dopFacs = self.dopplerFactor(calpha[ii], sqrt(1.-GamObs**(-2)))
        # --- Counter-jet side ---
        ttobs_cj = obsTime_offAxis_General(self.RRs, self.TTs[:,ii], alpha_cj[ii])
        filTM_cj = where(tts<=max(ttobs_cj))[0]
        filTm_cj = where(tts[filTM_cj]>=min(ttobs_cj))[0]
        # BUG FIX: Rint_cj was created but the jet-side Rint was used,
        # evaluating the jet interpolator outside its domain.
        Rint_cj = interp1d(ttobs_cj, RRs)
        Robs_cj = Rint_cj(tts[filTM_cj][filTm_cj])
        GamObs_cj = Gamint(Robs_cj)
        if len(GamObs_cj)==0: continue
        BetaObs_cj = sqrt(1.-GamObs_cj**(-2.))
        onAxisTobs_cj = onAxisTint(Robs_cj)
        Bfield_cj = Bfield_modified(GamObs_cj, BetaObs_cj, self.nn, self.epB)
        gamMobs_cj, nuMobs_cj = minGam_modified(GamObs_cj, self.epE, self.epB, self.nn, self.pp, Bfield_cj, self.Xp)
        gamCobs_cj, nuCobs_cj = critGam_modified(GamObs_cj, self.epE, self.epB, self.nn, self.pp, Bfield_cj, onAxisTobs_cj)
        Fnuobs_cj = fluxMax_modified(Robs_cj, GamObs_cj, self.nn, Bfield_cj, self.DD, self.PhiP)
        dopFacs_cj = self.dopplerFactor(calpha_cj[ii], sqrt(1.-GamObs_cj**(-2)))
        for freq in obsFreqs:
            # Slow-cooling samples only (fast-cooling branch disabled).
            fil1 = where(gamMobs<=gamCobs)[0]
            fil3 = where(nuM_RS<=nuC_RS)[0]
            freqs = freq/dopFacs        # rest-frame frequencies
            light_curve[obsFreqs==freq, filTM[filTm][fil1]] = light_curve[obsFreqs==freq, filTM[filTm][fil1]] + (
                self.cellSize*(GamObs[fil1]*(1.-BetaObs[fil1]*calpha[ii]))**(-3.) * FluxNuSC_arr(self, nuMobs[fil1], nuCobs[fil1], Fnuobs[fil1], freqs[fil1]))
            light_curve_RS[obsFreqs==freq, filTM[filTm][fil3]] = light_curve_RS[obsFreqs==freq, filTM[filTm][fil3]] + (
                self.cellSize*(GamObs[fil3]*(1.-BetaObs[fil3]*calpha[ii]))**(-3.) * FluxNuSC_arr(self, nuM_RS[fil3], nuC_RS[fil3], Fnu_RS[fil3], freqs[fil3]))
            fil5 = where(nuMobs_cj<=nuCobs_cj)[0]
            freqs_cj = freq/dopFacs_cj
            light_curve_CJ[obsFreqs==freq, filTM_cj[filTm_cj][fil5]] = light_curve_CJ[obsFreqs==freq, filTM_cj[filTm_cj][fil5]] + (
                self.cellSize*(GamObs_cj[fil5]*(1.-BetaObs_cj[fil5]*calpha_cj[ii]))**(-3.) * FluxNuSC_arr(self,
                    nuMobs_cj[fil5], nuCobs_cj[fil5], Fnuobs_cj[fil5], freqs_cj[fil5]))
    return tts, light_curve, light_curve_RS, light_curve_CJ
def lightCurve_interp(self, theta_obs, obsFreqs, tt0, ttf, num, Rb):
    """
    Dispatch the light-curve calculation based on the evolution model.

    Delegates to light_curve_adiabatic or light_curve_peer and returns their
    (tts, light_curve, light_curve_RS, light_curve_CJ) tuple unchanged.

    Raises
    ------
    ValueError
        If self.evolution is neither 'adiabatic' nor 'peer' (the original
        code raised an opaque UnboundLocalError in that case).
    """
    if self.evolution == "adiabatic":
        return self.light_curve_adiabatic(theta_obs, obsFreqs, tt0, ttf, num, Rb)
    elif self.evolution == "peer":
        return self.light_curve_peer(theta_obs, obsFreqs, tt0, ttf, num, Rb)
    raise ValueError("Unknown evolution model: %r" % (self.evolution,))
def skymap(self, theta_obs, tt_obs, freq, nx, ny, xx0, yy0):
    """
    Per-cell sky positions and fluxes of jet + counter-jet at one observer
    time tt_obs and one frequency freq.

    Index convention: [:ncells] are jet cells, [ncells:] the mirrored
    counter-jet cells.

    NOTE(review): nx, ny, xx0, yy0 are accepted but never used in this
    method -- presumably the image binning happens in the caller; confirm.

    Returns
    -------
    (im_xxs, im_yys, fluxes, fluxes2, RRs, Gams, calpha, TTs)
    """
    calpha = zeros([2*self.ncells])
    alpha = zeros([2*self.ncells])
    calpha[:self.ncells] = self.obsangle(theta_obs)
    calpha[self.ncells:] = self.obsangle_cj(theta_obs)
    alpha = arccos(calpha)
    TTs, RRs, Gams, Betas = zeros(2*self.ncells), zeros(2*self.ncells), zeros(2*self.ncells), zeros(2*self.ncells)
    #nuMs, nuCs, fluxes = zeros(2.*self.ncells), zeros(2.*self.ncells), zeros(2.*self.ncells)
    fluxes = zeros(2*self.ncells)
    # Unit image-plane coordinates; counter-jet uses the mirrored polar
    # angle pi - ctheta.
    im_xxs, im_yys = zeros(2*self.ncells), zeros(2*self.ncells)
    im_xxs[:self.ncells] = -1.*cos(theta_obs)*sin(self.cthetas)*sin(self.cphis) + sin(theta_obs)*cos(self.cthetas)
    im_yys[:self.ncells] = sin(self.cthetas)*cos(self.cphis)
    im_xxs[self.ncells:] = -1.*cos(theta_obs)*sin(pi-self.cthetas)*sin(self.cphis) + sin(theta_obs)*cos(pi-self.cthetas)
    im_yys[self.ncells:] = sin(pi-self.cthetas)*cos(self.cphis)
    if self.evolution == 'adiabatic':
        # Per-cell radial grids: invert t_obs(R) for each cell, then compute
        # the spectrum vectorised over all 2*ncells entries after the loop.
        # NOTE(review): this branch never assigns fluxes2, so the return
        # statement below raises NameError for 'adiabatic' evolution --
        # confirm and fix upstream before relying on this path.
        for ii in tqdm(range(self.ncells)):
            Tint = interp1d(self.RRs[:,ii], self.TTs[:,ii])
            ttobs = obsTime_offAxis_UR(self.RRs[:,ii], self.TTs[:,ii], self.Betas[:,ii], alpha[ii])
            ttobs_cj = obsTime_offAxis_UR(self.RRs[:,ii], self.TTs[:,ii], self.Betas[:,ii], alpha[ii+self.ncells])
            Rint = interp1d(ttobs, self.RRs[:,ii])
            Rint_cj = interp1d(ttobs_cj, self.RRs[:,ii])
            RRs[ii] = Rint(tt_obs)
            RRs[ii+self.ncells] = Rint_cj(tt_obs)
            TTs[ii], TTs[ii+self.ncells] = Tint(RRs[ii]), Tint(RRs[ii+self.ncells])
            GamInt = interp1d(self.RRs[:,ii], self.Gams[:,ii])
            Gams[ii], Gams[ii+self.ncells] = GamInt(RRs[ii]), GamInt(RRs[ii+self.ncells])
        Betas = sqrt(1.-Gams**(-2.))
        Bf = (32.*pi*self.nn*self.epB*cts.mp)**(1./2.) * Gams*cts.cc
        gamM, nuM = minGam(Gams, self.epE, self.epB, self.nn, self.pp, Bf)
        gamC, nuC = critGam(Gams, self.epE, self.epB, self.nn, self.pp, Bf, TTs)
        flux = fluxMax(RRs, Gams, self.nn, Bf, self.DD)
        #fluxMax[Gams<=2] = 0.
        dopFacs = self.dopplerFactor(calpha, sqrt(1.-Gams**(-2)))
        afac = self.cellSize/maximum(self.cellSize, 2.*pi*(1.-cos(1./Gams)))
        obsFreqs = freq/dopFacs
        fluxes = (self.DD**2./(abs(calpha)*self.cellSize*RRs**2.)) * afac * dopFacs**3. * FluxNuSC_arr(self, nuM, nuC, flux, obsFreqs)
    elif self.evolution == 'peer':
        # Shared radial grid: same inversion, Pe'er-modified spectrum.
        for ii in tqdm(range(self.ncells)):
            Tint = interp1d(self.RRs, self.TTs[:,ii])
            ttobs = obsTime_offAxis_General(self.RRs, self.TTs[:,ii], alpha[ii])
            ttobs_cj = obsTime_offAxis_General(self.RRs, self.TTs[:,ii], alpha[ii+self.ncells])
            Rint, Rint_cj = interp1d(ttobs, self.RRs), interp1d(ttobs_cj, self.RRs)
            RRs[ii], RRs[ii+self.ncells] = Rint(tt_obs), Rint_cj(tt_obs)
            TTs[ii], TTs[ii+self.ncells] = Tint(RRs[ii]), Tint(RRs[ii+self.ncells])
            GamInt = interp1d(self.RRs, self.Gams[:,ii])
            Gams[ii], Gams[ii+self.ncells] = GamInt(RRs[ii]), GamInt(RRs[ii+self.ncells])
        Betas = sqrt(1.-Gams**(-2.))
        Bf = Bfield_modified(Gams, Betas, self.nn, self.epB)
        gamM, nuM = minGam_modified(Gams, self.epE, self.epB, self.nn, self.pp, Bf, self.Xp)
        gamC, nuC = critGam_modified(Gams, self.epE, self.epB, self.nn, self.pp, Bf, TTs)
        flux = fluxMax_modified(RRs, Gams, self.nn, Bf, self.DD, self.PhiP)
        #fluxMax[Gams<=5] = 0.
        #nuM, nuC = nuM/Gams, nuC/Gams
        dopFacs = self.dopplerFactor(calpha, Betas)
        obsFreqs = freq/dopFacs
        #afac = self.cellSize/maximum(self.cellSize*ones(self.ncells), 2.*pi*(1.-cos(1./Gams)))
        # fluxes: surface brightness proxy; fluxes2: plain per-cell flux.
        fluxes = (self.DD**2./(abs(calpha)*self.cellSize*RRs**2.)) *self.cellSize* (Gams*(1.-Betas*calpha))**(-3.) * FluxNuSC_arr(self, nuM, nuC, flux, obsFreqs)
        #fluxes = (Gams*(1.-Betas*calpha))**(-3.) * FluxNuSC_arr(self, nuM, nuC, fluxMax, obsFreqs)*1./calpha
        fluxes2 = self.cellSize*(Gams*(1.-Betas*calpha))**(-3.)*FluxNuSC_arr(self, nuM, nuC, flux, obsFreqs)
    # Scale unit coordinates by each cell's radius to get sky positions.
    im_xxs = RRs*im_xxs
    im_yys = RRs*im_yys
    return im_xxs, im_yys, fluxes, fluxes2, RRs, Gams, calpha, TTs
| 52.514395
| 218
| 0.549708
| 7,116
| 54,720
| 4.088954
| 0.059444
| 0.034024
| 0.016497
| 0.015191
| 0.804207
| 0.781936
| 0.764581
| 0.743891
| 0.72602
| 0.710039
| 0
| 0.024217
| 0.300457
| 54,720
| 1,041
| 219
| 52.564842
| 0.735913
| 0.206488
| 0
| 0.659612
| 0
| 0
| 0.013438
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040564
| false
| 0
| 0.010582
| 0
| 0.082892
| 0.015873
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
170398553e4b48d83d9bd6cd273d4ca0f20ef6ba
| 38
|
py
|
Python
|
sampling_free/modeling/generalized_rcnn/rpn/retinanet/__init__.py
|
ChenJoya/sampling-free
|
01dfd40cf794ee5afea4f052216483f3901ecd20
|
[
"MIT"
] | 266
|
2019-09-26T07:03:48.000Z
|
2022-02-23T13:26:16.000Z
|
sampling_free/modeling/generalized_rcnn/rpn/retinanet/__init__.py
|
chenjoya/sampling-free
|
01dfd40cf794ee5afea4f052216483f3901ecd20
|
[
"MIT"
] | 9
|
2019-10-24T00:41:49.000Z
|
2021-12-31T01:26:08.000Z
|
sampling_free/modeling/generalized_rcnn/rpn/retinanet/__init__.py
|
chenjoya/sampling-free
|
01dfd40cf794ee5afea4f052216483f3901ecd20
|
[
"MIT"
] | 20
|
2019-11-07T10:03:18.000Z
|
2021-11-13T14:03:31.000Z
|
from .retinanet import build_retinanet
| 38
| 38
| 0.894737
| 5
| 38
| 6.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 38
| 1
| 38
| 38
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ca15fdad5a80692271fd86a5bf72cf604221fb61
| 346
|
py
|
Python
|
ppdb_app/models.py
|
aryanicosa/ppdb_mvt
|
cb4674fba6cb6663e7a5710f9a4b695c3bd574be
|
[
"MIT"
] | null | null | null |
ppdb_app/models.py
|
aryanicosa/ppdb_mvt
|
cb4674fba6cb6663e7a5710f9a4b695c3bd574be
|
[
"MIT"
] | null | null | null |
ppdb_app/models.py
|
aryanicosa/ppdb_mvt
|
cb4674fba6cb6663e7a5710f9a4b695c3bd574be
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Users(models.Model):
    """Application user record with a simple role string."""
    role = models.CharField(max_length=20)       # e.g. the user's role label
    username = models.CharField(max_length=100)
    fullname = models.CharField(max_length=100)
    password = models.CharField(max_length=200)

    def __str__(self):
        # BUG FIX: the original returned `self.username and self.password`,
        # which evaluates to the password whenever username is non-empty --
        # wrong display value and it leaks the stored password anywhere the
        # object is rendered (admin, logs, templates).
        return self.username
| 28.833333
| 47
| 0.725434
| 46
| 346
| 5.282609
| 0.565217
| 0.246914
| 0.296296
| 0.395062
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038869
| 0.182081
| 346
| 12
| 48
| 28.833333
| 0.819788
| 0.069364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0.25
| 0.125
| 0.125
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
ca7461af2eebddf3c5e63bbfac5a10c1214e0b0c
| 145
|
py
|
Python
|
descriptive/descriptive/doctype/industries/test_industries.py
|
ujjwalkumar93/descriptive
|
080c97f9bfcf4aec73d1400ca7ee346437ccdeef
|
[
"MIT"
] | null | null | null |
descriptive/descriptive/doctype/industries/test_industries.py
|
ujjwalkumar93/descriptive
|
080c97f9bfcf4aec73d1400ca7ee346437ccdeef
|
[
"MIT"
] | null | null | null |
descriptive/descriptive/doctype/industries/test_industries.py
|
ujjwalkumar93/descriptive
|
080c97f9bfcf4aec73d1400ca7ee346437ccdeef
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2022, k2s.co and Contributors
# See license.txt
# import frappe
import unittest
class TestIndustries(unittest.TestCase):
    """Placeholder test case for the Industries doctype; no tests yet."""
    pass
| 16.111111
| 45
| 0.772414
| 19
| 145
| 5.894737
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040323
| 0.144828
| 145
| 8
| 46
| 18.125
| 0.862903
| 0.503448
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
047c4d5652b45cc16cd5c089998c1a48b9500c81
| 97,397
|
py
|
Python
|
raco/myrial/query_tests.py
|
uwescience/raco
|
1f2bedbef71bacf715340289f4973d85a3c1dc97
|
[
"BSD-3-Clause"
] | 61
|
2015-02-09T17:27:40.000Z
|
2022-03-28T14:37:53.000Z
|
raco/myrial/query_tests.py
|
uwescience/raco
|
1f2bedbef71bacf715340289f4973d85a3c1dc97
|
[
"BSD-3-Clause"
] | 201
|
2015-01-03T02:46:19.000Z
|
2017-09-19T02:16:36.000Z
|
raco/myrial/query_tests.py
|
uwescience/raco
|
1f2bedbef71bacf715340289f4973d85a3c1dc97
|
[
"BSD-3-Clause"
] | 17
|
2015-06-03T12:01:30.000Z
|
2021-11-27T15:49:21.000Z
|
# -*- coding: UTF-8 -*-
import collections
import math
import md5
from nose.tools import nottest
import raco.algebra
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.scheme as scheme
import raco.myrial.groupby
import raco.myrial.myrial_test as myrial_test
from raco.algebra import Apply
from raco import types
from raco.myrial.exceptions import *
from raco.expression import NestedAggregateException
from raco.fake_data import FakeData
from raco.types import LONG_TYPE
class TestQueryFunctions(myrial_test.MyrialTestCase, FakeData):
def setUp(self):
    """Register TestQueryFunctions.test_function with the db and ingest the
    emp, dept and numbers fixture tables."""
    super(TestQueryFunctions, self).setUp()
    self.db.add_function(TestQueryFunctions.test_function)
    self.db.ingest(TestQueryFunctions.emp_key,
                   TestQueryFunctions.emp_table,
                   TestQueryFunctions.emp_schema)
    self.db.ingest(TestQueryFunctions.dept_key,
                   TestQueryFunctions.dept_table,
                   TestQueryFunctions.dept_schema)
    self.db.ingest(TestQueryFunctions.numbers_key,
                   TestQueryFunctions.numbers_table,
                   TestQueryFunctions.numbers_schema)
def test_scan_emp(self):
    """SCAN of the emp relation round-trips the full emp table."""
    query = """
    emp = SCAN(%s);
    STORE(emp, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.emp_table)
def test_scan_dept(self):
    """SCAN of the dept relation round-trips the full dept table."""
    query = """
    dept = SCAN(%s);
    STORE(dept, OUTPUT);
    """ % self.dept_key
    self.check_result(query, self.dept_table)
def test_bag_comp_emit_star(self):
    """EMIT * in a bag comprehension reproduces the input table."""
    query = """
    emp = SCAN(%s);
    bc = [FROM emp EMIT *];
    STORE(bc, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.emp_table)
def test_bag_comp_emit_table_wildcard(self):
    """EMIT emp.* (table-qualified wildcard) reproduces the input table."""
    query = """
    emp = SCAN(%s);
    bc = [FROM emp EMIT emp.*];
    STORE(bc, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.emp_table)
def test_hybrid_emit_clause(self):
    """Mix of literal, expression, aliased wildcard and bare * in one EMIT
    over a cross product of dept and emp."""
    query = """
    emp = SCAN(%s);
    dept = SCAN(%s);
    x = [FROM dept, emp as X EMIT 5, X.salary * 2 AS k, X.*, *];
    STORE(x, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    # Columns: 5, salary*2, all emp columns, then all (dept + emp) columns.
    expected = [(5, e[3] * 2) + e + d + e for e in self.emp_table
                for d in self.dept_table]
    self.check_result(query, collections.Counter(expected))
# Parameterized query shared by the salary-filter tests below; the %s slots
# are (relation key, salary expression). 25 * 10 * 10 * (5 + 5) == 25000.
salary_filter_query = """
emp = SCAN(%s);
rich = [FROM emp WHERE %s > 25 * 10 * 10 * (5 + 5) EMIT *];
STORE(rich, OUTPUT);
"""
# Expected result: emp rows whose salary (column 3) exceeds 25000.
salary_expected_result = collections.Counter(
    [x for x in FakeData.emp_table.elements() if x[3] > 25000])
def test_bag_comp_filter_large_salary_by_name(self):
    """Salary filter referencing the column by name ('salary')."""
    query = TestQueryFunctions.salary_filter_query % (self.emp_key,
                                                      'salary')
    self.check_result(query, TestQueryFunctions.salary_expected_result)
def test_bag_comp_filter_large_salary_by_position(self):
    """Salary filter referencing the column by position ('$3')."""
    query = TestQueryFunctions.salary_filter_query % (self.emp_key, '$3')
    self.check_result(query, TestQueryFunctions.salary_expected_result)
def test_bag_comp_filter_empty_result(self):
    """A predicate no row satisfies yields an empty result."""
    query = """
    emp = SCAN(%s);
    poor = [FROM emp WHERE $3 < (5 * 2) EMIT *];
    STORE(poor, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter()
    self.check_result(query, expected)
def test_bag_comp_filter_column_compare_ge(self):
    """Column-to-column >= comparison matches Python's >= on the same rows."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE 2 * $1 >= $0 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if 2 * x[1] >= x[0]])
    self.check_result(query, expected)
def test_bag_comp_filter_column_compare_ge2(self):
    """Unicode '\u2265' operator behaves identically to '>='."""
    query = u"""
    emp = SCAN(%s);
    out = [FROM emp WHERE 2 * $1 ≥ $0 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if 2 * x[1] >= x[0]])
    self.check_result(query, expected)
def test_bag_comp_filter_column_compare_le(self):
    """Column-to-column <= comparison matches Python's <= on the same rows."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE $1 <= 2 * $0 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[1] <= 2 * x[0]])
    self.check_result(query, expected)
def test_bag_comp_filter_column_compare_le2(self):
    """Unicode '\u2264' operator behaves identically to '<='."""
    query = u"""
    emp = SCAN(%s);
    out = [FROM emp WHERE $1 ≤ 2 * $0 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[1] <= 2 * x[0]])
    self.check_result(query, expected)
def test_bag_comp_filter_column_compare_gt(self):
    """Column-to-column > comparison matches Python's > on the same rows."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE 2 * $1 > $0 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if 2 * x[1] > x[0]])
    self.check_result(query, expected)
def test_bag_comp_filter_column_compare_lt(self):
    """Column-to-column < comparison matches Python's < on the same rows."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE $1 < 2 * $0 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[1] < 2 * x[0]])
    self.check_result(query, expected)
def test_bag_comp_filter_column_compare_eq(self):
    """Column-to-column == comparison matches Python's == on the same rows."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE $0 * 2 == $1 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[0] * 2 == x[1]])
    self.check_result(query, expected)
def test_bag_comp_filter_column_compare_ne(self):
    """!= over an integer-division expression ('//')."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE $0 // $1 != $1 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[0] / x[1] != x[1]])
    self.check_result(query, expected)
def test_bag_comp_filter_column_compare_ne2(self):
    """'<>' is an accepted alternative spelling of '!='."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE $0 // $1 <> $1 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[0] / x[1] != x[1]])
    self.check_result(query, expected)
def test_bag_comp_filter_column_compare_ne3(self):
    """Unicode '\u2260' operator behaves identically to '!='."""
    query = u"""
    emp = SCAN(%s);
    out = [FROM emp WHERE $0 // $1 ≠ $1 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[0] / x[1] != x[1]])
    self.check_result(query, expected)
def test_bag_comp_filter_minus(self):
    """Unary minus in an expression: $0 + -$1 equals subtraction."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE $0 + -$1 == $1 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[0] - x[1] == x[1]])
    self.check_result(query, expected)
def test_bag_comp_filter_and(self):
    """Conjunction (AND) of two predicates."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE salary == 25000 AND id > dept_id EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[3] == 25000 and
         x[0] > x[1]])
    self.check_result(query, expected)
def test_bag_comp_filter_or(self):
    """Disjunction (OR) of two predicates."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE $3 > 25 * 1000 OR id > dept_id EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[3] > 25000 or
         x[0] > x[1]])
    self.check_result(query, expected)
def test_bag_comp_filter_not(self):
    """Negation (NOT) of a comparison predicate."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE not salary > 25000 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if not x[3] > 25000])
    self.check_result(query, expected)
def test_bag_comp_filter_or_and(self):
    """Operator precedence: AND binds tighter than OR."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE salary == 25000 OR salary == 5000 AND
    dept_id == 1 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[3] == 25000 or
         (x[3] == 5000 and x[1] == 1)])
    self.check_result(query, expected)
def test_bag_comp_filter_or_and_not(self):
    """Precedence with NOT mixed into an OR/AND expression (mirrors
    Python's own precedence in the expected computation)."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE salary == 25000 OR NOT salary == 5000 AND
    dept_id == 1 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [x for x in self.emp_table.elements() if x[3] == 25000 or not
         x[3] == 5000 and x[1] == 1])
    self.check_result(query, expected)
def test_bag_comp_emit_columns(self):
    """EMIT a positional column and an aliased named column with a filter."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp WHERE dept_id == 1 EMIT $2, salary AS salary];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [(x[2], x[3]) for x in self.emp_table.elements() if x[1] == 1])
    self.check_result(query, expected)
def test_bag_comp_emit_literal(self):
    """EMIT of a string literal alongside a column."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp EMIT salary, "bugga bugga"];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [(x[3], "bugga bugga") for x in self.emp_table.elements()])
    self.check_result(query, expected)
def test_bag_comp_emit_with_math(self):
    """Arithmetic (+, -, //, *) in EMIT expressions."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp EMIT salary + 5000, salary - 5000, salary // 5000,
    salary * 5000];
    STORE(out, OUTPUT);
    """ % self.emp_key
    expected = collections.Counter(
        [(x[3] + 5000, x[3] - 5000, x[3] / 5000, x[3] * 5000)
         for x in self.emp_table.elements()])
    self.check_result(query, expected)
def test_bag_comp_rename(self):
    """A column renamed with AS is visible to a downstream comprehension."""
    query = """
    emp = SCAN(%s);
    out = [FROM emp EMIT name, salary * 2 AS double_salary];
    out = [FROM out WHERE double_salary > 10000 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (t[2], t[3] * 2) for t in self.emp_table.elements()
        if t[3] * 2 > 10000)
    self.check_result(query, want)
# Shared fixture: expected (employee name, department name) pairs for the
# emp JOIN dept tests below (join on emp.dept_id == dept.id).
join_expected = collections.Counter(
    [('Bill Howe', 'human resources'),
     ('Dan Halperin', 'accounting'),
     ('Andrew Whitaker', 'accounting'),
     ('Shumo Chu', 'human resources'),
     ('Victor Almeida', 'accounting'),
     ('Dan Suciu', 'engineering'),
     ('Magdalena Balazinska', 'accounting')])
def test_explicit_join_unicode(self):
    """JOIN parses identically when the program text is a unicode string."""
    query = u"""
    emp = SCAN(%s);
    dept = SCAN(%s);
    out = JOIN(emp, dept_id, dept, id);
    out2 = [FROM out EMIT $2 AS emp_name, $5 AS dept_name];
    STORE(out2, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    self.check_result(query, self.join_expected)
def test_explicit_join(self):
    """Explicit JOIN() on emp.dept_id == dept.id."""
    query = """
    emp = SCAN(%s);
    dept = SCAN(%s);
    out = JOIN(emp, dept_id, dept, id);
    out2 = [FROM out EMIT $2 AS emp_name, $5 AS dept_name];
    STORE(out2, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    self.check_result(query, self.join_expected)
def test_explicit_join_twocols(self):
    """Explicit JOIN() on a compound (two-column) key."""
    query = """
    query = [1 as dept_id, 25000 as salary];
    emp = SCAN({emp});
    out = JOIN(query, (dept_id, salary), emp, (dept_id, salary));
    out2 = [FROM out EMIT name];
    STORE(out2, OUTPUT);
    """.format(emp=self.emp_key)
    matches = [('Victor Almeida',), ('Magdalena Balazinska',)]
    self.check_result(query, collections.Counter(matches))
def test_bagcomp_join_via_names(self):
    """Join written as a bag comprehension using column names."""
    query = """
    out = [FROM SCAN(%s) E, SCAN(%s) AS D WHERE E.dept_id == D.id
           EMIT E.name AS emp_name, D.name AS dept_name];
    STORE(out, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    self.check_result(query, self.join_expected)
def test_bagcomp_join_via_pos(self):
    """Join written as a bag comprehension using positional ($n) refs."""
    query = """
    E = SCAN(%s);
    D = SCAN(%s);
    out = [FROM E, D WHERE E.$1 == D.$0
           EMIT E.name AS emp_name, D.$1 AS dept_name];
    STORE(out, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    self.check_result(query, self.join_expected)
def test_two_column_join(self):
    """Bag-comprehension join with a conjunction of two equality predicates."""
    query = """
    D = [1 as dept_id, 25000 as salary];
    out = [FROM D, SCAN({emp}) E
           WHERE E.dept_id == D.dept_id AND E.salary == D.salary
           EMIT E.name AS emp_name];
    STORE(out, OUTPUT);
    """.format(emp=self.emp_key)
    matches = [('Victor Almeida',), ('Magdalena Balazinska',)]
    self.check_result(query, collections.Counter(matches))
def test_join_with_select(self):
    """Join combined with an extra selection predicate on salary."""
    query = """
    out = [FROM SCAN(%s) AS D, SCAN(%s) E
           WHERE E.dept_id == D.id AND E.salary < 6000
           EMIT E.name AS emp_name, D.name AS dept_name];
    STORE(out, OUTPUT);
    """ % (self.dept_key, self.emp_key)
    matches = [('Andrew Whitaker', 'accounting'),
               ('Shumo Chu', 'human resources')]
    self.check_result(query, collections.Counter(matches))
def test_join_with_reordering(self):
    """The FROM-clause order of a join must not affect its result."""
    # Both orderings share the same expected answer, so compute it once.
    want = collections.Counter([('Andrew Whitaker', 1),
                                ('Shumo Chu', 2)])
    query = """
    out = [FROM SCAN({d}) AS D, SCAN({e}) E
           WHERE E.dept_id == D.id AND E.salary < 6000
           EMIT E.name, D.id];
    STORE(out, OUTPUT);
    """.format(d=self.dept_key, e=self.emp_key)
    self.check_result(query, want)
    # Same join with the relations swapped in the FROM clause.
    query = """
    out = [FROM SCAN({e}) E, SCAN({d}) AS D
           WHERE E.dept_id == D.id AND E.salary < 6000
           EMIT E.name, D.id];
    STORE(out, OUTPUT);
    """.format(d=self.dept_key, e=self.emp_key)
    self.check_result(query, want)
def test_sql_join(self):
    """SQL-style select-from-where join"""
    query = """
    E = SCAN(%s);
    D = SCAN(%s);
    out = SELECT E.name, D.name FROM E, D WHERE E.dept_id = D.id;
    STORE(out, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    self.check_result(query, self.join_expected)
def test_bagcomp_nested_sql(self):
    """A SQL subquery can appear inside a bag comprehension's FROM."""
    query = """
    out = [FROM (SELECT name, salary
                 FROM SCAN(%s) AS X
                 WHERE salary > 5000) AS Y
           WHERE salary < 80000
           EMIT *];
    STORE(out, OUTPUT);
    """ % (self.emp_key,)
    rows = [(t[2], t[3]) for t in self.emp_table.elements()
            if 5000 < t[3] < 80000]
    self.check_result(query, collections.Counter(rows))
def test_sql_nested_sql(self):
    """A SQL subquery can appear inside a SQL FROM clause."""
    query = """
    out = SELECT Y.name, Y.salary
          FROM (SELECT name, salary
                FROM SCAN(%s) AS X
                WHERE salary > 5000) AS Y
          WHERE Y.salary < 80000;
    STORE(out, OUTPUT);
    """ % (self.emp_key,)
    rows = [(t[2], t[3]) for t in self.emp_table.elements()
            if 5000 < t[3] < 80000]
    self.check_result(query, collections.Counter(rows))
def test_sql_nested_bagcomp(self):
    """A bag comprehension can appear inside a SQL FROM clause."""
    query = """
    out = SELECT Y.name, Y.salary FROM
          [FROM SCAN(%s) AS X WHERE salary > 5000 EMIT X.*] AS Y
    WHERE Y.salary < 80000;
    STORE(out, OUTPUT);
    """ % (self.emp_key,)
    rows = [(t[2], t[3]) for t in self.emp_table.elements()
            if 5000 < t[3] < 80000]
    self.check_result(query, collections.Counter(rows))
def test_bagcomp_projection(self):
    """Column names survive a positional projection ($2 -> name)."""
    query = """
    E = SCAN(%s);
    F = [FROM E EMIT $2];
    out = [FROM F EMIT name];
    STORE(out, OUTPUT);
    """ % (self.emp_key,)
    want = collections.Counter((t[2],) for t in self.emp_table)
    self.check_result(query, want)
def test_bagcomp_no_column_name(self):
    """An unnamed computed column is still addressable by position."""
    query = """
    E = SCAN(%s);
    F = [FROM E EMIT salary*E.salary];
    out = [FROM F EMIT $0];
    STORE(out, OUTPUT);
    """ % (self.emp_key,)
    want = collections.Counter((t[3] * t[3],) for t in self.emp_table)
    self.check_result(query, want)
def test_explicit_cross(self):
    """CROSS() produces the full cartesian product of two relations."""
    query = """
    out = CROSS(SCAN(%s), SCAN(%s));
    STORE(out, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    want = collections.Counter(
        e + d
        for e in self.emp_table.elements()
        for d in self.dept_table.elements())
    self.check_result(query, want)
def test_bagcomp_cross(self):
    """A FROM clause with two relations and no WHERE is a cross product."""
    query = """
    out = [FROM SCAN(%s) E, SCAN(%s) AS D EMIT *];
    STORE(out, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    want = collections.Counter(
        e + d
        for e in self.emp_table.elements()
        for d in self.dept_table.elements())
    self.check_result(query, want)
def test_distinct(self):
    """DISTINCT collapses duplicate salary values."""
    query = """
    out = DISTINCT([FROM SCAN(%s) AS X EMIT salary]);
    STORE(out, OUTPUT);
    """ % self.emp_key
    distinct_salaries = [(25000,), (5000,), (90000,)]
    self.check_result(query, collections.Counter(distinct_salaries))
def test_sql_distinct(self):
    """SELECT DISTINCT collapses duplicate salary values."""
    query = """
    out = SELECT DISTINCT salary AS salary FROM SCAN(%s) AS X;
    STORE(out, OUTPUT);
    """ % self.emp_key
    unique_salaries = {(t[3],) for t in self.emp_table}
    self.check_result(query, collections.Counter(unique_salaries))
def test_sql_repeated(self):
    """A plain SELECT (no DISTINCT) keeps duplicate salary values."""
    query = """
    out = SELECT salary AS salary FROM SCAN(%s) AS X;
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter((t[3],) for t in self.emp_table)
    self.check_result(query, want)
def test_limit_without_orderby_assert(self):
    """LIMIT without an ORDER BY is rejected."""
    query = """
    out = LIMIT(SCAN(%s), 3);
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(Exception):  # noqa
        self.check_result(query, None)
def test_orderby_without_limit_assert(self):
    """ORDER BY without a LIMIT is rejected."""
    query = """
    out = SELECT * FROM SCAN(%s) as X ORDER BY $0;
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(Exception):  # noqa
        self.check_result(query, None)
def test_limit_orderby(self):
    """ORDER BY ... LIMIT inside a bag comprehension.

    Uses assertEqual instead of the deprecated assertEquals alias, and
    snake_case for the local to match the rest of the file.
    """
    query = """
    out = [FROM SCAN(%s) as X EMIT * ORDER BY $0 ASC LIMIT 3];
    STORE(out, OUTPUT);
    """ % self.emp_key
    result = self.execute_query(query)
    expected = collections.Counter(
        sorted(self.emp_table.elements(), key=lambda emp: emp[0])[:3])
    self.assertEqual(result, expected)
def test_sql_limit_orderby(self):
    """SQL-style ORDER BY ... LIMIT.

    Uses assertEqual instead of the deprecated assertEquals alias, and
    snake_case for the local to match the rest of the file.
    """
    query = """
    out = SELECT * FROM SCAN(%s) as X ORDER BY $0 ASC LIMIT 3;
    STORE(out, OUTPUT);
    """ % self.emp_key
    result = self.execute_query(query)
    expected = collections.Counter(
        sorted(self.emp_table.elements(), key=lambda emp: emp[0])[:3])
    self.assertEqual(result, expected)
def test_limit_orderby_multikey(self):
    """Multi-key ORDER BY ... LIMIT in a bag comprehension.

    The expectation emulates ORDER BY $1 ASC, $3 DESC, $2 ASC with
    chained stable sorts, applied least-significant key first.  Uses
    assertEqual instead of the deprecated assertEquals alias.
    """
    query = """
    out = [FROM SCAN(%s) as X EMIT *
           ORDER BY $1 ASC, $3 DESC, $2 ASC
           LIMIT 3];
    STORE(out, OUTPUT);
    """ % self.emp_key
    result = self.execute_query(query)
    ranked = sorted(self.emp_table.elements(), key=lambda emp: emp[2])
    ranked = sorted(ranked, key=lambda emp: emp[3], reverse=True)
    expected = collections.Counter(
        sorted(ranked, key=lambda emp: emp[1])[:3])
    self.assertEqual(result, expected)
def test_sql_limit_orderby_multikey(self):
    """SQL-style multi-key ORDER BY ... LIMIT.

    The expectation emulates ORDER BY $1 ASC, $3 DESC, $2 ASC with
    chained stable sorts, applied least-significant key first.  Uses
    assertEqual instead of the deprecated assertEquals alias.
    """
    query = """
    out = SELECT * FROM SCAN(%s) as X
          ORDER BY $1 ASC, $3 DESC, $2 ASC
          LIMIT 3;
    STORE(out, OUTPUT);
    """ % self.emp_key
    result = self.execute_query(query)
    ranked = sorted(self.emp_table.elements(), key=lambda emp: emp[2])
    ranked = sorted(ranked, key=lambda emp: emp[3], reverse=True)
    expected = collections.Counter(
        sorted(ranked, key=lambda emp: emp[1])[:3])
    self.assertEqual(result, expected)
def test_table_literal_boolean(self):
    """Boolean literals (mixed case) in a table literal."""
    query = """
    X = [truE as MyTrue, FaLse as MyFalse];
    Y = [FROM scan(%s) as E, X where X.MyTrue emit *];
    STORE(Y, OUTPUT);
    """ % self.emp_key
    rows = [t + (True, False) for t in self.emp_table]
    self.check_result(query, collections.Counter(rows))
def test_table_literal_scalar_expression(self):
    """A table literal cell may be an arithmetic expression."""
    query = """
    X = [FROM ["Andrew", (50 * (500 + 500)) AS salary] Z EMIT salary];
    STORE(X, OUTPUT);
    """
    want = collections.Counter([(50000,)])
    self.check_result(query, want)
def test_table_literal_unbox(self):
    """Unboxing (*R.col) of singleton relations inside a table literal."""
    query = """
    A = [1 AS one, 2 AS two, 3 AS three];
    B = [1 AS one, 2 AS two, 3 AS three];
    C = [*A.two * *B.three];
    STORE(C, OUTPUT);
    """
    want = collections.Counter([(6,)])
    self.check_result(query, want)
def test_unbox_from_where_single(self):
    """Unboxing a single-column singleton relation in a WHERE clause."""
    query = """
    TH = [25 * 1000];
    emp = SCAN(%s);
    out = [FROM emp WHERE $3 > *TH EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        t for t in self.emp_table.elements() if t[3] > 25000)
    self.check_result(query, want)
def test_unbox_from_where_multi(self):
    """Several unboxed relations in one WHERE predicate (8 == 2**4,
    so the filter is always true and the whole table passes)."""
    query = """
    TWO = [2];
    FOUR = [4];
    EIGHT = [8];
    emp = SCAN(%s);
    out = [FROM emp WHERE *EIGHT == *TWO**FOUR EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.emp_table)
def test_unbox_from_where_nary_name(self):
    """Unboxing a named column of a multi-column singleton relation."""
    query = """
    _CONST = [25 AS twenty_five, 1000 AS thousand];
    emp = SCAN(%s);
    out = [FROM emp WHERE salary == *_CONST.twenty_five *
           *_CONST.thousand EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        t for t in self.emp_table.elements() if t[3] == 25000)
    self.check_result(query, want)
def test_unbox_from_where_nary_pos(self):
    """Unboxing a positional column of a multi-column singleton relation."""
    query = """
    _CONST = [25 AS twenty_five, 1000 AS thousand];
    emp = SCAN(%s);
    out = [FROM emp WHERE salary == *_CONST.$0 *
           *_CONST.$1 EMIT *];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        t for t in self.emp_table.elements() if t[3] == 25000)
    self.check_result(query, want)
def test_unbox_from_emit_single(self):
    """Unboxing a singleton relation inside an EMIT expression."""
    query = """
    THOUSAND = [1000];
    emp = SCAN(%s);
    out = [FROM emp EMIT salary * *THOUSAND AS salary];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (t[3] * 1000,) for t in self.emp_table.elements())
    self.check_result(query, want)
def test_unbox_kitchen_sink(self):
    """Unboxing in WHERE and EMIT at once, by name, position and relation.

    The predicate matches salary == 25*1000 or salary == 100*50,
    i.e. salaries of 25000 or 5000; EMIT computes dept_id*100//50.
    """
    query = """
    C1 = [25 AS a, 100 AS b];
    C2 = [50 AS a, 1000 AS b];
    emp = SCAN(%s);
    out = [FROM emp WHERE salary==*C1.a * *C2.b OR $3==*C1.b * *C2
           EMIT dept_id * *C1.b // *C2.a];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (t[1] * 2,) for t in self.emp_table.elements()
        if t[3] in (5000, 25000))
    self.check_result(query, want)
def test_unbox_arbitrary_expression(self):
    """Unboxing the result of an expression (COUNTALL) in a WHERE clause."""
    query = """
    emp = SCAN(%s);
    dept = SCAN(%s);
    out = [FROM emp, COUNTALL(dept) as size WHERE id > *size EMIT emp.id];
    STORE(out, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    dept_count = len(self.dept_table)
    want = collections.Counter(
        (t[0],) for t in self.emp_table.elements() if t[0] > dept_count)
    self.check_result(query, want)
def test_inline_table_literal(self):
    """A table literal used directly inside a FROM clause."""
    query = """
    emp = SCAN(%s);
    dept = SCAN(%s);
    out = [FROM emp, [1,2,3] as tl WHERE id > tl.$2 EMIT emp.id];
    STORE(out, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    want = collections.Counter(
        (t[0],) for t in self.emp_table.elements() if t[0] > 3)
    self.check_result(query, want)
def __aggregate_expected_result(self, apply_func, grouping_col=1,
                                agg_col=3):
    """Group emp_table by grouping_col and apply apply_func to each
    group's agg_col values.

    :param apply_func: callable taking a list of agg_col values and
        returning the aggregate for one group (e.g. max, min, sum, len).
    :param grouping_col: index of the grouping column (default dept_id).
    :param agg_col: index of the aggregated column (default salary).
    :return: Counter of (group key, aggregate) tuples.
    """
    result_dict = collections.defaultdict(list)
    for t in self.emp_table.elements():
        result_dict[t[grouping_col]].append(t[agg_col])
    tuples = [(key, apply_func(values)) for key, values in
              result_dict.iteritems()]
    return collections.Counter(tuples)
def test_max(self):
    """MAX(salary) grouped by dept_id."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT dept_id, MAX(salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.__aggregate_expected_result(max))
def test_min(self):
    """MIN(salary) grouped by dept_id."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT dept_id, MIN(salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.__aggregate_expected_result(min))
def test_sum(self):
    """SUM(salary) grouped by dept_id."""
    query = """
    out = [FROM SCAN(%s) as X EMIT dept_id, SUM(salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.__aggregate_expected_result(sum))
def test_avg(self):
    """AVG(salary) grouped by dept_id.

    The local accumulator is renamed so it no longer shadows the
    builtin `sum`; division semantics are unchanged.
    """
    query = """
    out = [FROM SCAN(%s) AS X EMIT dept_id, AVG(salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key

    def avg(it):
        total = 0
        cnt = 0
        for val in it:
            total += val
            cnt += 1
        return total / cnt
    self.check_result(query, self.__aggregate_expected_result(avg))
    self.check_result(query, self.__aggregate_expected_result(avg),
                      test_logical=True)
def test_stdev(self):
    """STDEV(salary) over the whole table, for both the regular and the
    logical execution paths; compared approximately because the result
    is floating point."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT STDEV(salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    res = self.execute_query(query)
    # Grab the single output tuple (Python 2 iterator protocol).
    tp = res.elements().next()
    self.assertAlmostEqual(tp[0], 34001.8006726)
    res = self.execute_query(query, test_logical=True)
    tp = res.elements().next()
    self.assertAlmostEqual(tp[0], 34001.8006726)
def test_count(self):
    """COUNT(salary) grouped by dept_id, on both execution paths."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT dept_id, COUNT(salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.__aggregate_expected_result(len))
    self.check_result(query, self.__aggregate_expected_result(len),
                      test_logical=True)
def test_countall(self):
    """COUNTALL() grouped by dept_id, on both execution paths."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT dept_id, COUNTALL()];
    STORE(out, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.__aggregate_expected_result(len))
    self.check_result(query, self.__aggregate_expected_result(len),
                      test_logical=True)
def test_count_star(self):
    """COUNT(*) grouped by dept_id, on both execution paths."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT dept_id, COUNT(*)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.__aggregate_expected_result(len))
    self.check_result(query, self.__aggregate_expected_result(len),
                      test_logical=True)
def test_count_star_sql(self):
    """SQL-style COUNT(*) grouped by dept_id, on both execution paths."""
    query = """
    out = SELECT dept_id, COUNT(*) FROM SCAN(%s) AS X;
    STORE(out, OUTPUT);
    """ % self.emp_key
    self.check_result(query, self.__aggregate_expected_result(len))
    self.check_result(query, self.__aggregate_expected_result(len),
                      test_logical=True)
def test_max_reversed(self):
    """The aggregate may precede the grouping column in EMIT."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT MAX(salary) AS max_salary, dept_id];
    STORE(out, OUTPUT);
    """ % self.emp_key
    # Helper yields (dept_id, max); the query emits (max, dept_id).
    want = collections.Counter(
        (mx, dept) for (dept, mx) in self.__aggregate_expected_result(max))
    self.check_result(query, want)
    self.check_result(query, want, test_logical=True)
def test_compound_aggregate(self):
    """An arithmetic expression over two aggregates of one group."""
    query = """
    out = [FROM SCAN(%s) AS X
           EMIT (2 * (MAX(salary) - MIN(salary))) AS range,
                 dept_id AS did];
    out = [FROM out EMIT did AS dept_id, range AS rng];
    STORE(out, OUTPUT);
    """ % self.emp_key
    groups = collections.defaultdict(list)
    for t in self.emp_table.elements():
        groups[t[1]].append(t[3])
    want = collections.Counter(
        (dept, 2 * (max(vals) - min(vals)))
        for dept, vals in groups.iteritems())
    self.check_result(query, want)
    self.check_result(query, want, test_logical=True)
def test_aggregate_with_unbox(self):
    """Unboxed constants inside aggregate arguments (2x salaries)."""
    query = """
    C = [1 AS one, 2 AS two];
    out = [FROM SCAN(%s) AS X
           EMIT MAX(*C.two * salary) - MIN( *C.$1 * salary) AS range,
                 dept_id AS did];
    out = [FROM out EMIT did AS dept_id, range AS rng];
    STORE(out, OUTPUT);
    """ % self.emp_key
    groups = collections.defaultdict(list)
    for t in self.emp_table.elements():
        groups[t[1]].append(2 * t[3])
    want = collections.Counter(
        (dept, max(vals) - min(vals))
        for dept, vals in groups.iteritems())
    self.check_result(query, want)
    self.check_result(query, want, test_logical=True)
def test_nary_groupby(self):
    """Grouping on two columns (dept_id, salary) at once."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT dept_id, salary, COUNT(name)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    groups = collections.defaultdict(list)
    for t in self.emp_table.elements():
        groups[(t[1], t[3])].append(t[2])
    want = collections.Counter(
        key + (len(names),) for key, names in groups.iteritems())
    self.check_result(query, want)
def test_empty_groupby(self):
    """Aggregates with no grouping column collapse to a single row."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT MAX(salary), COUNT($0), MIN(dept_id*4)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    single_row = (90000, len(self.emp_table), 4)
    self.check_result(query, collections.Counter([single_row]))
def test_compound_groupby(self):
    """Grouping on a computed expression (id + dept_id)."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT id+dept_id, AVG(salary), COUNT(salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    groups = collections.defaultdict(list)
    for t in self.emp_table.elements():
        groups[t[0] + t[1]].append(t[3])
    # sum/len uses (Python 2) integer division, matching AVG here.
    want = collections.Counter(
        (key, sum(vals) / len(vals), len(vals))
        for key, vals in groups.iteritems())
    self.check_result(query, want)
def test_impure_aggregate_colref(self):
    """Test of aggregate column that refers to a grouping column."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT
           ( X.dept_id + (MAX(X.salary) - MIN(X.salary))) AS val,
           X.dept_id AS did];
    out = [FROM out EMIT did AS dept_id, val AS rng];
    STORE(out, OUTPUT);
    """ % self.emp_key
    groups = collections.defaultdict(list)
    for t in self.emp_table.elements():
        groups[t[1]].append(t[3])
    want = collections.Counter(
        (dept, dept + (max(vals) - min(vals)))
        for dept, vals in groups.iteritems())
    self.check_result(query, want)
def test_impure_aggregate_unbox(self):
    """Test of an aggregate column that contains an unbox."""
    query = """
    TWO = [2];
    out = [FROM SCAN(%s) AS X
           EMIT (*TWO * (MAX(salary) - MIN(salary))) AS range,
                 dept_id AS did];
    out = [FROM out EMIT did AS dept_id, range AS rng];
    STORE(out, OUTPUT);
    """ % self.emp_key
    groups = collections.defaultdict(list)
    for t in self.emp_table.elements():
        groups[t[1]].append(t[3])
    want = collections.Counter(
        (dept, 2 * (max(vals) - min(vals)))
        for dept, vals in groups.iteritems())
    self.check_result(query, want)
def test_aggregate_illegal_colref(self):
    """Referencing a non-grouped column next to an aggregate is rejected."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT
           X.dept_id + COUNT(X.salary) AS val];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(raco.myrial.groupby.NonGroupedAccessException):  # noqa
        self.check_result(query, None)
def test_nested_aggregates_are_illegal(self):
    """An aggregate nested inside another aggregate is rejected."""
    query = """
    out = [FROM SCAN(%s) AS X
           EMIT id+dept_id, MIN(53 + MAX(salary)) AS foo];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(NestedAggregateException):
        self.check_result(query, collections.Counter())
def test_standalone_countall(self):
    """COUNTALL applied directly to a relation, outside a comprehension."""
    query = """
    out = COUNTALL(SCAN(%s));
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter([(len(self.emp_table),)])
    self.check_result(query, want)
def test_multiway_bagcomp_with_unbox(self):
    """Return all employees in accounting making less than 30000"""
    query = """
    Salary = [30000];
    Dept = ["accounting"];
    out = [FROM SCAN(%s) AS E, SCAN(%s) AS D
           WHERE E.dept_id == D.id AND D.name == *Dept
           AND E.salary < *Salary EMIT E.$2 AS name];
    STORE(out, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    matches = [("Andrew Whitaker",),
               ("Victor Almeida",),
               ("Magdalena Balazinska",)]
    self.check_result(query, collections.Counter(matches))
def test_duplicate_bagcomp_aliases_are_illegal(self):
    """Two FROM entries with the same alias are rejected."""
    query = """
    X = SCAN(%s);
    out = [FROM X, X EMIT *];
    STORE(out, OUTPUT);
    """ % (self.emp_key,)
    with self.assertRaises(interpreter.DuplicateAliasException):
        self.check_result(query, collections.Counter())
def test_bagcomp_column_index_out_of_bounds(self):
    """A positional reference past the relation's arity is rejected."""
    query = """
    E = SCAN(%s);
    D = SCAN(%s);
    out = [FROM E, D WHERE E.$1 == D.$77
           EMIT E.name AS emp_name, D.$1 AS dept_name];
    STORE(out, OUTPUT);
    """ % (self.emp_key, self.dept_key)
    with self.assertRaises(ColumnIndexOutOfBounds):
        self.check_result(query, collections.Counter())
def test_abs(self):
    """Built-in ABS() against Python's abs()."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT id, ABS(val)];
    STORE(out, OUTPUT);
    """ % self.numbers_key
    want = collections.Counter(
        (key, abs(val)) for key, val in self.numbers_table.elements())
    self.check_result(query, want)
def test_ceil(self):
    """Built-in CEIL() against math.ceil()."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT id, CEIL(val)];
    STORE(out, OUTPUT);
    """ % self.numbers_key
    want = collections.Counter(
        (key, math.ceil(val))
        for key, val in self.numbers_table.elements())
    self.check_result(query, want)
def test_cos(self):
    """Built-in COS() against math.cos()."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT id, COS(val)];
    STORE(out, OUTPUT);
    """ % self.numbers_key
    want = collections.Counter(
        (key, math.cos(val))
        for key, val in self.numbers_table.elements())
    self.check_result(query, want)
def test_floor(self):
    """Built-in FLOOR() against math.floor()."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT id, FLOOR(val)];
    STORE(out, OUTPUT);
    """ % self.numbers_key
    want = collections.Counter(
        (key, math.floor(val))
        for key, val in self.numbers_table.elements())
    self.check_result(query, want)
def test_log(self):
    """Built-in LOG() against math.log(), on positive inputs only."""
    query = """
    out = [FROM SCAN(%s) AS X WHERE val > 0 EMIT id, LOG(val)];
    STORE(out, OUTPUT);
    """ % self.numbers_key
    want = collections.Counter(
        (key, math.log(val))
        for key, val in self.numbers_table.elements() if val > 0)
    self.check_result(query, want)
def test_sin(self):
    """Built-in SIN() against math.sin()."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT id, SIN(val)];
    STORE(out, OUTPUT);
    """ % self.numbers_key
    want = collections.Counter(
        (key, math.sin(val))
        for key, val in self.numbers_table.elements())
    self.check_result(query, want)
def test_sqrt(self):
    """Built-in SQRT() against math.sqrt(), on non-negative inputs only."""
    query = """
    out = [FROM SCAN(%s) X WHERE val >= 0 EMIT id, SQRT(val)];
    STORE(out, OUTPUT);
    """ % self.numbers_key
    want = collections.Counter(
        (key, math.sqrt(val))
        for key, val in self.numbers_table.elements() if val >= 0)
    self.check_result(query, want)
def test_tan(self):
    """Built-in TAN() against math.tan()."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT id, TAN(val)];
    STORE(out, OUTPUT);
    """ % self.numbers_key
    want = collections.Counter(
        (key, math.tan(val))
        for key, val in self.numbers_table.elements())
    self.check_result(query, want)
def test_md5(self):
    """Built-in md5() matches the high 64 bits of the md5 digest."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT id, md5(name)];
    STORE(out, OUTPUT);
    """ % self.emp_key

    def high_64_bits(s):
        digest = md5.new()
        digest.update(s)
        # Keep only the top 64 bits of the 128-bit digest.
        return int(digest.hexdigest(), 16) >> 64
    want = collections.Counter(
        (t[0], high_64_bits(t[2])) for t in self.emp_table.elements())
    self.check_result(query, want)
def test_pow(self):
    """Built-in POW() with an unboxed exponent, against Python's pow()."""
    query = """
    THREE = [3];
    out = [FROM SCAN(%s) X EMIT id, POW(X.val, *THREE)];
    STORE(out, OUTPUT);
    """ % self.numbers_key
    want = collections.Counter(
        (key, pow(val, 3))
        for key, val in self.numbers_table.elements())
    self.check_result(query, want)
def test_no_such_relation(self):
    """SCANning a relation that does not exist is rejected."""
    query = """
    out = [FROM SCAN(foo:bar:baz) x EMIT id, TAN(val)];
    STORE(out, OUTPUT);
    """
    with self.assertRaises(NoSuchRelationException):
        self.check_result(query, collections.Counter())
def test_bad_relation_name(self):
    """A FROM entry naming an undefined relation is rejected."""
    query = """
    y = empty(a:int);
    z = [from s y -- bug: s does not exist
         emit y.a];
    store(z, debug);
    """
    with self.assertRaises(NoSuchRelationException):
        self.check_result(query, collections.Counter())
def test_bad_alias(self):
    """Referencing the relation name instead of its alias is rejected."""
    query = """
    y = empty(a:int);
    z = [from y s -- bug: extra s
         emit y.a];
    store(z, debug);
    """
    with self.assertRaises(NoSuchRelationException):
        self.check_result(query, collections.Counter())
def test_bad_alias_wildcard(self):
    """A wildcard qualified by the wrong name is rejected."""
    query = """
    y = empty(a:int);
    z = [from y s -- bug: errant s
         emit y.*];
    store(z, debug);
    """
    with self.assertRaises(NoSuchRelationException):
        self.check_result(query, collections.Counter())
def test_scan_error(self):
    """A malformed expression inside EMIT fails to compile."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT id, !!!FROG(val)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(MyrialCompileException):
        self.check_result(query, collections.Counter())
def test_relation_scope_error(self):
    """EMITting through an alias not bound in the FROM clause is rejected."""
    query = """
    out = [FROM EMPTY(x:INT) AS X EMIT z.*];
    STORE(out, OUTPUT);
    """
    with self.assertRaises(NoSuchRelationException):
        self.check_result(query, collections.Counter())
def test_relation_scope_error2(self):
    """A relation defined outside the comprehension is not in scope
    inside it unless it appears in the FROM clause."""
    query = """
    z = EMPTY(z:INT);
    out = [FROM EMPTY(x:INT) AS X EMIT z.*];
    STORE(out, OUTPUT);
    """
    with self.assertRaises(NoSuchRelationException):
        self.check_result(query, collections.Counter())
def test_parse_error(self):
    """A syntactically invalid positional reference fails to compile."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT $(val)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(MyrialCompileException):
        self.check_result(query, collections.Counter())
def test_no_such_udf(self):
    """Calling an undefined function is rejected."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT FooFunction(X.salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(NoSuchFunctionException):
        self.check_result(query, collections.Counter())
def test_reserved_udf(self):
    """Defining a UDF with a reserved (builtin) name is rejected."""
    query = """
    DEF avg(x, y): (x + y) / 2;
    out = [FROM SCAN(%s) AS X EMIT avg(X.salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(ReservedTokenException):
        self.check_result(query, collections.Counter())
def test_duplicate_udf(self):
    """Defining the same UDF name twice is rejected."""
    query = """
    DEF foo(x, y): x + y;
    DEF bar(): 7;
    DEF foo(x): -1 * x;
    out = [FROM SCAN(%s) AS X EMIT foo(X.salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(DuplicateFunctionDefinitionException):
        self.check_result(query, collections.Counter())
def test_invalid_argument_udf(self):
    """Calling a two-argument UDF with one argument is rejected."""
    query = """
    DEF Foo(x, y): cos(x) * sin(y);
    out = [FROM SCAN(%s) AS X EMIT Foo(X.salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(InvalidArgumentList):
        self.check_result(query, collections.Counter())
def test_undefined_variable_udf(self):
    """A UDF body referencing an undeclared variable is rejected."""
    query = """
    DEF Foo(x, y): cos(x) * sin(z);
    out = [FROM SCAN(%s) AS X EMIT Foo(X.salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(UndefinedVariableException):
        self.check_result(query, collections.Counter())
def test_duplicate_variable_udf(self):
    """A UDF declaring the same parameter name twice is rejected."""
    query = """
    DEF Foo(x, x): cos(x) * sin(x);
    out = [FROM SCAN(%s) AS X EMIT Foo(X.salary, X.dept_id)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(DuplicateVariableException):
        self.check_result(query, collections.Counter())
def test_nary_udf(self):
    """A UDF returning a tuple expands into multiple named columns."""
    query = """
    DEF Foo(a,b): [a + b, a - b];
    out = [FROM SCAN(%s) AS X EMIT id, Foo(salary, dept_id) as [x, y]];
    STORE(out, OUTPUT);
    """ % self.emp_key
    # Foo(salary, dept_id) -> (salary + dept_id, salary - dept_id).
    want = collections.Counter(
        (t[0], t[1] + t[3], t[3] - t[1]) for t in self.emp_table)
    self.check_result(query, want)
def test_nary_udf_name_count(self):
    """Binding three names to a two-column UDF result is rejected."""
    query = """
    DEF Foo(a,b): [a + b, a - b];
    out = [FROM SCAN(%s) AS X EMIT id, Foo(salary, dept_id) as [x, y, z]];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(IllegalColumnNamesException):
        self.check_result(query, None)
def test_nary_udf_illegal_nesting(self):
    """A tuple-valued UDF nested inside another tuple-valued UDF is
    rejected."""
    query = """
    DEF Foo(x): [x + 3, x - 3];
    DEF Bar(a,b): [Foo(x), Foo(b)];
    out = [FROM SCAN(%s) AS X EMIT id, Bar(salary, dept_id) as [x, y]];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(NestedTupleExpressionException):
        self.check_result(query, None)
def test_nary_udf_illegal_wildcard(self):
    """A wildcard inside a UDF body is rejected."""
    query = """
    DEF Foo(x): [x + 3, *];
    out = [FROM SCAN(%s) AS X EMIT id, Foo(salary, dept_id) as [x, y]];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(IllegalWildcardException):
        self.check_result(query, None)
def test_triangle_udf(self):
    """A two-argument scalar UDF used inside EMIT."""
    query = """
    DEF Triangle(a,b): (a*b)//2;
    out = [FROM SCAN(%s) AS X EMIT id, Triangle(X.salary, dept_id) AS t];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (t[0], t[1] * t[3] / 2) for t in self.emp_table)
    self.check_result(query, want)
def test_noop_udf(self):
    """A zero-argument UDF whose body is a constant expression."""
    expr = "30 + 15 // 7 + -45"
    query = """
    DEF Noop(): %s;
    out = [Noop() AS t];
    STORE(out, OUTPUT);
    """ % expr
    # eval on a fixed literal: used only to keep one source of truth
    # for the constant expression above.
    self.check_result(query, collections.Counter([(eval(expr),)]))
def test_const(self):
    """A CONST declaration usable as a scalar expression."""
    expr = "30 + 15 // 7 + -45"
    query = """
    CONST myconstant: %s;
    out = [myconstant AS t];
    STORE(out, OUTPUT);
    """ % expr
    # eval on a fixed literal: used only to keep one source of truth
    # for the constant expression above.
    self.check_result(query, collections.Counter([(eval(expr),)]))
def test_composition_udf(self):
    """UDF calls compose: Add6(Add7(Add6(x))) adds 19."""
    query = """
    DEF Add7(x): x + 7;
    DEF Add6(x): x + 6;
    out = [FROM SCAN(%s) AS X EMIT id, Add6(Add7(Add6(X.salary)))];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (t[0], t[3] + 19) for t in self.emp_table)
    self.check_result(query, want)
def test_nested_udf(self):
    """A UDF body may call a previously-defined UDF."""
    query = """
    DEF Add7(x): x + 7;
    DEF Add10(x): Add7(x) + 3;
    out = [FROM SCAN(%s) AS X EMIT id, Add10(X.salary)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (t[0], t[3] + 10) for t in self.emp_table)
    self.check_result(query, want)
def test_regression_150(self):
    """Repeated invocation of a UDF."""
    query = """
    DEF transform(x): pow(10, x/pow(2,16)*3.5);
    out = [FROM SCAN(%s) AS X EMIT id, transform(salary),
           transform(dept_id)];
    STORE(out, OUTPUT);
    """ % self.emp_key

    def transform(v):
        # float() forces true division, matching the query's semantics.
        return pow(10, float(v) / pow(2, 16) * 3.5)
    want = collections.Counter(
        (t[0], transform(t[3]), transform(t[1])) for t in self.emp_table)
    self.check_result(query, want)
def test_safediv_2_function(self):
    """Two-argument SafeDiv: division by zero yields the default 0."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT SafeDiv(X.salary,X.dept_id-1)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (t[3] / (t[1] - 1) if t[1] - 1 > 0 else 0,)
        for t in self.emp_table)
    self.check_result(query, want)
def test_safediv_3_function(self):
    """Three-argument SafeDiv: division by zero yields the given default."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT SafeDiv(X.salary,X.dept_id-1,42)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (t[3] / (t[1] - 1) if t[1] - 1 > 0 else 42,)
        for t in self.emp_table)
    self.check_result(query, want)
def test_answer_to_everything_function(self):
    """The built-in whimsy function returns 42."""
    query = """
    out = [TheAnswerToLifeTheUniverseAndEverything()];
    STORE(out, OUTPUT);
    """
    want = collections.Counter([(42,)])
    self.check_result(query, want)
def test_least_function(self):
    """least() over three arguments matches Python's min()."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT least(X.id,X.dept_id,1)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (min(t[0], t[1], 1),) for t in self.emp_table)
    self.check_result(query, want)
def test_greatest_function(self):
    """greatest() over three arguments matches Python's max()."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT greatest(X.id,X.dept_id,3)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (max(t[0], t[1], 3),) for t in self.emp_table)
    self.check_result(query, want)
def test_lesser_function(self):
    """Binary lesser() matches Python's min()."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT lesser(X.id,X.dept_id)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (min(t[0], t[1]),) for t in self.emp_table)
    self.check_result(query, want)
def test_greater_function(self):
    """Binary greater() matches Python's max()."""
    query = """
    out = [FROM SCAN(%s) AS X EMIT greater(X.id,X.dept_id)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    want = collections.Counter(
        (max(t[0], t[1]),) for t in self.emp_table)
    self.check_result(query, want)
def test_uda_illegal_init(self):
    """A wildcard in a UDA's initializer clause is rejected."""
    query = """
    uda Foo(x,y) {
        [0 as A, *];
        [A + x, A + y];
        A;
    };
    out = [FROM SCAN(%s) AS X EMIT dept_id, Foo(salary, id)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(IllegalWildcardException):
        self.check_result(query, None)
def test_uda_illegal_update(self):
    """A wildcard in a UDA's update clause fails to compile."""
    query = """
    uda Foo(x,y) {
        [0 as A, 1 as B];
        [A + x + y, *];
        A + B;
    };
    out = [FROM SCAN(%s) AS X EMIT dept_id, Foo(salary, id)];
    STORE(out, OUTPUT);
    """ % self.emp_key
    with self.assertRaises(MyrialCompileException):
        self.check_result(query, None)
    def test_uda_nested_emitter(self):
        """Invoking a UDA from another UDA's emitter is a nested aggregate."""
        query = """
        uda Foo(x) {
          [0 as A];
          [A + x];
          [A];
        };
        uda Bar(x) {
          [0 as B];
          [B + x];
          Foo(B);
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, Bar(salary)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        with self.assertRaises(NestedAggregateException):
            self.check_result(query, None)
    def test_uda_nested_init(self):
        """Invoking a UDA in another UDA's initializer is a nested aggregate."""
        query = """
        uda Foo(x) {
          [0 as A];
          [A + x];
          [A];
        };
        uda Bar(x) {
          [Foo(0) as B];
          [B + x];
          B;
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, Bar(salary)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        with self.assertRaises(NestedAggregateException):
            self.check_result(query, None)
    def test_uda_nested_update(self):
        """Invoking a UDA in another UDA's update list is a nested aggregate."""
        query = """
        uda Foo(x) {
          [0 as A];
          [A + x];
          [A];
        };
        uda Bar(x) {
          [0 as B];
          [Foo(B)];
          B;
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, Bar(salary)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        with self.assertRaises(NestedAggregateException):
            self.check_result(query, None)
    def test_uda_unary_emit_arg_list(self):
        """A single-expression emitter may be written as a bracketed list."""
        query = """
        uda MyAvg(val) {
           [0 as _sum, 0 as _count];
           [_sum + val, _count + 1];
           [_sum / _count];
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, MyAvg(salary)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        def agg_func(x):
            # Expected per-group value: the mean of the group's salaries.
            return float(sum(x)) / len(x)
        expected = self.__aggregate_expected_result(agg_func)
        self.check_result(query, expected)
    def test_second_max_uda(self):
        """UDA to compute the second largest element in a collection."""
        query = """
        uda SecondMax(val) {
          [0 as _max, 0 as second_max];
          [case when val > _max then val else _max end,
           case when val > _max then _max when val > second_max then val
           else second_max end];
          second_max;
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, SecondMax(salary)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        def agg_func(x):
            # A singleton group yields 0, matching the UDA's zero initializer;
            # otherwise the true second-largest value of the group.
            if len(x) < 2:
                return 0
            else:
                return sorted(x, reverse=True)[1]
        expected = self.__aggregate_expected_result(agg_func)
        self.check_result(query, expected)
    def test_multi_invocation_uda(self):
        """Two invocations of one UDA in an EMIT are evaluated independently."""
        query = """
        uda MaxDivMin(val) {
          [9999999 as _min, 0 as _max];
          [case when val < _min then val else _min end,
           case when val > _max then val else _max end];
          _max / _min;
        };
        out = [FROM SCAN(%s) AS X EMIT
               MaxDivMin(id) + dept_id + MaxDivMin(salary), dept_id];
        STORE(out, OUTPUT);
        """ % self.emp_key
        # Group rows by dept_id, then compute both max/min ratios per group.
        d = collections.defaultdict(list)
        for t in self.emp_table.elements():
            d[t[1]].append(t)
        results = []
        for k, tpls in d.iteritems():
            max_salary = max(t[3] for t in tpls)
            min_salary = min(t[3] for t in tpls)
            max_id = max(t[0] for t in tpls)
            min_id = min(t[0] for t in tpls)
            results.append((k + float(max_salary) / min_salary +
                            float(max_id) / min_id, k))
        self.check_result(query, collections.Counter(results))
    def test_multiple_uda(self):
        """Two distinct UDAs may be invoked in the same EMIT clause."""
        query = """
        uda MyMax1(val) {
          [0 as _max];
          [case when val > _max then val else _max end];
          _max;
        };
        uda MyMax2(val) {
          [0 as _max];
          [case when val > _max then val else _max end];
          _max;
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, MyMax1(salary), MyMax2(id)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        # Expected: per dept_id, the max salary and the max id.
        d = collections.defaultdict(list)
        for t in self.emp_table.elements():
            d[t[1]].append(t)
        results = []
        for k, tpls in d.iteritems():
            max_salary = max(t[3] for t in tpls)
            max_id = max(t[0] for t in tpls)
            results.append((k, max_salary, max_id))
        self.check_result(query, collections.Counter(results))
    def test_uda_no_emit_clause(self):
        """A UDA without an emitter emits its state (here the count)."""
        query = """
        uda MyCount() {
          [0 as _count];
          [_count + 1];
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, MyCount()];
        STORE(out, OUTPUT);
        """ % self.emp_key
        self.check_result(query, self.__aggregate_expected_result(len))
    def test_uda_no_emit_clause_many_cols(self):
        """Without an emitter, a UDA emits all state columns in declared order."""
        query = """
        uda MyAggs(x) {
           [0 as _count, 0 as _sum, 0 as _sumsq];
           [_count + 1, _sum + x, _sumsq + x*x];
        };
        out = [FROM SCAN(%s) AS X EMIT MyAggs(salary) as [a, b, c]];
        STORE(out, OUTPUT);
        """ % self.emp_key
        c = len(list(self.emp_table.elements()))
        s = sum(d for a, b, c, d in self.emp_table.elements())
        sq = sum(d * d for a, b, c, d in self.emp_table.elements())
        expected = collections.Counter([(c, s, sq)])
        self.check_result(query, expected)
        # Test with two different column orders in case the undefined
        # order used by Python is correct by chance.
        query = """
        uda MyAggs(x) {
           [0 as _count, 0 as _sumsq, 0 as _sum];
           [_count + 1, _sumsq + x*x, _sum + x];
        };
        out = [FROM SCAN(%s) AS X EMIT MyAggs(salary) as [a, b, c]];
        STORE(out, OUTPUT);
        """ % self.emp_key
        c = len(list(self.emp_table.elements()))
        sq = sum(d * d for a, b, c, d in self.emp_table.elements())
        s = sum(d for a, b, c, d in self.emp_table.elements())
        expected = collections.Counter([(c, sq, s)])
        self.check_result(query, expected)
    def test_uda_with_udf(self):
        """A UDA body may call a previously defined UDF (foo) in its update."""
        query = """
        def foo(x, y): x + y;
        uda max2(x, y) {
          [0 as _max];
          [case when foo(x, y) > _max then foo(x, y) else _max end];
          _max;
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, max2(salary, id)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        # Expected: per dept_id, the maximum of salary + id.
        d = collections.defaultdict(list)
        for t in self.emp_table.elements():
            d[t[1]].append(t)
        results = []
        for k, tpls in d.iteritems():
            results.append((k, max(t[3] + t[0] for t in tpls)))
        self.check_result(query, collections.Counter(results))
    def test_uda_with_subsequent_project_0(self):
        """Projecting $0 (the group key) from a UDA result works."""
        query = """
        def foo(x, y): x + y;
        uda max2(x, y) {
          [0 as _max];
          [case when foo(x, y) > _max then foo(x, y) else _max end];
          _max;
        };
        inter = [FROM SCAN(%s) AS X EMIT dept_id, max2(salary, id)];
        out = [from inter emit $0];
        STORE(out, OUTPUT);
        """ % self.emp_key
        d = collections.defaultdict(list)
        for t in self.emp_table.elements():
            d[t[1]].append(t)
        results = []
        for k, tpls in d.iteritems():
            results.append((k, max(t[3] + t[0] for t in tpls)))
        # Keep only column 0 (dept_id), mirroring the "emit $0" projection.
        results = [(t[0],) for t in results]
        self.check_result(query, collections.Counter(results))
    def test_uda_with_subsequent_project_1(self):
        """Projecting $1 (the UDA output column) from a UDA result works."""
        query = """
        def foo(x, y): x + y;
        uda max2(x, y) {
          [0 as _max];
          [case when foo(x, y) > _max then foo(x, y) else _max end];
          _max;
        };
        inter = [FROM SCAN(%s) AS X EMIT dept_id, max2(salary, id)];
        out = [from inter emit $1];
        STORE(out, OUTPUT);
        """ % self.emp_key
        d = collections.defaultdict(list)
        for t in self.emp_table.elements():
            d[t[1]].append(t)
        results = []
        for k, tpls in d.iteritems():
            results.append((k, max(t[3] + t[0] for t in tpls)))
        # Keep only column 1 (the aggregate), mirroring the "emit $1" projection.
        results = [(t[1],) for t in results]
        self.check_result(query, collections.Counter(results))
    def test_uda_with_subsequent_project_2(self):
        """Projecting one of two UDA output columns picks the correct one."""
        query = """
        def foo(x, y): x + y;
        uda max2(x, y) {
          [0 as _max];
          [case when foo(x, y) > _max then foo(x, y) else _max end];
          _max;
        };
        inter = [FROM SCAN(%s) AS X EMIT dept_id, max2(salary, id)
                 , max2(dept_id, id)];
        out = [from inter emit $1];
        STORE(out, OUTPUT);
        """ % self.emp_key
        d = collections.defaultdict(list)
        for t in self.emp_table.elements():
            d[t[1]].append(t)
        results = []
        for k, tpls in d.iteritems():
            results.append((k,
                            max(t[3] + t[0] for t in tpls),
                            max(t[1] + t[0] for t in tpls)))
        # Keep only column 1: the first max2 invocation's output.
        results = [(t[1],) for t in results]
        self.check_result(query, collections.Counter(results))
    def __run_multiple_emitter_test(self, include_column_names):
        """Shared driver for multi-emitter UDA tests.

        Runs a UDA with three emitters mixed with other emit expressions;
        include_column_names toggles an "AS [...]" rename of the UDA outputs.
        """
        if include_column_names:
            names = " AS [mysum, mycount, myavg]"
        else:
            names = ""
        query = """
        uda SumCountMean(x) {
          [0 as _sum, 0 as _count];
          [_sum + x, _count + 1];
          [_sum, _count, _sum/_count];
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, SumCountMean(salary) %s,
               dept_id+3, max(id) as max_id];
        STORE(out, OUTPUT);
        """ % (self.emp_key, names)
        # Expected: per dept_id, salary sum/count/mean plus the extra columns.
        d = collections.defaultdict(list)
        for t in self.emp_table.elements():
            d[t[1]].append(t)
        results = []
        for k, tpls in d.iteritems():
            _sum = sum(x[3] for x in tpls)
            _count = len(tpls)
            _avg = float(_sum) / _count
            _max_id = max(x[0] for x in tpls)
            results.append((k, _sum, _count, _avg, k + 3, _max_id))
        self.check_result(query, collections.Counter(results))
    def test_uda_multiple_emitters_default_names(self):
        """Multi-emitter UDA without AS [...]: default column names apply."""
        self.__run_multiple_emitter_test(False)
    def test_uda_multiple_emitters_provided_names(self):
        """Multi-emitter UDA with AS [...]: provided names land in the scheme."""
        self.__run_multiple_emitter_test(True)
        # Also check the output scheme: renamed UDA columns keep their names,
        # while the unnamed dept_id+3 expression gets a default _COLUMN4_ name.
        scheme_actual = self.db.get_scheme('OUTPUT')
        scheme_expected = scheme.Scheme([
            ('dept_id', types.LONG_TYPE), ('mysum', types.LONG_TYPE),
            ('mycount', types.LONG_TYPE), ('myavg', types.FLOAT_TYPE),
            ('_COLUMN4_', types.LONG_TYPE), ('max_id', types.LONG_TYPE)])
        self.assertEquals(scheme_actual, scheme_expected)
    def test_emit_arg_bad_column_name_length(self):
        """AS [...] with more names than emitted columns is an error."""
        query = """
        out = [FROM SCAN(%s) AS X EMIT dept_id AS [dept_id1, dept_id2]];
        STORE(out, OUTPUT);
        """ % self.emp_key
        with self.assertRaises(IllegalColumnNamesException):
            self.check_result(query, None)
    def test_uda_bad_column_name_length(self):
        """AS [...] naming more columns than the UDA emits is an error."""
        query = """
        uda Fubar(x, y, z) {
          [0 as Q];
          [Q + 1];
          [1,2,3];
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, Fubar(1, salary, id)
          AS [A, B, C, D]];
        STORE(out, OUTPUT);
        """ % self.emp_key
        with self.assertRaises(IllegalColumnNamesException):
            self.check_result(query, None)
    def test_uda_init_tuple_valued(self):
        """A tuple-valued UDA used in another UDA's initializer is rejected."""
        query = """
        uda Foo(x) {
          [0 as Q];
          [Q + 1];
          [1,2,3];
        };
        uda Bar(x) {
          [Foo(0) as [A, B, C]];
          [Q * 8];
          [1,2,3];
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, Bar(salary)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        with self.assertRaises(NestedTupleExpressionException):
            self.check_result(query, None)
    def test_uda_update_tuple_valued(self):
        """A tuple-valued UDA used in another UDA's update list is rejected."""
        query = """
        uda Foo(x) {
          [0 as Q];
          [Q + 1];
          [1,2,3];
        };
        uda Bar(x) {
          [0 as Q];
          [Foo(Q + 1)];
          [1,2,3];
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, Bar(salary)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        with self.assertRaises(NestedTupleExpressionException):
            self.check_result(query, None)
    def test_uda_result_tuple_valued(self):
        """A tuple-valued UDA used inside another UDA's emitter is rejected."""
        query = """
        uda Foo(x) {
          [0 as Q];
          [Q + 1];
          [1,2,3];
        };
        uda Bar(x) {
          [0 as Q];
          [Q + 2];
          [1,2,Foo(3)];
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, Bar(salary)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        with self.assertRaises(NestedTupleExpressionException):
            self.check_result(query, None)
    def test_uda_multiple_emitters_nested(self):
        """Test that we raise an Exception if a tuple-valued UDA doesn't appear
        by itself in an emit expression."""
        query = """
        uda SumCountMean(x) {
          [0 as _sum, 0 as _count];
          [_sum + x, _count + 1];
          [_sum, _count, _sum/_count];
        };
        out = [FROM SCAN(%s) AS X EMIT dept_id, SumCountMean(salary) + 5];
        STORE(out, OUTPUT);
        """ % self.emp_key
        # SumCountMean has three emitters, so "+ 5" over its tuple is illegal.
        with self.assertRaises(NestedTupleExpressionException):
            self.check_result(query, None)
__DECOMPOSED_UDA = """
uda LogicalAvg(x) {
[0 as _sum, 0 as _count];
[_sum + x, _count + 1];
float(_sum); -- Note bogus return value
};
uda LocalAvg(x) {
[0 as _sum, 0 as _count];
[_sum + x, _count + 1];
};
uda RemoteAvg(_local_sum, _local_count) {
[0 as _sum, 0 as _count];
[_sum + _local_sum, _count + _local_count];
[_sum/_count];
};
uda* LogicalAvg {LocalAvg, RemoteAvg};
"""
__ARG_MAX_UDA = """
def pickval(id, salary, val, _id, _salary, _val):
case when salary > _salary then val
when salary = _salary and id > _id then val
else _val end;
uda ArgMax(id, dept_id, name, salary) {
[0 as _id, 0 as _dept_id, "" as _name, 0 as _salary];
[pickval(id, salary, id, _id, _salary, _id),
pickval(id, salary, dept_id, _id, _salary, _dept_id),
pickval(id, salary, name, _id, _salary, _name),
pickval(id, salary, salary, _id, _salary, _salary)];
[_id, _dept_id, _name, _salary];
};
"""
__ARG_MAX_UDA_UNNECESSARY_EXPR = """
def pickval(id, salary, val, _id, _salary, _val):
case when salary > _salary then val
when salary = _salary and id > _id then val
else _val end;
uda ArgMax(id, dept_id, name, salary) {
[0 as _id, 0 as _dept_id, "" as _name, 0 as _salary];
[pickval(id, salary, greater(id, id), _id, _salary, _id),
pickval(id, salary, lesser(dept_id, dept_id), _id, _salary,
_dept_id),
pickval(id, salary, case when name="" then name else name end, _id,
_salary, _name),
pickval(id, salary, salary * 1, _id, _salary, _salary)];
[_id, _dept_id, _name, _salary];
};
"""
    def test_decomposable_average_uda(self):
        """Test of a decomposed average UDA.

        Note that the logical aggregate returns a broken value, so
        this test only passes if we decompose the aggregate properly.
        """
        query = """%s
        out = [FROM SCAN(%s) AS X EMIT dept_id, LogicalAvg(salary)];
        STORE(out, OUTPUT);
        """ % (TestQueryFunctions.__DECOMPOSED_UDA, self.emp_key)
        # Expected: per dept_id, the mean salary of the group.
        result_dict = collections.defaultdict(list)
        for t in self.emp_table.elements():
            result_dict[t[1]].append(t[3])
        tuples = []
        for key, vals in result_dict.iteritems():
            _cnt = len(vals)
            _sum = sum(vals)
            tuples.append((key, float(_sum) / _cnt))
        self.check_result(query, collections.Counter(tuples))
def test_decomposable_nary_uda(self):
query = """
uda Sum2(x, y) {
[0 as sum_x, 0 as sum_y];
[sum_x + x, sum_y + y];
};
uda* Sum2 {Sum2, Sum2};
out = [FROM SCAN(%s) AS X EMIT
Sum2(id, salary) AS [id_sum, salary_sum]];
STORE(out, OUTPUT);
""" % self.emp_key
result_dict = collections.defaultdict(list)
for t in self.emp_table.elements():
result_dict[t[1]].append(t)
id_sum = sum(t[0] for t in self.emp_table.elements())
salary_sum = sum(t[3] for t in self.emp_table.elements())
tuples = [(id_sum, salary_sum)]
self.check_result(query, collections.Counter(tuples))
    def test_arg_max_uda(self):
        """Test of an arg_max UDA.
        """
        query = """
        {arg}
        emp = scan({emp});
        out = [from emp emit ArgMax(id, dept_id, name, salary)];
        store(out, OUTPUT);
        """.format(arg=self.__ARG_MAX_UDA, emp=self.emp_key)
        # Expected: the row maximizing (salary, id) lexicographically.
        tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
                  if all(d > d1 or d == d1 and a >= a1
                         for a1, b1, c1, d1 in self.emp_table)]
        self.check_result(query, collections.Counter(tuples))
    def test_arg_max_uda_with_references(self):
        """Test of an arg_max UDA with named, unnamed, and dotted
        attribute references.
        """
        query = """
        {arg}
        emp = scan({emp});
        out = [from emp emit ArgMax(id, emp.dept_id, $2, emp.$3)];
        store(out, OUTPUT);
        """.format(arg=self.__ARG_MAX_UDA, emp=self.emp_key)
        # Expected: the row maximizing (salary, id) lexicographically.
        tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
                  if all(d > d1 or d == d1 and a >= a1
                         for a1, b1, c1, d1 in self.emp_table)]
        self.check_result(query, collections.Counter(tuples))
    def test_arg_max_uda_with_functions(self):
        """Test of an arg_max UDA with expressions as inputs.
        """
        query = """
        {arg}
        emp = scan({emp});
        out = [from emp emit ArgMax(id,
                                    greater(dept_id, dept_id),
                                    case when id=1 then name else name end,
                                    salary)];
        store(out, OUTPUT);
        """.format(arg=self.__ARG_MAX_UDA, emp=self.emp_key)
        # The wrapped arguments are no-ops, so the expected row is unchanged:
        # the one maximizing (salary, id) lexicographically.
        tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
                  if all(d > d1 or d == d1 and a >= a1
                         for a1, b1, c1, d1 in self.emp_table)]
        self.check_result(query, collections.Counter(tuples))
def test_decomposable_arg_max_uda(self):
"""Test of a decomposable arg_max UDA.
"""
query = """
{arg}
uda* ArgMax {{ArgMax, ArgMax}};
emp = scan({emp});
out = [from emp emit ArgMax(id, dept_id, name, salary)
as [a, b, c, d]];
store(out, OUTPUT);
""".format(arg=self.__ARG_MAX_UDA, emp=self.emp_key)
tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
if all(d > d1 or d == d1 and a >= a1
for a1, b1, c1, d1 in self.emp_table)]
self.check_result(query, collections.Counter(tuples))
"""Test of an arg_max UDA with named, unnamed, and dotted
attribute references.
"""
    def test_decomposable_arg_max_uda_with_references(self):
        """Test of a decomposable arg_max UDA with named, unnamed, and dotted
        attribute references.
        """
        query = """
        {arg}
        uda* ArgMax {{ArgMax, ArgMax}};
        emp = scan({emp});
        out = [from emp emit ArgMax(id, emp.dept_id, $2, emp.$3)
               as [a, b, c, d]];
        store(out, OUTPUT);
        """.format(arg=self.__ARG_MAX_UDA, emp=self.emp_key)
        # Expected: the row maximizing (salary, id) lexicographically.
        tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
                  if all(d > d1 or d == d1 and a >= a1
                         for a1, b1, c1, d1 in self.emp_table)]
        self.check_result(query, collections.Counter(tuples))
    def test_decomposable_arg_max_uda_with_functions(self):
        """Test of a decomposable arg_max UDA with expressions as inputs.
        """
        query = """
        {arg}
        uda* ArgMax {{ArgMax, ArgMax}};
        emp = scan({emp});
        out = [from emp emit ArgMax(id,
                                    greater(dept_id, dept_id),
                                    case when id=1 then name else name end,
                                    salary)];
        store(out, OUTPUT);
        """.format(arg=self.__ARG_MAX_UDA, emp=self.emp_key)
        # The wrapped arguments are no-ops, so the expected row is unchanged.
        tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
                  if all(d > d1 or d == d1 and a >= a1
                         for a1, b1, c1, d1 in self.emp_table)]
        self.check_result(query, collections.Counter(tuples))
    def test_arg_max_uda_internal_exprs(self):
        """Test of an arg_max UDA.
        """
        query = """
        {arg}
        emp = scan({emp});
        out = [from emp emit ArgMax(id, dept_id, name, salary)];
        store(out, OUTPUT);
        """.format(arg=self.__ARG_MAX_UDA_UNNECESSARY_EXPR, emp=self.emp_key)
        # Same semantics as test_arg_max_uda: the no-op wrappers inside the
        # UDA's update list must not change the result.
        tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
                  if all(d > d1 or d == d1 and a >= a1
                         for a1, b1, c1, d1 in self.emp_table)]
        self.check_result(query, collections.Counter(tuples))
    def test_arg_max_uda_internal_exprs_with_references(self):
        """Test of an arg_max UDA with named, unnamed, and dotted
        attribute references.
        """
        query = """
        {arg}
        emp = scan({emp});
        out = [from emp emit ArgMax(id, emp.dept_id, $2, emp.$3)];
        store(out, OUTPUT);
        """.format(arg=self.__ARG_MAX_UDA_UNNECESSARY_EXPR, emp=self.emp_key)
        # Expected: the row maximizing (salary, id) lexicographically.
        tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
                  if all(d > d1 or d == d1 and a >= a1
                         for a1, b1, c1, d1 in self.emp_table)]
        self.check_result(query, collections.Counter(tuples))
    def test_arg_max_uda_internal_exprs_with_functions(self):
        """Test of an arg_max UDA with expressions as inputs.
        """
        query = """
        {arg}
        emp = scan({emp});
        out = [from emp emit ArgMax(id,
                                    greater(dept_id, dept_id),
                                    case when id=1 then name else name end,
                                    salary)];
        store(out, OUTPUT);
        """.format(arg=self.__ARG_MAX_UDA_UNNECESSARY_EXPR, emp=self.emp_key)
        # No-op wrappers both at the call site and inside the UDA; the
        # expected row is still the (salary, id) lexicographic maximum.
        tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
                  if all(d > d1 or d == d1 and a >= a1
                         for a1, b1, c1, d1 in self.emp_table)]
        self.check_result(query, collections.Counter(tuples))
def test_decomposable_arg_max_uda_internal_exprs(self):
"""Test of a decomposable arg_max UDA.
"""
query = """
{arg}
uda* ArgMax {{ArgMax, ArgMax}};
emp = scan({emp});
out = [from emp emit ArgMax(id, dept_id, name, salary)
as [a, b, c, d]];
store(out, OUTPUT);
""".format(arg=self.__ARG_MAX_UDA_UNNECESSARY_EXPR, emp=self.emp_key)
tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
if all(d > d1 or d == d1 and a >= a1
for a1, b1, c1, d1 in self.emp_table)]
self.check_result(query, collections.Counter(tuples))
"""Test of an arg_max UDA with named, unnamed, and dotted
attribute references.
"""
    def test_decomposable_arg_max_uda_internal_exprs_with_references(self):
        """Test of a decomposable arg_max UDA with named, unnamed, and dotted
        attribute references.
        """
        query = """
        {arg}
        uda* ArgMax {{ArgMax, ArgMax}};
        emp = scan({emp});
        out = [from emp emit ArgMax(id, emp.dept_id, $2, emp.$3)
               as [a, b, c, d]];
        store(out, OUTPUT);
        """.format(arg=self.__ARG_MAX_UDA_UNNECESSARY_EXPR, emp=self.emp_key)
        # Expected: the row maximizing (salary, id) lexicographically.
        tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
                  if all(d > d1 or d == d1 and a >= a1
                         for a1, b1, c1, d1 in self.emp_table)]
        self.check_result(query, collections.Counter(tuples))
    def test_decomposable_arg_max_uda_internal_exprs_with_functions(self):
        """Test of a decomposable arg_max UDA with expressions as inputs.
        """
        query = """
        {arg}
        uda* ArgMax {{ArgMax, ArgMax}};
        emp = scan({emp});
        out = [from emp emit ArgMax(id,
                                    greater(dept_id, dept_id),
                                    case when id=1 then name else name end,
                                    salary)];
        store(out, OUTPUT);
        """.format(arg=self.__ARG_MAX_UDA_UNNECESSARY_EXPR, emp=self.emp_key)
        # Expected: the row maximizing (salary, id) lexicographically.
        tuples = [(a, b, c, d) for (a, b, c, d) in self.emp_table
                  if all(d > d1 or d == d1 and a >= a1
                         for a1, b1, c1, d1 in self.emp_table)]
        self.check_result(query, collections.Counter(tuples))
    def test_decomposable_average_uda_repeated(self):
        """Test of repeated invocations of decomposed UDAs."""
        query = """%s
        out = [FROM SCAN(%s) AS X EMIT dept_id,
               LogicalAvg(salary) + LogicalAvg($0)];
        STORE(out, OUTPUT);
        """ % (TestQueryFunctions.__DECOMPOSED_UDA, self.emp_key)
        result_dict = collections.defaultdict(list)
        for t in self.emp_table.elements():
            result_dict[t[1]].append(t)
        tuples = []
        for key, vals in result_dict.iteritems():
            _cnt = len(vals)
            _salary_sum = sum(t[3] for t in vals)
            _id_sum = sum(t[0] for t in vals)
            # avg(salary) + avg(id) == (sum(salary) + sum(id)) / count
            tuples.append((key, (float(_salary_sum) + float(_id_sum)) / _cnt))
        self.check_result(query, collections.Counter(tuples))
    def test_decomposable_sum_uda(self):
        """Test of a decomposed sum UDA.

        Note that the logical aggregate returns a broken value, so
        this test only passes if we decompose the aggregate properly.
        """
        query = """
        uda MySumBroken(x) {
          [0 as _sum];
          [_sum + x];
          17; -- broken
        };
        uda MySum(x) {
          [0 as _sum];
          [_sum + x];
        };
        uda* MySumBroken {MySum, MySum};
        out = [FROM SCAN(%s) AS X EMIT dept_id, MySumBroken(salary)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        # If decomposition happens, the MySum pair yields the true group sums.
        self.check_result(query, self.__aggregate_expected_result(sum))
    def test_decomposable_uda_with_builtin_agg(self):
        """Test of a decomposed UDA + builtin aggregate.

        Note that the logical aggregate returns a broken value, so
        this test only passes if we decompose the aggregate properly.
        """
        query = """
        uda MySumBroken(x) {
          [0 as _sum];
          [_sum + x];
          17; -- broken
        };
        uda MySum(x) {
          [0 as _sum];
          [_sum + x];
        };
        uda* MySumBroken {MySum, MySum};
        out = [FROM SCAN(%s) AS X EMIT dept_id, MySumBroken(salary), SUM(id)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        # Expected: per dept_id, the salary sum (via the UDA) and id sum
        # (via the builtin SUM) side by side.
        result_dict = collections.defaultdict(list)
        for t in self.emp_table.elements():
            result_dict[t[1]].append(t)
        tuples = []
        for key, vals in result_dict.iteritems():
            _salary_sum = sum(t[3] for t in vals)
            _id_sum = sum(t[0] for t in vals)
            tuples.append((key, _salary_sum, _id_sum))
        self.check_result(query, collections.Counter(tuples))
    def test_duplicate_decomposable_uda(self):
        """Registering two decompositions for the same UDA is an error."""
        query = """
        uda Agg1(x) {
          [0 as _sum];
          [_sum + x];
        };
        uda* Agg1 {Agg1, Agg1};
        uda* Agg1 {Agg1, Agg1};
        """
        with self.assertRaises(DuplicateFunctionDefinitionException):
            self.check_result(query, None)
    def test_decomposable_uda_type_check_fail1(self):
        """A local aggregate whose arity differs from the logical one fails."""
        query = """
        uda Logical(x) {
          [0 as _sum];
          [_sum + x];
        };
        uda Local(x, y) {
          [0 as _sum];
          [_sum + x];
        };
        uda* Logical {Local, Logical};
        """
        with self.assertRaises(InvalidArgumentList):
            self.check_result(query, None)
    def test_decomposable_uda_type_check_fail2(self):
        """A remote aggregate whose arity mismatches the local output fails."""
        query = """
        uda Logical(x) {
          [0 as _sum];
          [_sum + x];
        };
        uda Remote(x, y) {
          [0 as _sum];
          [_sum + x];
        };
        uda* Logical {Logical, Remote};
        """
        with self.assertRaises(InvalidArgumentList):
            self.check_result(query, None)
    def test_decomposable_uda_type_check_fail3(self):
        """A remote aggregate whose emit list mismatches the logical one fails."""
        query = """
        uda Logical(x) {
          [0 as _sum];
          [_sum + x];
        };
        uda Remote(x) {
          [0 as _sum];
          [_sum + x];
          [1, 2, 3];
        };
        uda* Logical {Logical, Remote};
        """
        with self.assertRaises(InvalidEmitList):
            self.check_result(query, None)
    def test_running_mean_sapply(self):
        """Stateful APPLY: running mean of salary over the scan order."""
        query = """
        APPLY RunningMean(value) {
          [0 AS _count, 0 AS _sum];
          [_count + 1, _sum + value];
          _sum / _count;
        };
        out = [FROM SCAN(%s) AS X EMIT id, RunningMean(X.salary)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        # Replay the same running state transition in Python.
        tps = []
        _sum = 0
        _count = 0
        for emp in self.emp_table:
            _sum += emp[3]
            _count += 1
            tps.append((emp[0], float(_sum) / _count))
        self.check_result(query, collections.Counter(tps))
    def test_sapply_multi_invocation(self):
        """Two invocations of one stateful APPLY keep independent state."""
        query = """
        APPLY RunningSum(x) {
          [0 AS _sum];
          [_sum + x];
          _sum;
        };
        out = [FROM SCAN(%s) AS X
               EMIT id, RunningSum(X.salary), RunningSum(id)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        # Replay both running sums in Python over the same tuple order.
        tps = []
        _sum1 = 0
        _sum2 = 0
        for emp in self.emp_table:
            _sum1 += emp[3]
            _sum2 += emp[0]
            tps.append((emp[0], _sum1, _sum2))
        self.check_result(query, collections.Counter(tps))
    def test_118_regression(self):
        """Regression test for https://github.com/uwescience/datalogcompiler/issues/118""" # noqa
        # A conjunctive WHERE clause (AND of two equality predicates).
        query = """
        out = [FROM SCAN(%s) AS X WHERE dept_id = 2 AND salary = 5000 EMIT id];
        STORE(out, OUTPUT);
        """ % self.emp_key
        expected = collections.Counter(
            [(x[0],) for x in self.emp_table.elements()
             if x[1] == 2 and x[3] == 5000])
        self.check_result(query, expected)
    def test_scan_emp_empty_statement(self):
        """Empty statements (stray semicolons) are ignored by the parser."""
        query = """
        ;;;
        emp = SCAN(%s);
        STORE(emp, OUTPUT);;;
        """ % self.emp_key
        self.check_result(query, self.emp_table)
    def test_empty_statement_parse(self):
        """A program containing nothing but empty statements fails to compile."""
        with self.assertRaises(MyrialCompileException):
            self.check_result(";", None)
    def test_case_binary(self):
        """CASE with a single WHEN/ELSE pair inside an EMIT expression."""
        query = """
        emp = SCAN(%s);
        rich = [FROM emp EMIT id, CASE WHEN salary > 15000
                THEN salary // salary
                ELSE 0 // salary END];
        STORE(rich, OUTPUT);
        """ % self.emp_key
        def func(y):
            # salary // salary == 1 on the taken branch; 0 // salary == 0.
            if y > 15000:
                return 1
            else:
                return 0
        expected = collections.Counter(
            [(x[0], func(x[3])) for x in self.emp_table.elements()])
        self.check_result(query, expected)
    def test_case_ternary(self):
        """CASE with multiple WHEN branches plus an ELSE."""
        query = """
        emp = SCAN(%s);
        rich = [FROM emp EMIT id,
            CASE WHEN salary <= 5000 THEN "poor"
                 WHEN salary <= 25000 THEN "middle class"
                 ELSE "rich"
            END];
        STORE(rich, OUTPUT);
        """ % self.emp_key
        def func(y):
            # Mirror the CASE branches: thresholds at 5000 and 25000.
            if y <= 5000:
                return 'poor'
            elif y <= 25000:
                return 'middle class'
            else:
                return 'rich'
        expected = collections.Counter(
            [(x[0], func(x[3])) for x in self.emp_table.elements()])
        self.check_result(query, expected)
    def test_case_aggregate(self):
        """CASE nested inside an aggregate (SUM) expression."""
        query = """
        emp = SCAN(%s);
        rich = [FROM emp EMIT SUM(3 * CASE WHEN salary > 15000
                THEN 1 ELSE 0 END)];
        STORE(rich, OUTPUT);
        """ % self.emp_key
        # 3 * (number of rows with salary > 15000).
        _sum = 3 * len([x for x in self.emp_table.elements()
                        if x[3] > 15000])
        self.check_result(query, collections.Counter([(_sum,)]))
    def test_case_unbox(self):
        """CASE combined with unboxed (*REL.col and *REL) scalar references."""
        query = """
        TH = [15000];
        A = [1 AS one, 2 AS two, 3 AS three];
        emp = SCAN(%s);
        rich = [FROM emp EMIT SUM(*A.three * CASE WHEN salary > *TH
                THEN 1 ELSE 0 END)];
        STORE(rich, OUTPUT);
        """ % self.emp_key
        # *A.three == 3 and *TH == 15000, so this matches test_case_aggregate.
        _sum = 3 * len([x for x in self.emp_table.elements()
                        if x[3] > 15000])
        self.check_result(query, collections.Counter([(_sum,)]))
    def test_default_column_names(self):
        """Unnamed emit expressions get default _COLUMNn_ names in the scheme."""
        with open('examples/groupby1.myl') as fh:
            query = fh.read()
        self.execute_query(query)
        scheme = self.db.get_scheme('OUTPUT')
        self.assertEquals(scheme.getName(0), "_COLUMN0_")
        self.assertEquals(scheme.getName(1), "id")
    def test_worker_id(self):
        """WORKER_ID() pairs every row with 0 in this single-worker test setup."""
        query = """
        X = [FROM SCAN(%s) AS X EMIT X.id, WORKER_ID()];
        STORE(X, OUTPUT);
        """ % self.emp_key
        expected = collections.Counter([(x[0], 0) for x
                                        in self.emp_table.elements()])
        self.check_result(query, expected)
def test_flip_zero(self):
"""flip(0) should always evaluate to false"""
query = """
X = [FROM SCAN(%s) AS X WHERE flip(0) EMIT *];
STORE(X, OUTPUT);
""" % self.emp_key
expected = collections.Counter()
self.check_result(query, expected)
def test_flip_one(self):
"""flip(1) should always evaluate to true"""
query = """
X = [FROM SCAN(%s) AS X WHERE flip(1) EMIT *];
STORE(X, OUTPUT);
""" % self.emp_key
expected = collections.Counter(self.emp_table.elements())
self.check_result(query, expected)
    def test_substr(self):
        """substr(s, start, end) with unboxed bounds matches Python s[0:3]."""
        query = """
        ZERO = [0];
        THREE = [3];
        out = [FROM SCAN(%s) AS X EMIT X.id, substr(X.name, *ZERO, *THREE)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        expected = collections.Counter(
            [(x[0], x[2][0:3]) for x in self.emp_table.elements()])
        self.check_result(query, expected)
def test_concat(self):
query = """
STRS = ["a" as a, "b" as b];
out = [FROM STRS EMIT concat(a, b)];
STORE(out, OUTPUT);
"""
expected = collections.Counter({('ab',): 1})
self.check_result(query, expected)
    def test_byterange(self):
        """byterange(blob, 2, 4) extracts bytes [2, 4) from a BLOB literal."""
        query = r"""
        BYTES = [b'\xDE\xAD\xBE\xEF' AS bytes];
        out = [FROM BYTES AS X EMIT byterange(X.bytes, 2, 4) as res];
        STORE(out, OUTPUT);
        """
        expected = collections.Counter({(b'\xBE\xEF',): 1})
        self.check_result(query, expected)
    def test_len(self):
        """len() on a string column matches Python's len()."""
        query = """
        out = [FROM SCAN(%s) AS X EMIT X.id, len(X.name)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        expected = collections.Counter(
            [(x[0], len(x[2])) for x in self.emp_table.elements()])
        self.check_result(query, expected)
    def test_head(self):
        """head(s, 10) returns the first 10 characters (s[0:10])."""
        query = """
        out = [FROM SCAN(%s) AS X EMIT X.id, head(X.name, 10)];
        STORE(out, OUTPUT);
        """ % self.emp_key
        expected = collections.Counter(
            [(x[0], x[2][0:10]) for x in self.emp_table.elements()])
        self.check_result(query, expected)
def test_tail(self):
query = """
ZERO = [0];
THREE = [3];
out = [FROM SCAN(%s) AS X EMIT X.id, tail(X.name, 10)];
STORE(out, OUTPUT);
""" % self.emp_key
expected = collections.Counter(
[(x[0], (lambda i: i if len(i) <= 10 else i[len(i) - 10:])(x[2]))
for x in self.emp_table.elements()])
self.check_result(query, expected)
    def test_column_name_reserved(self):
        """A builtin function name cannot be used as an output column name."""
        query = """
        T = EMPTY(x:INT);
        A = [FROM T EMIT SafeDiv(x, 3) AS SafeDiv];
        STORE (A, BadProgram);
        """
        with self.assertRaises(ReservedTokenException):
            self.check_result(query, None)
    def test_bug_226(self):
        """Regression test for issue 226: qualified wildcard (A.*) in a join."""
        query = """
        T = scan({emp});
        A = select id, salary from T where 1=1;
        B = select id, salary from A where salary=90000;
        C = select A.* from B, A where A.salary < B.salary;
        STORE (C, OUTPUT);
        """.format(emp=self.emp_key)
        # Expected: (id, salary) pairs of rows paid less than some
        # 90000-salary row.
        expected = collections.Counter(
            (i, s) for (i, d, n, s) in self.emp_table
            for (i2, d2, n2, s2) in self.emp_table
            if s2 == 90000 and s < s2)
        self.assertEquals(expected, self.execute_query(query))
    def test_column_mixed_case_reserved(self):
        """The reserved-name check on column names is case-insensitive."""
        query = """
        T = EMPTY(x:INT);
        A = [FROM T EMIT MAX(x) AS maX];
        STORE (A, BadProgram);
        """
        with self.assertRaises(ReservedTokenException):
            self.check_result(query, None)
    def test_variable_name_reserved(self):
        """A reserved builtin name cannot be used as a relation variable."""
        query = """
        T = EMPTY(x:INT);
        avg = COUNTALL(T);
        STORE (countall, BadProgram);
        """
        with self.assertRaises(ReservedTokenException):
            self.check_result(query, None)
    def test_empty_query(self):
        """A program with no STORE (all statements dead) fails to compile."""
        query = """
        T1 = empty(x:INT);
        """
        with self.assertRaises(MyrialCompileException):
            self.check_result(query, None)
    def test_sink(self):
        """SINK() consumes a relation without storing any output."""
        query = """
        ZERO = [0];
        A = [from ZERO emit *];
        SINK(A);
        """
        self.evaluate_sink_query(query)
def test_string_cast(self):
query = """
emp = SCAN(%s);
bc = [FROM emp EMIT STRING(emp.dept_id) AS foo];
STORE(bc, OUTPUT);
""" % self.emp_key
ex = collections.Counter((str(d),) for (i, d, n, s) in self.emp_table)
ex_scheme = scheme.Scheme([('foo', types.STRING_TYPE)])
self.check_result(query, ex)
    def test_float_cast(self):
        """float() cast yields float values and a DOUBLE_TYPE scheme."""
        query = """
        emp = SCAN(%s);
        bc = [FROM emp EMIT float(emp.dept_id) AS foo];
        STORE(bc, OUTPUT);
        """ % self.emp_key
        ex = collections.Counter((float(d),) for (i, d, n, s) in self.emp_table)  # noqa
        ex_scheme = scheme.Scheme([('foo', types.DOUBLE_TYPE)])
        self.check_result(query, ex, ex_scheme)
    def test_scientific_notation(self):
        """Scientific-notation literals (with/without signs) parse as doubles."""
        literals = ["1.0e20", "3e40", "5e-6", ".7e8", ".9e-12",
                    "-3e45", "-6e-78", "9e+12", "3E4"]
        query = """
        emp = SCAN({});
        bc = [FROM emp EMIT {}];
        STORE(bc, OUTPUT);
        """.format(self.emp_key, ','.join(literals))
        # Every input row yields the same tuple of parsed constants.
        ex = collections.Counter(
            (tuple(map(float, literals)),) * len(self.emp_table)) # noqa
        ex_scheme = scheme.Scheme([('$%d' % i, types.DOUBLE_TYPE)
                                   for i in xrange(len(literals))])
        self.check_result(query, ex, ex_scheme)
    def test_sequence(self):
        """Two STOREs compile to a Sequence plan; both outputs are produced."""
        query = """
        T1 = scan({rel});
        store(T1, OUTPUT);
        T2 = scan({rel});
        store(T2, OUTPUT2);
        """.format(rel=self.emp_key)
        physical_plan = self.get_physical_plan(query)
        self.assertIsInstance(physical_plan, raco.algebra.Sequence)
        self.check_result(query, self.emp_table, output='OUTPUT')
        self.check_result(query, self.emp_table, output='OUTPUT2')
    def test_238_dont_renumber_columns(self):
        """Named column refs after a self-join project the correct column."""
        # see https://github.com/uwescience/raco/issues/238
        query = """
        x = [1 as a, 2 as b];
        y = [from x as x1, x as x2
             emit x2.a, x2.b];
        z = [from y emit a];
        store(z, OUTPUT);"""
        self.check_result(query, collections.Counter([(1,)]))
    def test_implicit_column_names(self):
        """$0/$1 over a self-join inherit names from the first instance."""
        query = """
        x = [1 as a, 2 as b];
        y = [from x as x1, x as x2
             emit $0, $1];
        store(y, OUTPUT);"""
        expected_scheme = scheme.Scheme([('a', types.LONG_TYPE),
                                         ('b', types.LONG_TYPE)])
        self.check_result(query, collections.Counter([(1, 2)]),
                          scheme=expected_scheme)
    def test_implicit_column_names2(self):
        """$2/$3 over a self-join resolve to the second instance's columns."""
        query = """
        x = [1 as a, 2 as b];
        y = [from x as x1, x as x2
             emit $2, $3];
        store(y, OUTPUT);"""
        expected_scheme = scheme.Scheme([('a', types.LONG_TYPE),
                                         ('b', types.LONG_TYPE)])
        self.check_result(query, collections.Counter([(1, 2)]),
                          scheme=expected_scheme)
    def test_implicit_column_names3(self):
        """Mixed positional refs across join sides keep their column names."""
        query = """
        x = [1 as a, 2 as b];
        y = [from x as x1, x as x2
             emit $2, $1];
        store(y, OUTPUT);"""
        expected_scheme = scheme.Scheme([('a', types.LONG_TYPE),
                                         ('b', types.LONG_TYPE)])
        self.check_result(query, collections.Counter([(1, 2)]),
                          scheme=expected_scheme)
    def test_unbox_index_column_names(self):
        """Dotted positional refs (x2.$0) resolve within the named instance."""
        query = """
        x = [1 as a, 2 as b];
        y = [from x as x1, x as x2
             emit x2.$0, x2.$1];
        store(y, OUTPUT);"""
        expected_scheme = scheme.Scheme([('a', types.LONG_TYPE),
                                         ('b', types.LONG_TYPE)])
        self.check_result(query, collections.Counter([(1, 2)]),
                          scheme=expected_scheme)
def test_duplicate_column_names(self):
    """Duplicate output name 'a' is disambiguated to 'a1' in the scheme."""
    query = """
x = [1 as a, 2 as b];
y = [from x as x1, x as x2 emit x1.a, x2.a];
store(y, OUTPUT);"""
    expected_scheme = scheme.Scheme([('a', types.LONG_TYPE),
                                     ('a1', types.LONG_TYPE)])
    self.check_result(query, collections.Counter([(1, 1)]),
                      scheme=expected_scheme)
def test_distinct_aggregate_combinations(self):
    """Check that aggregates over different columns stay distinct.

    The optimizer must not merge sum(dept_id) and sum(salary) into a
    single aggregate expression.
    """
    query = """
emp = scan(%s);
ans = [from emp emit sum(dept_id) as d, sum(salary) as s];
store(ans, OUTPUT);""" % self.emp_key
    dept_total = sum(row[1] for row in self.emp_table)
    salary_total = sum(row[3] for row in self.emp_table)
    self.check_result(
        query, collections.Counter([(dept_total, salary_total)]))
def test_bug_245_dead_code_with_do_while_plan(self):
    """Test to make sure that a dead program (no Stores) with a DoWhile
    throws the correct parse error."""
    # Regression for https://github.com/uwescience/raco/issues/245.
    with open('examples/deadcode2.myl') as fh:
        query = fh.read()
    with self.assertRaises(MyrialCompileException):
        # Compilation itself should fail; the expected result is irrelevant.
        self.check_result(query, None)
def test_simple_do_while(self):
    """count to 32 by powers of 2"""
    # Expected (32, 5): presumably final value 32 after 5 doublings --
    # confirm against examples/iteration.myl.
    with open('examples/iteration.myl') as fh:
        query = fh.read()
    expected = collections.Counter([(32, 5)])
    self.check_result(query, expected, output="powersOfTwo")
def test_pyUDF_dotted_arguments(self):
    """A Python UDF called with dotted refs records the column names."""
    query = """
T1=scan(%s);
out = [from T1 emit test(T1.id, T1.dept_id) As output];
store(out, OUTPUT);
""" % self.emp_key
    plan = self.get_physical_plan(query, udas=[('test', LONG_TYPE)])
    # Grab the first Apply operator in the physical plan.
    apply = [op for op in plan.walk() if isinstance(op, Apply)][0]
    ref = apply.emitters[0][1]
    # The PYUDF expression keeps the argument names, not renumbered positions.
    assert str(ref) == "PYUDF(test, ['id', 'dept_id'], LONG_TYPE)"
def test_pyUDF_with_positional_arguments(self):
    """A Python UDF called with $0/$1 records the positional references."""
    query = """
T1=scan(%s);
out = [from T1 emit test($0, $1) As output];
store(out, OUTPUT);
""" % self.emp_key
    plan = self.get_physical_plan(query, udas=[('test', LONG_TYPE)])
    # Grab the first Apply operator in the physical plan.
    apply = [op for op in plan.walk() if isinstance(op, Apply)][0]
    ref = apply.emitters[0][1]
    # Positional arguments are preserved verbatim in the PYUDF expression.
    assert str(ref) == "PYUDF(test, ['$0', '$1'], LONG_TYPE)"
def test_pyUDF_uda(self):
    """A UDA whose update/finalize steps call a Python UDF plans cleanly."""
    query = """
uda Foo(x){
[0 as _count, 0 as _sum];
[ _count+1, test_uda(_sum, x)];
[ test_uda(_sum,_count) ];
};
T1 = [from scan(%s) as t emit Foo(t.id) As mask];
store(T1, out);
""" % self.emp_key
    # Only asserts that physical planning succeeds; no result check.
    self.get_physical_plan(query, udas=[('test_uda', LONG_TYPE)])
| 32.304146
| 98
| 0.53515
| 12,489
| 97,397
| 4.011931
| 0.049644
| 0.042611
| 0.063467
| 0.084223
| 0.811795
| 0.792156
| 0.770941
| 0.741623
| 0.704401
| 0.66676
| 0
| 0.020002
| 0.33114
| 97,397
| 3,014
| 99
| 32.314864
| 0.749083
| 0.027352
| 0
| 0.657673
| 0
| 0.019815
| 0.380147
| 0.004608
| 0
| 0
| 0
| 0
| 0.024874
| 1
| 0.094013
| false
| 0
| 0.006745
| 0.000843
| 0.108769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
04975940ea3ea1131eb3064778768efbd6553477
| 42
|
py
|
Python
|
worktory/connection/__init__.py
|
renatoalmeidaoliveira/Worktory
|
b580e819efb8c6be57f274bbd5f15cd6eaacf3d5
|
[
"MIT"
] | 4
|
2021-10-21T10:22:25.000Z
|
2022-02-01T20:04:39.000Z
|
worktory/connection/__init__.py
|
renatoalmeidaoliveira/Worktory
|
b580e819efb8c6be57f274bbd5f15cd6eaacf3d5
|
[
"MIT"
] | null | null | null |
worktory/connection/__init__.py
|
renatoalmeidaoliveira/Worktory
|
b580e819efb8c6be57f274bbd5f15cd6eaacf3d5
|
[
"MIT"
] | null | null | null |
from worktory.connection.wrappers import *
| 42
| 42
| 0.857143
| 5
| 42
| 7.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 42
| 1
| 42
| 42
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
04ad25a06a4e86ee6103a130ee6fcc2998609a1f
| 42
|
py
|
Python
|
mae_envs/modules/__init__.py
|
bglick13/multi-agent-emergence-environments
|
e02d66f0734d95470d15a4508ff369a75fa093a4
|
[
"MIT"
] | 1,317
|
2019-09-17T15:50:42.000Z
|
2022-03-30T18:24:24.000Z
|
mae_envs/modules/__init__.py
|
jihan1218/multi-agent-predator-prey
|
ebf11e601de07e80c27c87dc41837d91f53e9465
|
[
"MIT"
] | 35
|
2019-09-20T11:36:20.000Z
|
2022-02-10T00:24:27.000Z
|
mae_envs/modules/__init__.py
|
jihan1218/multi-agent-predator-prey
|
ebf11e601de07e80c27c87dc41837d91f53e9465
|
[
"MIT"
] | 279
|
2019-09-18T00:14:19.000Z
|
2022-03-30T09:39:12.000Z
|
from .module import *
from .util import *
| 14
| 21
| 0.714286
| 6
| 42
| 5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 42
| 2
| 22
| 21
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
04cb12404c7f727798e431743a96f04637512481
| 43
|
py
|
Python
|
test/lv1/lv2/include_lv2.py
|
pavelkukov/fsimport
|
7020b941259fe78e20829029b9aa828d2de8300d
|
[
"MIT"
] | null | null | null |
test/lv1/lv2/include_lv2.py
|
pavelkukov/fsimport
|
7020b941259fe78e20829029b9aa828d2de8300d
|
[
"MIT"
] | null | null | null |
test/lv1/lv2/include_lv2.py
|
pavelkukov/fsimport
|
7020b941259fe78e20829029b9aa828d2de8300d
|
[
"MIT"
] | null | null | null |
def get_text():
    """Return the fixed marker string identifying the lv2 test module."""
    marker = 'text from lv2'
    return marker
| 14.333333
| 26
| 0.651163
| 7
| 43
| 3.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.232558
| 43
| 2
| 27
| 21.5
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0.302326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
04d2c590023129d48715f95ae1ef7a1103a39cae
| 494
|
py
|
Python
|
ckanext/datapusher/logic/auth.py
|
dassolkim/default-ckan
|
d18593c59fe8a4c6aa7ed8a29b92d16979085b1b
|
[
"Apache-2.0"
] | 1
|
2022-02-15T17:21:02.000Z
|
2022-02-15T17:21:02.000Z
|
ckanext/datapusher/logic/auth.py
|
dassolkim/default-ckan
|
d18593c59fe8a4c6aa7ed8a29b92d16979085b1b
|
[
"Apache-2.0"
] | 7
|
2021-02-02T22:03:03.000Z
|
2021-06-22T02:13:00.000Z
|
ckanext/datapusher/logic/auth.py
|
dassolkim/default-ckan
|
d18593c59fe8a4c6aa7ed8a29b92d16979085b1b
|
[
"Apache-2.0"
] | null | null | null |
# encoding: utf-8
import ckanext.datastore.logic.auth as auth
import ckan.plugins as p
def datapusher_submit(context, data_dict):
    """Delegate authorization for submitting a datapusher job to datastore auth."""
    return auth.datastore_auth(context, data_dict)
def datapusher_status(context, data_dict):
    """Delegate authorization for checking datapusher job status to datastore auth."""
    return auth.datastore_auth(context, data_dict)
def datapusher_hook(context, data_dict):
    """Authorize the datapusher callback hook; always allowed."""
    return dict(success=True)
def resource_upload(context, data_dict):
    """Authorize resource uploads; currently open to all callers.

    NOTE(review): a stricter check via check_access('resource_show', ...)
    is left disabled below -- confirm the permissive default is intentional.
    """
    # return p.toolkit.check_access('resource_show', context, data_dict)
    return dict(success=True)
| 26
| 72
| 0.765182
| 69
| 494
| 5.26087
| 0.42029
| 0.212121
| 0.289256
| 0.289256
| 0.539945
| 0.539945
| 0.363636
| 0.363636
| 0.363636
| 0.363636
| 0
| 0.002336
| 0.133603
| 494
| 18
| 73
| 27.444444
| 0.845794
| 0.165992
| 0
| 0.4
| 0
| 0
| 0.03423
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
b6ed1b96b8fd35eb962048d225673833e838b336
| 2,553
|
py
|
Python
|
libsaas/services/uservoice/comments.py
|
MidtownFellowship/libsaas
|
541bb731b996b08ede1d91a235cb82895765c38a
|
[
"MIT"
] | 155
|
2015-01-27T15:17:59.000Z
|
2022-02-20T00:14:08.000Z
|
libsaas/services/uservoice/comments.py
|
MidtownFellowship/libsaas
|
541bb731b996b08ede1d91a235cb82895765c38a
|
[
"MIT"
] | 14
|
2015-01-12T08:22:37.000Z
|
2021-06-16T19:49:31.000Z
|
libsaas/services/uservoice/comments.py
|
MidtownFellowship/libsaas
|
541bb731b996b08ede1d91a235cb82895765c38a
|
[
"MIT"
] | 43
|
2015-01-28T22:41:45.000Z
|
2021-09-21T04:44:26.000Z
|
from libsaas import http, parsers
from libsaas.services import base
from . import resource, flags
class CommentsBase(resource.UserVoiceTextResource):
    """Shared base for UserVoice comment resources."""

    # URL path segment for the comments collection.
    path = 'comments'

    def wrap_object(self, name):
        # The UserVoice API expects bodies shaped {'comment': {'text': ...}}.
        return {'comment': {'text': name}}
class Comments(CommentsBase):
    """Generic comments resource; creation is not supported here."""

    def create(self, obj):
        """Always raises: comments cannot be created through this resource."""
        raise base.MethodNotSupported()
class ForumSuggestionComment(CommentsBase):
    """A single comment on a forum suggestion."""

    @base.resource(flags.SuggestionCommentFlags)
    def flags(self):
        """
        Return the resource corresponding to all the flags of this comment.
        """
        return flags.SuggestionCommentFlags(self)
class ForumSuggestionComments(CommentsBase):
    """The collection of comments on a forum suggestion."""

    @base.apimethod
    def get(self, page=None, per_page=None, filter=None, sort=None):
        """
        Fetch comments on this suggestion.

        :var page: Where should paging start. If left as `None`, the first page
            is returned.
        :vartype page: int
        :var per_page: How many objects should be returned. If left as `None`,
            10 objects are returned.
        :vartype per_page: int
        :var filter: The kind of comments to return, see upstream
            documentation for possible values.
        :vartype filter: str
        :var sort: How should the returned collection be sorted. Refer to
            upstream documentation for possible values.
        :vartype sort: str
        """
        # get_params reads this function's locals(); parameter names must
        # match the upstream API's query-string keys.
        params = base.get_params(None, locals())
        request = http.Request('GET', self.get_url(), params)

        return request, parsers.parse_json
class UserComments(CommentsBase):
    """Comments authored by a single user; read-only (no create)."""

    def create(self, obj):
        """Always raises: comments cannot be created on a user resource."""
        raise base.MethodNotSupported()

    @base.apimethod
    def get(self, page=None, per_page=None, filter=None, sort=None):
        """
        Fetch comments from this user.

        :var page: Where should paging start. If left as `None`, the first page
            is returned.
        :vartype page: int
        :var per_page: How many objects should be returned. If left as `None`,
            10 objects are returned.
        :vartype per_page: int
        :var filter: The kind of comments to return, see upstream
            documentation for possible values.
        :vartype filter: str
        :var sort: How should the returned collection be sorted. Refer to
            upstream documentation for possible values.
        :vartype sort: str
        """
        # get_params reads this function's locals(); parameter names must
        # match the upstream API's query-string keys.
        params = base.get_params(None, locals())
        request = http.Request('GET', self.get_url(), params)

        return request, parsers.parse_json
| 28.366667
| 79
| 0.642382
| 303
| 2,553
| 5.369637
| 0.277228
| 0.025814
| 0.019668
| 0.029502
| 0.716656
| 0.716656
| 0.716656
| 0.716656
| 0.649047
| 0.649047
| 0
| 0.002163
| 0.275754
| 2,553
| 89
| 80
| 28.685393
| 0.877772
| 0.432041
| 0
| 0.5
| 0
| 0
| 0.021044
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0.107143
| 0.035714
| 0.678571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
b6f5539c717e68103eabc1c40b0e3945ced28351
| 216
|
py
|
Python
|
webapp/polls/admin.py
|
tristanrobert/batch7_rse
|
d769b328d7f027fe5486020dff239fc3ca54409e
|
[
"MIT"
] | 5
|
2020-04-18T16:47:09.000Z
|
2021-12-13T10:26:59.000Z
|
webapp/polls/admin.py
|
tristanrobert/batch7_rse
|
d769b328d7f027fe5486020dff239fc3ca54409e
|
[
"MIT"
] | 47
|
2020-05-01T12:46:47.000Z
|
2021-08-23T20:44:16.000Z
|
webapp/polls/admin.py
|
tristanrobert/batch7_rse
|
d769b328d7f027fe5486020dff239fc3ca54409e
|
[
"MIT"
] | 4
|
2020-03-15T17:44:19.000Z
|
2020-07-23T07:57:30.000Z
|
from django.contrib import admin
from .models import Company, DPEF, Sentence, ActivitySector
# Expose the polls app models in the Django admin with default ModelAdmins.
admin.site.register(Company)
admin.site.register(DPEF)
admin.site.register(Sentence)
admin.site.register(ActivitySector)
| 24
| 59
| 0.824074
| 28
| 216
| 6.357143
| 0.428571
| 0.202247
| 0.382022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078704
| 216
| 8
| 60
| 27
| 0.894472
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
8e1a9b05226b0dde72a54aa2f8e04e84738c33a1
| 40
|
py
|
Python
|
pyqt_auto_search_bar/__init__.py
|
yjg30737/pyqt-auto-search-bar
|
12a2933cfacbf7df7c1ce2030f94755b02ca7c1f
|
[
"MIT"
] | null | null | null |
pyqt_auto_search_bar/__init__.py
|
yjg30737/pyqt-auto-search-bar
|
12a2933cfacbf7df7c1ce2030f94755b02ca7c1f
|
[
"MIT"
] | null | null | null |
pyqt_auto_search_bar/__init__.py
|
yjg30737/pyqt-auto-search-bar
|
12a2933cfacbf7df7c1ce2030f94755b02ca7c1f
|
[
"MIT"
] | null | null | null |
from .autoSearchBar import AutoSearchBar
| 40
| 40
| 0.9
| 4
| 40
| 9
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 40
| 1
| 40
| 40
| 0.972973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6d1436611ae465bd0239528163c02c7fd067bcd3
| 6,316
|
py
|
Python
|
datasets/custom_data_loader.py
|
wymGAKKI/saps
|
fcbcded83ed6f379ea3b29204b080b63b485a278
|
[
"MIT"
] | null | null | null |
datasets/custom_data_loader.py
|
wymGAKKI/saps
|
fcbcded83ed6f379ea3b29204b080b63b485a278
|
[
"MIT"
] | null | null | null |
datasets/custom_data_loader.py
|
wymGAKKI/saps
|
fcbcded83ed6f379ea3b29204b080b63b485a278
|
[
"MIT"
] | null | null | null |
import torch.utils.data
def customDataloader(args):
    """Build train/val DataLoaders for the dataset named by ``args.dataset``.

    Dynamically imports ``datasets.<args.dataset>`` and instantiates its
    same-named dataset class for the 'train' and 'val' splits rooted at
    ``args.data_dir``. When ``args.concat_data`` is set, a second copy rooted
    at ``args.data_dir2`` is concatenated onto each split.

    Returns:
        (train_loader, test_loader) -- shuffled train loader batched by
        ``args.batch`` and unshuffled val loader batched by ``args.val_batch``.
    """
    args.log.printWrite("=> fetching img pairs in %s" % (args.data_dir))

    datasets = __import__('datasets.' + args.dataset)
    dataset_file = getattr(datasets, args.dataset)
    train_set = getattr(dataset_file, args.dataset)(args, args.data_dir, 'train')
    val_set = getattr(dataset_file, args.dataset)(args, args.data_dir, 'val')

    if args.concat_data:
        # Bug fix: log message previously read '****** Using cocnat data ******'.
        args.log.printWrite('****** Using concat data ******')
        args.log.printWrite("=> fetching img pairs in '{}'".format(args.data_dir2))
        train_set2 = getattr(dataset_file, args.dataset)(args, args.data_dir2, 'train')
        val_set2 = getattr(dataset_file, args.dataset)(args, args.data_dir2, 'val')

        train_set = torch.utils.data.ConcatDataset([train_set, train_set2])
        val_set = torch.utils.data.ConcatDataset([val_set, val_set2])

    args.log.printWrite('Found Data:\t %d Train and %d Val' % (len(train_set), len(val_set)))
    args.log.printWrite('\t Train Batch: %d, Val Batch: %d' % (args.batch, args.val_batch))

    train_loader = torch.utils.data.DataLoader(
        train_set, batch_size=args.batch,
        num_workers=args.workers, pin_memory=args.cuda, shuffle=True)
    test_loader = torch.utils.data.DataLoader(
        val_set, batch_size=args.val_batch,
        num_workers=args.workers, pin_memory=args.cuda, shuffle=False)
    return train_loader, test_loader
def benchmarkLoader(args):
    """Build the test DataLoader for the benchmark dataset named by args.benchmark.

    Returns a single unshuffled loader batched by args.test_batch.
    """
    args.log.printWrite("=> fetching img pairs in 'data/%s'" % (args.benchmark))
    # Dynamically import datasets.<benchmark> and instantiate its dataset class.
    datasets = __import__('datasets.' + args.benchmark)
    dataset_file = getattr(datasets, args.benchmark)
    test_set = getattr(dataset_file, args.benchmark)(args, 'test')

    args.log.printWrite('Found Benchmark Data: %d samples' % (len(test_set)))
    args.log.printWrite('\t Test Batch %d' % (args.test_batch))
    test_loader = torch.utils.data.DataLoader(test_set, batch_size=args.test_batch,
                                              num_workers=args.workers, pin_memory=args.cuda, shuffle=False)
    return test_loader
def shadowDataloader(args):
    """Build train/val DataLoaders for args.shadowdataset rooted at args.mydata_dir.

    Returns (train_loader, test_loader).
    """
    args.log.printWrite("=> fetching img pairs in %s" % (args.mydata_dir))
    # Dynamically import datasets.<shadowdataset> and instantiate its class.
    datasets = __import__('datasets.' + args.shadowdataset)
    dataset_file = getattr(datasets, args.shadowdataset)
    train_set = getattr(dataset_file, args.shadowdataset)(args, args.mydata_dir, 'train')
    val_set = getattr(dataset_file, args.shadowdataset)(args, args.mydata_dir, 'val')

    args.log.printWrite('Found Data:\t %d Train and %d Val' % (len(train_set), len(val_set)))
    args.log.printWrite('\t Train Batch: %d, Val Batch: %d' % (args.batch, args.val_batch))

    train_loader = torch.utils.data.DataLoader(train_set, batch_size=args.batch,
                                               num_workers=args.workers, pin_memory=args.cuda, shuffle=True)
    test_loader = torch.utils.data.DataLoader(val_set, batch_size=args.val_batch,
                                              num_workers=args.workers, pin_memory=args.cuda, shuffle=False)
    return train_loader, test_loader
def shadowTestDataloader(args):
    """Build only the val/test DataLoader for args.shadowdataset.

    Unlike shadowDataloader, the train split is skipped; the val split is
    batched by args.test_batch and returned unshuffled.
    """
    args.log.printWrite("=> fetching img pairs in %s" % (args.mydata_dir))
    datasets = __import__('datasets.' + args.shadowdataset)
    dataset_file = getattr(datasets, args.shadowdataset)
    #train_set = getattr(dataset_file, args.shadowdataset)(args, args.mydata_dir, 'train')
    val_set = getattr(dataset_file, args.shadowdataset)(args, args.mydata_dir, 'val')
    test_loader = torch.utils.data.DataLoader(val_set, batch_size=args.test_batch,
                                              num_workers=args.workers, pin_memory=args.cuda, shuffle=False)
    return test_loader
def reflectanceDataloader(args):
    """Build train/val DataLoaders for args.mydataset rooted at args.mydata_dir.

    NOTE(review): identical to myDataloader below -- consider consolidating.
    Returns (train_loader, test_loader).
    """
    args.log.printWrite("=> fetching img pairs in %s" % (args.mydata_dir))
    datasets = __import__('datasets.' + args.mydataset)
    dataset_file = getattr(datasets, args.mydataset)
    train_set = getattr(dataset_file, args.mydataset)(args, args.mydata_dir, 'train')
    val_set = getattr(dataset_file, args.mydataset)(args, args.mydata_dir, 'val')

    args.log.printWrite('Found Data:\t %d Train and %d Val' % (len(train_set), len(val_set)))
    args.log.printWrite('\t Train Batch: %d, Val Batch: %d' % (args.batch, args.val_batch))

    train_loader = torch.utils.data.DataLoader(train_set, batch_size=args.batch,
                                               num_workers=args.workers, pin_memory=args.cuda, shuffle=True)
    test_loader = torch.utils.data.DataLoader(val_set, batch_size=args.val_batch,
                                              num_workers=args.workers, pin_memory=args.cuda, shuffle=False)
    return train_loader, test_loader
def myDataloader(args):
    """Build train/val DataLoaders for args.mydataset rooted at args.mydata_dir.

    NOTE(review): identical to reflectanceDataloader above -- consider
    consolidating. Returns (train_loader, test_loader).
    """
    args.log.printWrite("=> fetching img pairs in %s" % (args.mydata_dir))
    datasets = __import__('datasets.' + args.mydataset)
    dataset_file = getattr(datasets, args.mydataset)
    train_set = getattr(dataset_file, args.mydataset)(args, args.mydata_dir, 'train')
    val_set = getattr(dataset_file, args.mydataset)(args, args.mydata_dir, 'val')

    args.log.printWrite('Found Data:\t %d Train and %d Val' % (len(train_set), len(val_set)))
    args.log.printWrite('\t Train Batch: %d, Val Batch: %d' % (args.batch, args.val_batch))

    train_loader = torch.utils.data.DataLoader(train_set, batch_size=args.batch,
                                               num_workers=args.workers, pin_memory=args.cuda, shuffle=True)
    test_loader = torch.utils.data.DataLoader(val_set, batch_size=args.val_batch,
                                              num_workers=args.workers, pin_memory=args.cuda, shuffle=False)
    return train_loader, test_loader
def pokemonDataloader(args):
    """Build train/val DataLoaders for args.pokemondataset rooted at
    args.pokemondata_dir.

    Returns (train_loader, test_loader).
    """
    # Bug fix: the fetch message printed args.mydata_dir while the data is
    # actually loaded from args.pokemondata_dir below (copy-paste artifact).
    args.log.printWrite("=> fetching img pairs in %s" % (args.pokemondata_dir))
    datasets = __import__('datasets.' + args.pokemondataset)
    dataset_file = getattr(datasets, args.pokemondataset)
    train_set = getattr(dataset_file, args.pokemondataset)(args, args.pokemondata_dir, 'train')
    val_set = getattr(dataset_file, args.pokemondataset)(args, args.pokemondata_dir, 'val')

    args.log.printWrite('Found Data:\t %d Train and %d Val' % (len(train_set), len(val_set)))
    args.log.printWrite('\t Train Batch: %d, Val Batch: %d' % (args.batch, args.val_batch))

    train_loader = torch.utils.data.DataLoader(
        train_set, batch_size=args.batch,
        num_workers=args.workers, pin_memory=args.cuda, shuffle=True)
    test_loader = torch.utils.data.DataLoader(
        val_set, batch_size=args.val_batch,
        num_workers=args.workers, pin_memory=args.cuda, shuffle=False)
    return train_loader, test_loader
| 56.392857
| 95
| 0.718968
| 880
| 6,316
| 4.945455
| 0.070455
| 0.055607
| 0.082031
| 0.075827
| 0.892233
| 0.840533
| 0.830423
| 0.822381
| 0.809743
| 0.809743
| 0
| 0.001297
| 0.145662
| 6,316
| 112
| 96
| 56.392857
| 0.805226
| 0.013458
| 0
| 0.637363
| 0
| 0
| 0.120687
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.087912
| 0
| 0.241758
| 0.230769
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6d56ff09987411ff64ea142abcb153ed5e0db839
| 123
|
py
|
Python
|
stream/tests.py
|
0xdc/estuary-app-livestream
|
dd581e5e1904c0e669ca830e44de5d8f8c37fd46
|
[
"Apache-2.0"
] | null | null | null |
stream/tests.py
|
0xdc/estuary-app-livestream
|
dd581e5e1904c0e669ca830e44de5d8f8c37fd46
|
[
"Apache-2.0"
] | null | null | null |
stream/tests.py
|
0xdc/estuary-app-livestream
|
dd581e5e1904c0e669ca830e44de5d8f8c37fd46
|
[
"Apache-2.0"
] | null | null | null |
from django.test import TestCase
# Create your tests here.
from .models import Stream
class StreamTests(TestCase):
    # Placeholder test case for the stream app; no tests implemented yet.
    pass
| 15.375
| 32
| 0.788618
| 17
| 123
| 5.705882
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154472
| 123
| 7
| 33
| 17.571429
| 0.932692
| 0.186992
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
6100d0eb347d35719f7d9d0f624cf9330b64ec0d
| 243
|
py
|
Python
|
kornia/contrib/__init__.py
|
lyhyl/kornia
|
5bd3aeb0d54dedac01e6eaf8bac37779bab0bec5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
kornia/contrib/__init__.py
|
lyhyl/kornia
|
5bd3aeb0d54dedac01e6eaf8bac37779bab0bec5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
kornia/contrib/__init__.py
|
lyhyl/kornia
|
5bd3aeb0d54dedac01e6eaf8bac37779bab0bec5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
from kornia.contrib.connected_components import connected_components
from kornia.contrib.extract_patches import extract_tensor_patches, ExtractTensorPatches
# Public API of kornia.contrib re-exported from the submodules above.
__all__ = ["connected_components", "extract_tensor_patches", "ExtractTensorPatches"]
| 48.6
| 87
| 0.872428
| 25
| 243
| 8
| 0.44
| 0.285
| 0.17
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061728
| 243
| 4
| 88
| 60.75
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0.255144
| 0.090535
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b61e9da3c87acd3f760d88739efe3baccedfc4fa
| 132
|
py
|
Python
|
clone/admin.py
|
gamersdestiny/SB-Admin-clone
|
4f8d8e757e8c9495d6a62f022b9d646f5607844b
|
[
"MIT"
] | null | null | null |
clone/admin.py
|
gamersdestiny/SB-Admin-clone
|
4f8d8e757e8c9495d6a62f022b9d646f5607844b
|
[
"MIT"
] | null | null | null |
clone/admin.py
|
gamersdestiny/SB-Admin-clone
|
4f8d8e757e8c9495d6a62f022b9d646f5607844b
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
import clone.models as mod
# Expose the clone app's chart models in the Django admin.
admin.site.register(mod.lineChart)
admin.site.register(mod.donutChart)
| 22
| 35
| 0.825758
| 20
| 132
| 5.45
| 0.65
| 0.165138
| 0.311927
| 0.366972
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 132
| 5
| 36
| 26.4
| 0.900826
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
b62a61ad51d3d719894e73ad18ad21388577bd7f
| 734
|
py
|
Python
|
swap/router_addresses.py
|
samirma/BasicDefiToolkit
|
9f39c710cd7c5cd0f0fb648443e0247f76812521
|
[
"MIT"
] | null | null | null |
swap/router_addresses.py
|
samirma/BasicDefiToolkit
|
9f39c710cd7c5cd0f0fb648443e0247f76812521
|
[
"MIT"
] | null | null | null |
swap/router_addresses.py
|
samirma/BasicDefiToolkit
|
9f39c710cd7c5cd0f0fb648443e0247f76812521
|
[
"MIT"
] | null | null | null |
# DEX factory contract addresses (hex). NOTE(review): these appear to be
# mainnet deployments -- spooky/spirit/waka/hyper on Fantom, pancake on BSC,
# sushi multi-chain. Verify each against the DEX's official docs before use.
spooky_factory = "0x152eE697f2E276fA89E96742e9bB9aB1F2E61bE3"
hyper_factory = "0x991152411A7B5A14A8CF0cDDE8439435328070dF"
spirit_factory = "0xEF45d134b73241eDa7703fa787148D9C9F4950b0"
waka_factory = "0xB2435253C71FcA27bE41206EB2793E44e1Df6b6D"
sushi_factory = "0xc35DADB65012eC5796536bD9864eD8773aBc74C4"
pancake_factory = '0xcA143Ce32Fe78f1f7019d7d551a6402fC5350c73'
# Corresponding router contract addresses for the same DEXes.
spooky_router = '0xF491e7B69E4244ad4002BC14e878a34207E38c29'
hyper_router = '0x53c153a0df7E050BbEFbb70eE9632061f12795fB'
spirit_router = '0x16327E3FbDaCA3bcF7E38F5Af2599D2DDc33aE52'
waka_router = '0x7B17021FcB7Bc888641dC3bEdfEd3734fCaf2c87'
sushi_router = '0x1b02dA8Cb0d097eB8D57A175b88c7D8b47997506'
pancake_router = '0x10ED43C718714eb63d5aA57B78B54704E256024E'
| 48.933333
| 62
| 0.899183
| 36
| 734
| 18
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.451149
| 0.051771
| 734
| 14
| 63
| 52.428571
| 0.479885
| 0
| 0
| 0
| 0
| 0
| 0.687585
| 0.687585
| 0
| 0
| 0.687585
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b6373058cf847e1faad8841bce65ba83bde5f297
| 2,928
|
py
|
Python
|
tests/test_pybunpro.py
|
patrickayoup/pybunpro
|
961270c256b0068b97d2cb6b68f4f376a15d24b4
|
[
"MIT"
] | 1
|
2019-05-21T17:37:13.000Z
|
2019-05-21T17:37:13.000Z
|
tests/test_pybunpro.py
|
patrickayoup/pybunpro
|
961270c256b0068b97d2cb6b68f4f376a15d24b4
|
[
"MIT"
] | 16
|
2019-05-09T12:30:50.000Z
|
2021-02-02T22:17:24.000Z
|
tests/test_pybunpro.py
|
patrickayoup/pybunpro
|
961270c256b0068b97d2cb6b68f4f376a15d24b4
|
[
"MIT"
] | null | null | null |
import pytest
from click.testing import CliRunner
from pybunpro.__main__ import cli
class TestPyBunpro(object):
    """CLI tests for pybunpro's study-queue and recent-items commands."""

    @pytest.fixture
    def runner(self):
        # Fresh CliRunner per test keeps CLI invocations isolated.
        return CliRunner()

    def test_study_queue(self, requests_mock, api_key, runner,
                         mock_study_queue_response,
                         user_information,
                         study_queue):
        """study-queue prints user info and the queue on a 200 response."""
        requests_mock.get(f'https://bunpro.jp/api/user/{api_key}/study_queue',
                          json=mock_study_queue_response)

        result = runner.invoke(cli, ['--api-key', api_key,
                                     'study-queue'])

        assert result.exit_code == 0
        assert str(user_information) in result.output
        assert str(study_queue) in result.output

    def test_study_queue_error(self, requests_mock, api_key, runner,
                               error_response):
        """study-queue exits 1 with an error message on a 400 response."""
        requests_mock.get(f'https://bunpro.jp/api/user/{api_key}/study_queue',
                          json=error_response, status_code=400)

        result = runner.invoke(cli, ['--api-key', api_key,
                                     'study-queue'])

        assert result.exit_code == 1
        assert 'User does not exist' in result.output

    def test_recent_items(self, requests_mock, api_key, runner,
                          mock_recent_items_response,
                          user_information,
                          grammar_point):
        """recent-items prints user info and grammar points on success."""
        requests_mock.get(f'https://bunpro.jp/api/user/{api_key}/recent_items',
                          json=mock_recent_items_response)

        result = runner.invoke(cli, ['--api-key', api_key,
                                     'recent-items'])

        assert result.exit_code == 0
        assert str(user_information) in result.output
        assert str([grammar_point]) in result.output

    def test_recent_items_error(self, requests_mock, api_key, runner,
                                error_response):
        """recent-items exits 1 with an error message on a 400 response."""
        requests_mock.get(f'https://bunpro.jp/api/user/{api_key}/recent_items',
                          json=error_response, status_code=400)

        result = runner.invoke(cli, ['--api-key', api_key,
                                     'recent-items'])

        assert result.exit_code == 1
        assert 'User does not exist' in result.output

    def test_debug_mode(self, requests_mock, api_key, runner,
                        mock_recent_items_response,
                        user_information,
                        grammar_point,
                        caplog):
        """--debug enables debug logging while the command still succeeds."""
        requests_mock.get(f'https://bunpro.jp/api/user/{api_key}/recent_items',
                          json=mock_recent_items_response)

        result = runner.invoke(cli, ['--api-key', api_key,
                                     '--debug',
                                     'recent-items'])

        assert result.exit_code == 0
        assert 'Debug Mode Enabled' in caplog.text
| 40.109589
| 79
| 0.550888
| 322
| 2,928
| 4.745342
| 0.180124
| 0.078534
| 0.054974
| 0.062173
| 0.812827
| 0.799084
| 0.799084
| 0.75
| 0.725131
| 0.719895
| 0
| 0.005839
| 0.356557
| 2,928
| 72
| 80
| 40.666667
| 0.805202
| 0
| 0
| 0.614035
| 0
| 0
| 0.139686
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 1
| 0.105263
| false
| 0
| 0.052632
| 0.017544
| 0.192982
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1e1d2f13f2435f2df5dc525e2472b3c0f7ff822f
| 46
|
py
|
Python
|
src/main-gui.py
|
FlingJLJ/ThrowawayNameGenerator
|
42324c5b13ab91f7eb9047e7c704fbd6812f07ec
|
[
"Unlicense"
] | null | null | null |
src/main-gui.py
|
FlingJLJ/ThrowawayNameGenerator
|
42324c5b13ab91f7eb9047e7c704fbd6812f07ec
|
[
"Unlicense"
] | null | null | null |
src/main-gui.py
|
FlingJLJ/ThrowawayNameGenerator
|
42324c5b13ab91f7eb9047e7c704fbd6812f07ec
|
[
"Unlicense"
] | null | null | null |
import generator
from tkinter import Tk
| 9.2
| 23
| 0.73913
| 6
| 46
| 5.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 46
| 4
| 24
| 11.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1eb33e7d6f69e6cc3887ea546b3d94fd2c8984ce
| 95
|
py
|
Python
|
generate/python/__init__.py
|
Luthaf/Chemharp-bindgen
|
7d25556773fb5fe22dd1dbb0bd0d34fb2e6dccb8
|
[
"MIT"
] | null | null | null |
generate/python/__init__.py
|
Luthaf/Chemharp-bindgen
|
7d25556773fb5fe22dd1dbb0bd0d34fb2e6dccb8
|
[
"MIT"
] | 2
|
2018-02-25T21:46:45.000Z
|
2018-11-19T22:39:54.000Z
|
generate/python/__init__.py
|
chemfiles/bindgen
|
7d25556773fb5fe22dd1dbb0bd0d34fb2e6dccb8
|
[
"MIT"
] | null | null | null |
# -* coding: utf-8 -*
"""Generate FFI for Python ctypes module"""
from .ffi import write_ffi
| 15.833333
| 43
| 0.673684
| 14
| 95
| 4.5
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012821
| 0.178947
| 95
| 5
| 44
| 19
| 0.794872
| 0.610526
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1edf819f9904d1121b597dd83cad79fcd78fb2e0
| 42
|
py
|
Python
|
src/ttkbootstrap/dialogs/__init__.py
|
dmalves/ttkbootstrap
|
04d441c7a0cfbe6a2debea80c41994201dfc5562
|
[
"MIT"
] | 2
|
2022-01-23T01:48:38.000Z
|
2022-01-24T00:30:58.000Z
|
src/ttkbootstrap/dialogs/__init__.py
|
dmalves/ttkbootstrap
|
04d441c7a0cfbe6a2debea80c41994201dfc5562
|
[
"MIT"
] | null | null | null |
src/ttkbootstrap/dialogs/__init__.py
|
dmalves/ttkbootstrap
|
04d441c7a0cfbe6a2debea80c41994201dfc5562
|
[
"MIT"
] | 1
|
2022-03-19T08:47:08.000Z
|
2022-03-19T08:47:08.000Z
|
from ttkbootstrap.dialogs.dialogs import *
| 42
| 42
| 0.857143
| 5
| 42
| 7.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 42
| 1
| 42
| 42
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
94aa574f844d10fd95c7ce670f9a65a1c684737a
| 159
|
py
|
Python
|
end2you/models/model.py
|
FedericoCozziVM/end2you
|
187ec793d26807cf92347fa093b922a29651893e
|
[
"BSD-3-Clause"
] | null | null | null |
end2you/models/model.py
|
FedericoCozziVM/end2you
|
187ec793d26807cf92347fa093b922a29651893e
|
[
"BSD-3-Clause"
] | null | null | null |
end2you/models/model.py
|
FedericoCozziVM/end2you
|
187ec793d26807cf92347fa093b922a29651893e
|
[
"BSD-3-Clause"
] | null | null | null |
from abc import ABCMeta, abstractmethod
class Model(metaclass=ABCMeta):
    """Abstract interface: concrete models must implement create_model."""

    @abstractmethod
    def create_model(*args, **kwargs):
        # NOTE(review): declared without an explicit 'self'; the instance
        # arrives as the first positional arg -- confirm this is intentional.
        pass
| 17.666667
| 39
| 0.647799
| 16
| 159
| 6.375
| 0.8125
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.27044
| 159
| 9
| 40
| 17.666667
| 0.87931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0.2
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
a20cfa407b132673c8b21da1e26e90284f8eb832
| 225
|
py
|
Python
|
understat/constants.py
|
arkjinli/understat
|
09d8d839f07d433e5eefde2592f570986661ac93
|
[
"MIT"
] | null | null | null |
understat/constants.py
|
arkjinli/understat
|
09d8d839f07d433e5eefde2592f570986661ac93
|
[
"MIT"
] | null | null | null |
understat/constants.py
|
arkjinli/understat
|
09d8d839f07d433e5eefde2592f570986661ac93
|
[
"MIT"
] | null | null | null |
BASE_URL = "https://understat.com/"
LEAGUE_URL = "https://understat.com/league/{}/{}"
PLAYER_URL = "https://understat.com/player/{}"
TEAM_URL = "https://understat.com/team/{}/{}"
PATTERN = r"{}\s+=\s+JSON.parse\(\'(.*?)\'\)"
| 37.5
| 49
| 0.617778
| 29
| 225
| 4.655172
| 0.448276
| 0.237037
| 0.503704
| 0.592593
| 0.385185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 225
| 5
| 50
| 45
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0.671111
| 0.142222
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a21eb6e126b51ae2964071338a60f4514b00e11b
| 11,551
|
py
|
Python
|
dual_encoder/keras_layers_test.py
|
garyxcheng/federated
|
ba7133ead6127af71ea9356e26bfd05c02f8324a
|
[
"Apache-2.0"
] | 330
|
2020-09-14T23:10:16.000Z
|
2022-03-30T19:49:19.000Z
|
dual_encoder/keras_layers_test.py
|
garyxcheng/federated
|
ba7133ead6127af71ea9356e26bfd05c02f8324a
|
[
"Apache-2.0"
] | 52
|
2020-09-30T06:10:51.000Z
|
2022-03-31T19:25:16.000Z
|
dual_encoder/keras_layers_test.py
|
garyxcheng/federated
|
ba7133ead6127af71ea9356e26bfd05c02f8324a
|
[
"Apache-2.0"
] | 119
|
2020-09-24T04:54:46.000Z
|
2022-03-31T21:46:57.000Z
|
# Copyright 2021, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from absl.testing import absltest
import tensorflow as tf
from dual_encoder import keras_layers
l2_normalize_fn = lambda x: tf.keras.backend.l2_normalize(x, axis=-1)
class KerasLayersTest(absltest.TestCase):
def test_masked_average_3d(self):
masked_average_layer = keras_layers.MaskedAverage(1)
inputs = tf.constant([
[[0.5, 0.3], [0.4, 0.1], [0.4, 0.1]],
[[0.6, 0.8], [0.5, 0.4], [0.4, 0.1]],
[[0.9, 0.4], [0.4, 0.1], [0.4, 0.1]],
[[0.9, 0.4], [0.4, 0.1], [0.4, 0.1]],
])
mask = tf.constant([[True, True, True],
[False, False, True],
[True, False, False],
[False, False, False]])
output_average = masked_average_layer.call(inputs, mask=mask)
output_mask = masked_average_layer.compute_mask(inputs, mask=mask)
expected_average = tf.constant([
[1.3 / 3, 0.5 / 3],
[0.4, 0.1],
[0.9, 0.4],
[0.0, 0.0]
])
expected_mask = None
tf.debugging.assert_near(expected_average, output_average)
self.assertEqual(expected_mask, output_mask)
def test_masked_average_4d(self):
masked_average_layer = keras_layers.MaskedAverage(2)
inputs = tf.constant([
[[[0.5, 0.3], [0.4, 0.1], [0.4, 0.1]],
[[0.6, 0.8], [0.5, 0.4], [0.4, 0.1]]],
[[[0.6, 0.8], [0.5, 0.4], [0.4, 0.1]],
[[0.6, 0.8], [0.5, 0.4], [0.4, 0.1]]],
[[[0.9, 0.4], [0.4, 0.1], [0.4, 0.1]],
[[0.6, 0.8], [0.5, 0.4], [0.4, 0.1]]],
[[[0.9, 0.4], [0.4, 0.1], [0.4, 0.1]],
[[0.6, 0.8], [0.5, 0.4], [0.4, 0.1]]],
])
mask = tf.constant([[[True, True, True], [True, False, True]],
[[False, False, True], [False, False, False]],
[[True, False, False], [True, True, True]],
[[False, False, False], [True, False, False]]])
output_average = masked_average_layer.call(inputs, mask=mask)
output_mask = masked_average_layer.compute_mask(inputs, mask=mask)
expected_average = tf.constant([
[[1.3 / 3, 0.5 / 3], [0.5, 0.45]],
[[0.4, 0.1], [0.0, 0.0]],
[[0.9, 0.4], [0.5, 1.3 / 3]],
[[0.0, 0.0], [0.6, 0.8]],
])
expected_mask = tf.constant([[True, True],
[True, False],
[True, True],
[False, True]])
tf.debugging.assert_near(expected_average, output_average)
tf.debugging.assert_equal(expected_mask, output_mask)
def test_masked_average_raises_error(self):
masked_average_layer = keras_layers.MaskedAverage(1)
inputs = tf.constant([
[[0.5, 0.3], [0.4, 0.1], [0.4, 0.1]],
[[0.6, 0.8], [0.5, 0.4], [0.4, 0.1]],
[[0.9, 0.4], [0.4, 0.1], [0.4, 0.1]],
])
mask = None
with self.assertRaises(ValueError):
masked_average_layer.call(inputs, mask=mask)
with self.assertRaises(ValueError):
masked_average_layer.compute_mask(inputs, mask=mask)
def test_masked_reshape(self):
masked_reshape_layer = keras_layers.MaskedReshape((4, 4, 2, 1), (4, 4, 2))
inputs = tf.constant([
[[1.0], [2.0], [0.5], [0.4], [0.4], [0.1], [0.0], [0.0]],
[[0.4], [0.1], [0.0], [0.0], [0.0], [0.0], [0.6], [0.8]],
[[0.9], [0.4], [0.5], [3.0], [0.9], [0.4], [0.5], [3.0]],
[[0.0], [0.0], [0.6], [0.8], [0.4], [0.1], [0.0], [0.0]],
])
mask = tf.constant(
[[True, False, True, True, True, False, False, False],
[True, False, True, True, True, True, False, True],
[False, True, True, False, True, True, True, True],
[False, True, True, True, True, False, False, True]])
output = masked_reshape_layer.call(inputs, mask=mask)
output_mask = masked_reshape_layer.compute_mask(inputs, mask=mask)
expected_output = tf.constant([
[[[1.0], [2.0]], [[0.5], [0.4]], [[0.4], [0.1]], [[0.0], [0.0]]],
[[[0.4], [0.1]], [[0.0], [0.0]], [[0.0], [0.0]], [[0.6], [0.8]]],
[[[0.9], [0.4]], [[0.5], [3.0]], [[0.9], [0.4]], [[0.5], [3.0]]],
[[[0.0], [0.0]], [[0.6], [0.8]], [[0.4], [0.1]], [[0.0], [0.0]]],
])
expected_mask = tf.constant(
[[[True, False], [True, True], [True, False], [False, False]],
[[True, False], [True, True], [True, True], [False, True]],
[[False, True], [True, False], [True, True], [True, True]],
[[False, True], [True, True], [True, False], [False, True]]])
tf.debugging.assert_near(expected_output, output)
tf.debugging.assert_equal(expected_mask, output_mask)
def test_masked_reshape_unknown_batch_size(self):
masked_reshape_layer = keras_layers.MaskedReshape((-1, 4, 2, 1), (-1, 4, 2))
inputs = tf.constant([
[[1.0], [2.0], [0.5], [0.4], [0.4], [0.1], [0.0], [0.0]],
[[0.4], [0.1], [0.0], [0.0], [0.0], [0.0], [0.6], [0.8]],
[[0.9], [0.4], [0.5], [3.0], [0.9], [0.4], [0.5], [3.0]],
[[0.0], [0.0], [0.6], [0.8], [0.4], [0.1], [0.0], [0.0]],
])
mask = tf.constant(
[[True, False, True, True, True, False, False, False],
[True, False, True, True, True, True, False, True],
[False, True, True, False, True, True, True, True],
[False, True, True, True, True, False, False, True]])
output = masked_reshape_layer.call(inputs, mask=mask)
output_mask = masked_reshape_layer.compute_mask(inputs, mask=mask)
expected_output = tf.constant([
[[[1.0], [2.0]], [[0.5], [0.4]], [[0.4], [0.1]], [[0.0], [0.0]]],
[[[0.4], [0.1]], [[0.0], [0.0]], [[0.0], [0.0]], [[0.6], [0.8]]],
[[[0.9], [0.4]], [[0.5], [3.0]], [[0.9], [0.4]], [[0.5], [3.0]]],
[[[0.0], [0.0]], [[0.6], [0.8]], [[0.4], [0.1]], [[0.0], [0.0]]],
])
expected_mask = tf.constant(
[[[True, False], [True, True], [True, False], [False, False]],
[[True, False], [True, True], [True, True], [False, True]],
[[False, True], [True, False], [True, True], [True, True]],
[[False, True], [True, True], [True, False], [False, True]]])
tf.debugging.assert_near(expected_output, output)
tf.debugging.assert_equal(expected_mask, output_mask)
def test_masked_reshape_raises_error(self):
masked_reshape_layer = keras_layers.MaskedReshape((-1, 4, 2, 1), (-1, 4, 2))
inputs = tf.constant([
[[1.0], [2.0], [0.5], [0.4], [0.4], [0.1], [0.0], [0.0]],
[[0.4], [0.1], [0.0], [0.0], [0.0], [0.0], [0.6], [0.8]],
[[0.9], [0.4], [0.5], [3.0], [0.9], [0.4], [0.5], [3.0]],
[[0.0], [0.0], [0.6], [0.8], [0.4], [0.1], [0.0], [0.0]],
])
mask = None
with self.assertRaises(ValueError):
masked_reshape_layer.call(inputs, mask=mask)
with self.assertRaises(ValueError):
masked_reshape_layer.compute_mask(inputs, mask=mask)
def test_embedding_spreadout_regularizer_dot_product(self):
weights = tf.constant(
[[1.0, 0.0, 0.0],
[2.0, 2.0, 2.0],
[0.1, 0.2, 0.3],
[0.3, 0.2, 0.1],
[0.0, 1.0, 0.0]])
regularizer = keras_layers.EmbeddingSpreadoutRegularizer(
spreadout_lambda=0.1,
normalization_fn=None,
l2_regularization=0.0)
# Similarities without diagonal looks like:
# 0.0 2.0 0.1 0.3 0.0
# 2.0 0.0 1.2 1.2 2.0
# 0.1 1.2 0.0 0.1 0.2
# 0.3 1.2 0.1 0.0 0.2
# 0.0 2.0 0.2 0.2 0.0
loss = regularizer(weights)
# L2 norm of above similarities.
expected_loss = 0.47053161424
tf.debugging.assert_near(expected_loss, loss)
regularizer = keras_layers.EmbeddingSpreadoutRegularizer(
spreadout_lambda=0.1,
normalization_fn=None,
l2_regularization=1.0)
l2_regularizer = tf.keras.regularizers.l2(1.0)
loss = regularizer(weights)
expected_loss = 0.47053161424 + l2_regularizer(weights)
tf.debugging.assert_near(expected_loss, loss)
def test_embedding_spreadout_regularizer_cosine_similarity(self):
weights = tf.constant(
[[1.0, 0.0, 0.0],
[2.0, 2.0, 2.0],
[0.1, 0.2, 0.3],
[0.3, 0.2, 0.1],
[0.0, 1.0, 0.0]])
regularizer = keras_layers.EmbeddingSpreadoutRegularizer(
spreadout_lambda=0.1,
normalization_fn=l2_normalize_fn,
l2_regularization=0.0)
loss = regularizer(weights)
# L2 norm of above similarities.
expected_loss = 0.2890284
tf.debugging.assert_near(expected_loss, loss)
regularizer = keras_layers.EmbeddingSpreadoutRegularizer(
spreadout_lambda=0.1,
normalization_fn=l2_normalize_fn,
l2_regularization=1.0)
l2_regularizer = tf.keras.regularizers.l2(1.0)
loss = regularizer(weights)
expected_loss = 0.2890284 + l2_regularizer(weights)
tf.debugging.assert_near(expected_loss, loss)
def test_embedding_spreadout_regularizer_no_spreadout(self):
weights = tf.constant(
[[1.0, 0.0, 0.0],
[2.0, 2.0, 2.0],
[0.1, 0.2, 0.3],
[0.3, 0.2, 0.1],
[0.0, 1.0, 0.0]])
regularizer = keras_layers.EmbeddingSpreadoutRegularizer(
spreadout_lambda=0.0,
normalization_fn=None,
l2_regularization=0.0)
loss = regularizer(weights)
expected_loss = 0.0
tf.debugging.assert_near(expected_loss, loss)
# Test that L2 normalization behaves normally.
regularizer = keras_layers.EmbeddingSpreadoutRegularizer(
spreadout_lambda=0.0,
normalization_fn=None,
l2_regularization=0.1)
l2_regularizer = tf.keras.regularizers.l2(0.1)
loss = regularizer(weights)
l2_loss = l2_regularizer(weights)
tf.debugging.assert_near(l2_loss, loss)
# Test that normalization_fn has no effect.
regularizer = keras_layers.EmbeddingSpreadoutRegularizer(
spreadout_lambda=0.0,
normalization_fn=l2_normalize_fn,
l2_regularization=0.1)
l2_regularizer = tf.keras.regularizers.l2(0.1)
loss = regularizer(weights)
l2_loss = l2_regularizer(weights)
tf.debugging.assert_near(l2_loss, loss)
def test_embedding_spreadout_regularizer_get_config(self):
weights = tf.constant(
[[1.0, 0.0, 0.0],
[2.0, 2.0, 2.0],
[0.1, 0.2, 0.3],
[0.3, 0.2, 0.1],
[0.0, 1.0, 0.0]])
regularizer = keras_layers.EmbeddingSpreadoutRegularizer(
spreadout_lambda=0.0,
normalization_fn=l2_normalize_fn,
l2_regularization=0.1)
config = regularizer.get_config()
expected_config = {
'spreadout_lambda': 0.0,
'normalization_fn': l2_normalize_fn,
'l2_regularization': 0.1
}
new_regularizer = (
keras_layers.EmbeddingSpreadoutRegularizer.from_config(config))
l2_regularizer = tf.keras.regularizers.l2(0.1)
loss = new_regularizer(weights)
l2_loss = l2_regularizer(weights)
self.assertEqual(config, expected_config)
tf.debugging.assert_near(l2_loss, loss)
if __name__ == '__main__':
absltest.main()
| 35.541538
| 80
| 0.565665
| 1,760
| 11,551
| 3.585227
| 0.089773
| 0.055468
| 0.050872
| 0.046276
| 0.830428
| 0.816482
| 0.810143
| 0.733281
| 0.69477
| 0.680032
| 0
| 0.100658
| 0.237122
| 11,551
| 324
| 81
| 35.651235
| 0.615411
| 0.072808
| 0
| 0.740586
| 0
| 0
| 0.005334
| 0
| 0
| 0
| 0
| 0
| 0.087866
| 1
| 0.041841
| false
| 0
| 0.012552
| 0
| 0.058577
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bf89255bb6d0dfd86da946f296e7eeaddd6b7b8e
| 71
|
py
|
Python
|
verboselib/__init__.py
|
oblalex/verboselib
|
5e0c4d8f2b9f2c4bc3bf4b79a5fffc1d6db6e44e
|
[
"MIT"
] | 3
|
2015-10-28T07:30:23.000Z
|
2017-09-25T07:52:40.000Z
|
verboselib/__init__.py
|
oblalex/verboselib
|
5e0c4d8f2b9f2c4bc3bf4b79a5fffc1d6db6e44e
|
[
"MIT"
] | 2
|
2015-01-07T14:25:52.000Z
|
2020-10-11T20:33:30.000Z
|
verboselib/__init__.py
|
oblalex/verboselib
|
5e0c4d8f2b9f2c4bc3bf4b79a5fffc1d6db6e44e
|
[
"MIT"
] | null | null | null |
from .core import *
from .helpers import *
from .translations import *
| 17.75
| 27
| 0.746479
| 9
| 71
| 5.888889
| 0.555556
| 0.377358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169014
| 71
| 3
| 28
| 23.666667
| 0.898305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bfb2e62647f64a72c90f157a320ccddde03b2323
| 1,651
|
py
|
Python
|
tasks/EPAM/pytasks/task04-03.py
|
AleksNeStu/projects
|
1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb
|
[
"Apache-2.0"
] | 2
|
2022-01-19T18:01:35.000Z
|
2022-02-06T06:54:38.000Z
|
tasks/EPAM/pytasks/task04-03.py
|
AleksNeStu/projects
|
1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb
|
[
"Apache-2.0"
] | null | null | null |
tasks/EPAM/pytasks/task04-03.py
|
AleksNeStu/projects
|
1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'AleksNeStu'
# Task04-03 (not mandatory):
# Create **very** simple [ORM] using data descriptors like in `Subtask 1` and [SQLite]3 python module to store fields in Data Base.
# After creating instances of model all fields mast be stored in SQLite DB.
# Example:
# ```python
# >>> from ormapi import Model, BirthdayField, NameField, PhoneField
# >>> class Person(Model):
# __table__ = "persons"
# ... name = NameField()
# ... birthday = BirthdayField()
# ... phone = PhoneField()
# ...
# >>> p = Person() # New row in table *persons* are created with default values for fields.
# >>> p.name = "Aleks" # Cell updated with new value.
# >>> # Or you can create special method to save (commit) the values to DB like bellow.
# >>> p.phone = "375 25 5443322" # Not yet stored in DB.
# >>> p.save() # All changes commited to DB.
# ```
# Addition info:
# [ORM]: https://en.wikipedia.org/wiki/Object-relational_mapping
# [SQLite]: https://en.wikipedia.org/wiki/SQLite
# Input
from EPAM.ormapi import Model, BirthdayField, NameField, PhoneField
class Person(Model):
# [ORM] used data descriptors like in `Subtask 1` and [SQLite]3 python module to store fields in Data Base
__table__ = "persons"
name = NameField()
birthday = BirthdayField()
phone = PhoneField()
p = Person() # New row in table *persons* are created with default values for fields
p.name = "Aleks" # Cell updated with new value
# Create special method to save (commit) the values to DB like bellow
p.phone = "375 25 5443322" # Not yet stored in DB
p.save() # All changes commited to DB
| 35.891304
| 131
| 0.677165
| 229
| 1,651
| 4.825328
| 0.419214
| 0.043439
| 0.034389
| 0.038009
| 0.78552
| 0.743891
| 0.743891
| 0.743891
| 0.743891
| 0.626244
| 0
| 0.024812
| 0.194428
| 1,651
| 46
| 132
| 35.891304
| 0.806015
| 0.77771
| 0
| 0
| 0
| 0
| 0.108108
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
bfc8b16380c8841cf499d824a1ceefb407a9f436
| 92
|
py
|
Python
|
RL_based_ATSC/single-intersection/SALT/rl/agents/__init__.py
|
sue04206/traffic-signal-optimization
|
f0891c0df8a3f84bf5011af85467e67a0091371b
|
[
"Apache-2.0"
] | 6
|
2020-08-27T05:45:27.000Z
|
2021-12-27T05:11:29.000Z
|
RL_based_ATSC/single-intersection/SALT/rl/agents/__init__.py
|
sue04206/traffic-signal-optimization
|
f0891c0df8a3f84bf5011af85467e67a0091371b
|
[
"Apache-2.0"
] | null | null | null |
RL_based_ATSC/single-intersection/SALT/rl/agents/__init__.py
|
sue04206/traffic-signal-optimization
|
f0891c0df8a3f84bf5011af85467e67a0091371b
|
[
"Apache-2.0"
] | 3
|
2021-12-14T06:59:52.000Z
|
2022-02-21T04:37:49.000Z
|
from __future__ import absolute_import
from .dqn import Learner, get_state_1d, get_state_2d
| 30.666667
| 52
| 0.858696
| 15
| 92
| 4.666667
| 0.666667
| 0.228571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0.108696
| 92
| 2
| 53
| 46
| 0.829268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bfd97c74a4a0dbfc00330f2996486ff74acd2275
| 2,232
|
py
|
Python
|
exp2/layers.py
|
raghakot/deep-learning-experiments
|
8ae0a67825ba0d6d863fd253d534754c21d87056
|
[
"MIT"
] | 7
|
2017-10-29T10:25:46.000Z
|
2021-07-11T06:07:26.000Z
|
exp2/layers.py
|
raghakot/deep-learning-experiments
|
8ae0a67825ba0d6d863fd253d534754c21d87056
|
[
"MIT"
] | null | null | null |
exp2/layers.py
|
raghakot/deep-learning-experiments
|
8ae0a67825ba0d6d863fd253d534754c21d87056
|
[
"MIT"
] | null | null | null |
from keras.layers.convolutional import Convolution2D
from keras import backend as K
import tensorflow as tf
permutation = [[1, 0], [0, 0], [0, 1], [2, 0], [1, 1], [0, 2], [2, 1], [2, 2], [1, 2]]
def shift_rotate(w, shift=1):
shape = w.get_shape()
for i in range(shift):
w = tf.reshape(tf.gather_nd(w, permutation), shape)
return w
class Convolution2D_4(Convolution2D):
def call(self, x, mask=None):
w = self.W
w_rot = [w]
for i in range(3):
w = shift_rotate(w, shift=2)
w_rot.append(w)
outputs = tf.stack([K.conv2d(x, w_i, strides=self.subsample,
border_mode=self.border_mode,
dim_ordering=self.dim_ordering,
filter_shape=self.W_shape) for w_i in w_rot])
output = K.max(outputs, 0)
if self.bias:
if self.dim_ordering == 'th':
output += K.reshape(self.b, (1, self.nb_filter, 1, 1))
elif self.dim_ordering == 'tf':
output += K.reshape(self.b, (1, 1, 1, self.nb_filter))
else:
raise ValueError('Invalid dim_ordering:', self.dim_ordering)
output = self.activation(output)
return output
class Convolution2D_8(Convolution2D):
def call(self, x, mask=None):
w = self.W
w_rot = [w]
for i in range(7):
w = shift_rotate(w)
w_rot.append(w)
outputs = tf.stack([K.conv2d(x, w_i, strides=self.subsample,
border_mode=self.border_mode,
dim_ordering=self.dim_ordering,
filter_shape=self.W_shape) for w_i in w_rot])
output = K.max(outputs, 0)
if self.bias:
if self.dim_ordering == 'th':
output += K.reshape(self.b, (1, self.nb_filter, 1, 1))
elif self.dim_ordering == 'tf':
output += K.reshape(self.b, (1, 1, 1, self.nb_filter))
else:
raise ValueError('Invalid dim_ordering:', self.dim_ordering)
output = self.activation(output)
return output
| 33.818182
| 86
| 0.521953
| 295
| 2,232
| 3.810169
| 0.216949
| 0.117438
| 0.106762
| 0.064057
| 0.743772
| 0.743772
| 0.743772
| 0.743772
| 0.743772
| 0.743772
| 0
| 0.031425
| 0.358423
| 2,232
| 65
| 87
| 34.338462
| 0.753492
| 0
| 0
| 0.705882
| 0
| 0
| 0.022401
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.058824
| 0
| 0.215686
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
44a619a45c4e6e647cddf2da0098dfef69c02e20
| 1,150
|
py
|
Python
|
47/test_password.py
|
alehpineda/bitesofpy
|
bfd319a606cd0b7b9bfb85a3e8942872a2d43c48
|
[
"MIT"
] | null | null | null |
47/test_password.py
|
alehpineda/bitesofpy
|
bfd319a606cd0b7b9bfb85a3e8942872a2d43c48
|
[
"MIT"
] | 2
|
2020-09-24T11:25:29.000Z
|
2021-06-25T15:43:35.000Z
|
47/test_password.py
|
alehpineda/bitesofpy
|
bfd319a606cd0b7b9bfb85a3e8942872a2d43c48
|
[
"MIT"
] | null | null | null |
from password import validate_password, used_passwords
def test_password_len():
assert not validate_password("short")
assert not validate_password("waytoolongpassword")
def test_password_missing_chars():
assert not validate_password("UPPERCASE")
assert not validate_password("lowercase")
assert not validate_password("PW_no_digits")
assert not validate_password("Pw9NoPunc")
assert not validate_password("_password_")
assert not validate_password("@#$$)==1")
def test_password_only_one_letter():
assert not validate_password("@#$$)==1a")
def test_validate_password_good_pws():
assert validate_password("passWord9_")
assert validate_password("another>4Y")
assert validate_password("PyBites@1912")
assert validate_password("We<3Python")
def test_password_not_used_before():
assert not validate_password("PassWord@1")
assert not validate_password("PyBit$s9")
def test_password_cache_cannot_reuse():
num_passwords_use = len(used_passwords)
assert validate_password("go1@PW")
assert len(used_passwords) == num_passwords_use + 1
assert not validate_password("go1@PW")
| 29.487179
| 55
| 0.76
| 143
| 1,150
| 5.755245
| 0.307692
| 0.36938
| 0.247874
| 0.36452
| 0.143378
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015152
| 0.13913
| 1,150
| 38
| 56
| 30.263158
| 0.816162
| 0
| 0
| 0
| 0
| 0
| 0.14
| 0
| 0
| 0
| 0
| 0
| 0.692308
| 1
| 0.230769
| false
| 1
| 0.038462
| 0
| 0.269231
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
44a80c8d02ff6b81baac2b61a842eb9e6343f3cd
| 3,935
|
py
|
Python
|
day1/part1.py
|
cheddar-cheeze/advent-of-code-2018
|
a9d21b21d64d5bc35f5f3a1481c0af8b0499b0d0
|
[
"BSD-2-Clause"
] | null | null | null |
day1/part1.py
|
cheddar-cheeze/advent-of-code-2018
|
a9d21b21d64d5bc35f5f3a1481c0af8b0499b0d0
|
[
"BSD-2-Clause"
] | null | null | null |
day1/part1.py
|
cheddar-cheeze/advent-of-code-2018
|
a9d21b21d64d5bc35f5f3a1481c0af8b0499b0d0
|
[
"BSD-2-Clause"
] | null | null | null |
#! /bin/python3
import requests
text = """
+6
-17
+16
+7
+12
+2
-7
-5
-4
-16
+2
+12
-16
-1
-12
-3
+8
-12
+8
-3
+18
-9
+1
-20
+15
-11
-18
-8
+18
-4
+10
+1
-2
+13
+12
+16
-6
+12
+2
+11
+5
+1
-14
+16
-20
-5
+20
+6
+13
+11
+3
-9
-15
+1
+19
+8
+19
-16
+19
-17
-17
+19
+2
+5
+16
+2
-4
+19
+6
-16
+15
+7
+5
-18
+19
-8
+9
+15
-3
-3
-15
+13
-1
+10
+13
+16
-4
+10
-12
+10
+17
-2
-17
+10
+17
+8
+7
+5
+17
+10
+14
-17
-13
+19
+6
-11
+10
+8
+1
-18
-5
-8
-19
+12
+12
-4
+8
-10
-9
+2
+11
-6
+23
+17
-10
+5
+9
+19
+4
-1
+6
+12
-16
+9
+6
-9
+4
-2
-14
-5
-9
+13
+10
-2
+17
+10
+13
-5
-12
+19
+10
+16
+13
-19
-17
+4
+12
+18
-14
+5
+17
-5
-4
+18
-5
-1
-18
+11
+12
+15
-9
+3
-7
+8
-16
+9
-13
-10
-11
+3
+2
+19
+21
+6
+6
+2
-5
-6
+13
-18
-15
-16
+8
-13
-14
+9
-3
-8
-11
+9
-15
-16
+4
+13
-18
+10
-3
+15
+8
-18
+13
-5
+18
+15
-20
+19
+26
+7
-4
+25
-8
-19
+13
-2
-8
+11
+22
-16
+9
-1
+20
+12
-9
+10
-8
+9
+19
+17
+10
+4
-11
+15
+5
+4
-11
+19
-7
-6
-9
-18
+2
-9
+16
-17
+21
+16
+7
+1
+11
-4
-20
+31
-1
+2
+17
-12
-12
+18
-16
+18
-6
+19
-5
+1
+11
-8
-1
-16
+15
+17
+4
+1
-8
+15
-3
+18
-9
-15
-16
-18
+7
-22
+21
-13
-19
+3
-25
+5
-8
-19
-18
-6
+12
-11
+19
+6
+19
+70
-12
-15
+3
+25
+1
+18
-12
+21
+14
+2
+6
-13
-12
+6
+3
+13
+2
+11
-9
+1
-13
+15
+15
+1
+10
-9
+5
+13
-17
+10
+3
+12
-7
-4
+6
+3
-16
+15
+13
-9
+12
+20
+10
+3
+14
+16
-1
-13
-9
+8
+2
+17
-13
+10
+1
+11
+20
+16
-14
+11
-17
-2
+20
+7
-24
-7
+14
-1
+5
+10
+26
-13
+30
+9
-6
+13
+31
+14
-1
+4
-24
-59
-10
+8
-20
-11
-14
-10
-6
-15
-3
-14
+10
+16
+2
-5
+4
-18
-13
-7
-11
-5
-7
-16
-15
-1
-10
+16
+8
-17
+12
-18
-5
-25
+17
-12
-10
-3
-10
+11
+4
-7
+19
+51
+4
+6
-5
-11
-2
+15
-6
-5
-29
-20
+6
+6
-4
-28
-13
-2
-15
-16
-64
+23
-36
-25
-21
-52
-118
-18
-28
+25
-6
+29
+26
+46
-69
-37
-75
-100
-463
-63213
+11
+13
-21
-4
+17
+29
-10
-16
+2
-16
+7
+11
-16
-9
-21
+6
-1
-8
-3
+7
-5
-18
+14
-10
+1
+7
+9
-19
+7
-18
-7
+8
-7
+10
+4
+7
+9
+18
+55
+21
+16
-13
-61
+11
-45
-35
-14
-8
-10
-12
+6
-7
+17
-2
-13
-20
+11
-13
+12
+13
-2
-8
+13
-23
-8
-9
+4
+2
+4
-12
-11
-11
-9
+11
+14
-18
-16
+19
-13
-14
+7
-6
-9
-16
-5
+15
+1
-8
+15
+15
+12
+10
+16
-18
+3
-17
-10
-16
-15
-16
+1
+4
+14
-10
-6
+12
-9
-8
-20
-7
+4
+6
-22
+10
-15
-13
+15
-12
+3
-14
-17
+18
-20
-19
+14
+2
-9
-16
+11
-19
-1
-19
-3
-12
-16
-4
-2
+7
-12
+14
+4
+14
+13
+1
+4
+10
+14
-16
-2
-19
-8
-8
-22
-23
+10
+1
-14
-17
-17
-6
+9
-16
+4
+11
-14
+11
-17
-9
-8
-17
-13
+17
-15
-8
-16
-3
-5
-8
-17
+2
+5
+2
-19
-1
-12
+1
+16
-8
-19
-18
-2
-15
+7
-11
-6
+12
-14
-18
+15
-18
-12
+10
+12
-17
-13
-18
+13
-11
+15
-9
+8
+11
-20
+17
+9
+18
-29
+3
-14
-14
+2
-12
-5
-19
-19
-16
-15
+13
-8
-3
-17
-4
-9
+17
-12
+17
-10
+22
+6
-9
+6
-5
+17
+17
-5
-16
-16
-13
-13
+5
+6
-24
-3
-16
-12
+7
+18
-4
+13
-8
+13
+8
+13
-18
-5
-14
+17
-11
+1
-21
-13
-10
-16
+4
+16
-18
+13
-19
-10
-2
+7
-23
+8
-3
-2
-36
-5
-6
+10
-19
+11
-19
+7
+17
+13
-6
+12
-3
+14
+11
+18
+7
+13
+4
+19
-1
+12
+1
-5
-6
-6
+1
+2
+7
-14
-3
-2
+33
+14
+25
+28
+8
+8
+11
+19
-6
+12
-16
-13
-9
+7
-2
+9
-19
-19
+12
-8
+26
-16
-16
+19
+18
+19
-18
+15
-10
-2
+8
-5
+1
-9
-25
+7
+16
+17
+12
+7
+3
+2
-16
+30
+4
+5
-13
-14
-16
+7
-12
+18
+6
+25
+24
+1
+9
+6
-3
-11
+20
-2
+18
+12
+18
-11
-2
+15
+8
-11
-16
-6
+20
+11
+11
+16
+2
+18
+12
-17
+12
+11
-9
+16
+8
-10
-7
+19
+2
+13
+12
-20
-25
+2
+9
-13
+19
-4
-10
-17
+7
-9
+10
+15
-13
-25
-1
-10
+13
-12
+7
-18
-7
-17
+9
-4
-17
-11
-7
-5
-1
-16
+14
+20
-13
+42
-25
-27
-39
+12
-41
+3
+19
-2
-23
-12
-48
-35
+6
-116
+16
+18
-2
+7
-11
-40
+41
-155
+5
+125
-238
-1
-63346
-9
-18
-3
-11
+13
-4
-3
-4
-3
+23
+11
+14
+12
+3
+7
+1
-18
+15
-7
-15
-1
-9
+14
-10
-22
-1
-24
-13
+6
-14
-7
-8
-7
-1
-5
+9
+6
+19
+13
+3
-19
-13
-15
-2
+15
+3
+24
+26
+72
-16
+14
+16
+17
+14
-4
-3
+10
+3
+11
+12
+15
+9
+12
+5
+11
-35
-23
+11
-10
-5
+3
-15
-15
+128514
"""
a = 0
for num in text.splitlines():
if '-' in num:
a = a - int(num.replace('-', ''))
if '+' in num:
a = a + int(num.replace('+', ''))
else:
pass
print("the value is {}".format(a,))
| 3.812984
| 41
| 0.448539
| 1,050
| 3,935
| 1.680952
| 0.079048
| 0.01813
| 0.005666
| 0.007932
| 0.024929
| 0.024929
| 0.024929
| 0.024929
| 0
| 0
| 0
| 0.577817
| 0.278272
| 3,935
| 1,031
| 42
| 3.816683
| 0.043662
| 0.003558
| 0
| 0.957115
| 0
| 0
| 0.940561
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.000975
| 0.000975
| 0
| 0.000975
| 0.000975
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
44f84155ea52a00f6acf2c0e6f5eb6fbf1275d27
| 1,353
|
py
|
Python
|
library/pimoroni_physical_feather_pins/nRF52840.py
|
dglaude/physical_feather_pins
|
ec39620f0e15e9df58829a7d697f94b4aa54d52c
|
[
"MIT"
] | 2
|
2019-10-01T21:08:47.000Z
|
2021-05-22T00:56:17.000Z
|
library/pimoroni_physical_feather_pins/nRF52840.py
|
dglaude/physical_feather_pins
|
ec39620f0e15e9df58829a7d697f94b4aa54d52c
|
[
"MIT"
] | 5
|
2019-09-30T21:10:08.000Z
|
2021-04-23T21:11:32.000Z
|
library/pimoroni_physical_feather_pins/nRF52840.py
|
dglaude/physical_feather_pins
|
ec39620f0e15e9df58829a7d697f94b4aa54d52c
|
[
"MIT"
] | 3
|
2020-06-25T22:10:40.000Z
|
2021-09-06T11:08:22.000Z
|
from . import pin_error
import microcontroller
def pin3():
return microcontroller.pin.P0_31
def pin5():
return microcontroller.pin.P0_04
def pin6():
return microcontroller.pin.P0_05
def pin7():
return microcontroller.pin.P0_30
def pin8():
return microcontroller.pin.P0_28
def pin9():
return microcontroller.pin.P0_02
def pin10():
return microcontroller.pin.P0_03
def pin11():
return microcontroller.pin.P0_14
def pin12():
return microcontroller.pin.P0_13
def pin13():
return microcontroller.pin.P0_15
def pin14():
return microcontroller.pin.P0_24
def pin15():
return microcontroller.pin.P0_25
def pin16():
return microcontroller.pin.P0_10
def pin17():
return microcontroller.pin.P0_12
def pin18():
return microcontroller.pin.P0_11
def pin19():
return microcontroller.pin.P1_08
def pin20():
return microcontroller.pin.P0_07
def pin21():
return microcontroller.pin.P0_26
def pin22():
return microcontroller.pin.P0_27
def pin23():
return microcontroller.pin.P0_06
def pin24():
return microcontroller.pin.P0_08
def pin25():
return microcontroller.pin.P1_09
def init(scope):
"""Pull the pin definitions into the main module namespace"""
for key in globals().keys():
if key.startswith('pin'):
scope[key] = globals()[key]
| 18.283784
| 65
| 0.711013
| 185
| 1,353
| 5.075676
| 0.367568
| 0.492013
| 0.5623
| 0.553781
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09446
| 0.186253
| 1,353
| 74
| 66
| 18.283784
| 0.758401
| 0.04065
| 0
| 0
| 0
| 0
| 0.00232
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.46
| false
| 0
| 0.04
| 0.44
| 0.94
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
78080cb4aa79a7c478a381c61a41bd9a35fd5b9f
| 43
|
py
|
Python
|
tasks/snli/third_party/datasets/__init__.py
|
etri-edgeai/nn-comp-discblock
|
6e00a019c223508797ca91a7d5ffec7917b12c6d
|
[
"Apache-2.0"
] | 10
|
2021-11-19T06:24:51.000Z
|
2022-02-09T15:44:00.000Z
|
tasks/snli/third_party/datasets/__init__.py
|
etri-edgeai/nn-comp-discblock
|
6e00a019c223508797ca91a7d5ffec7917b12c6d
|
[
"Apache-2.0"
] | 9
|
2021-10-01T11:06:27.000Z
|
2021-12-23T02:10:52.000Z
|
tasks/snli/third_party/datasets/__init__.py
|
etri-edgeai/nn-comp-discblock
|
6e00a019c223508797ca91a7d5ffec7917b12c6d
|
[
"Apache-2.0"
] | 2
|
2021-09-14T04:08:36.000Z
|
2021-11-19T06:24:54.000Z
|
from .snli import *
from .multinli import *
| 21.5
| 23
| 0.744186
| 6
| 43
| 5.333333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162791
| 43
| 2
| 23
| 21.5
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
782bb78971f998537681c187fc87534c436ef3bf
| 224
|
py
|
Python
|
aiakos/__main__.py
|
aiakos/aiakos
|
a591e7ef13ab9e8e14b4d3569d43fce694c4150a
|
[
"BSD-2-Clause",
"MIT"
] | 4
|
2017-04-28T19:09:17.000Z
|
2018-07-03T04:43:54.000Z
|
aiakos/__main__.py
|
aiakos/aiakos
|
a591e7ef13ab9e8e14b4d3569d43fce694c4150a
|
[
"BSD-2-Clause",
"MIT"
] | 2
|
2020-06-05T17:46:47.000Z
|
2021-06-10T17:22:58.000Z
|
aiakos/__main__.py
|
aiakos/aiakos
|
a591e7ef13ab9e8e14b4d3569d43fce694c4150a
|
[
"BSD-2-Clause",
"MIT"
] | 2
|
2017-08-14T07:15:14.000Z
|
2019-03-04T14:02:05.000Z
|
#!/usr/bin/env python
import os
import sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "aiakos.settings")
from django.core.management import execute_from_command_line # isort:skip
execute_from_command_line(sys.argv)
| 22.4
| 74
| 0.816964
| 33
| 224
| 5.30303
| 0.666667
| 0.125714
| 0.205714
| 0.251429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084821
| 224
| 9
| 75
| 24.888889
| 0.853659
| 0.138393
| 0
| 0
| 0
| 0
| 0.193717
| 0.115183
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
786d4cec93fdd13c3cfd6d1c7e7e329b50dd63e7
| 11,291
|
py
|
Python
|
tests/functional/test_filesystem.py
|
rena2damas/filemanager-service
|
75da1d876451b05c559b9bac7eebd8beacbe0453
|
[
"MIT"
] | null | null | null |
tests/functional/test_filesystem.py
|
rena2damas/filemanager-service
|
75da1d876451b05c559b9bac7eebd8beacbe0453
|
[
"MIT"
] | null | null | null |
tests/functional/test_filesystem.py
|
rena2damas/filemanager-service
|
75da1d876451b05c559b9bac7eebd8beacbe0453
|
[
"MIT"
] | null | null | null |
import io
import subprocess
from base64 import b64encode
import pytest
from src.api.auth import AuthAPI
@pytest.fixture()
def auth(mocker):
mocker.patch.object(AuthAPI, "authenticate", return_value=True)
return {"Authorization": f"Basic {b64encode(b'user:pass').decode()}"}
class TestFilesystemGET:
def test_supported_paths(self, client):
response = client.get("/filesystem/supported-paths")
assert response.status_code == 200
def test_unauthorized_request_throws_401(self, client):
response = client.get("/filesystem/tmp/", headers={})
assert response.status_code == 401
def test_unsupported_value_throws_400(self, client, auth):
response = client.get("/filesystem/unsupported/", headers=auth)
assert response.status_code == 400
def test_valid_path_returns_200(self, client, auth, mocker):
mocker.patch("src.utils.shell", return_value="file.txt")
response = client.get("/filesystem/tmp/", headers=auth)
assert response.status_code == 200
assert response.json == ["file.txt"]
def test_error_path_returns_400(self, client, auth, mocker):
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr="err")
mocker.patch("src.utils.shell", side_effect=err)
response = client.get("/filesystem/tmp/invalid/", headers=auth)
assert response.status_code == 400
assert response.json == {"code": 400, "message": "err", "reason": "Bad Request"}
def test_permission_denied_returns_403(self, client, auth, mocker):
stderr = "/tmp/root/: Permission denied"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.get("/filesystem/tmp/root/", headers=auth)
assert response.status_code == 403
assert response.json == {
"code": 403,
"message": "permission denied",
"reason": "Forbidden",
}
def test_missing_path_returns_404(self, client, auth, mocker):
stderr = "/tmp/missing/: No such file or directory"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.get("/filesystem/tmp/missing/", headers=auth)
assert response.status_code == 404
assert response.json == {
"code": 404,
"message": "no such file or directory",
"reason": "Not Found",
}
def test_file_attachment_returns_200(self, client, auth, mocker):
mocker.patch("src.utils.shell", side_effect=["file.txt", b""])
mocker.patch("src.utils.isfile", return_value=True)
headers = {**auth, "accept": "application/octet-stream"}
response = client.get("/filesystem/tmp/file.txt", headers=headers)
assert response.status_code == 200
assert (
response.headers["Content-Disposition"] == "attachment; filename=file.txt"
)
assert response.headers["Content-Type"] == "text/plain; charset=utf-8"
def test_directory_attachment_returns_200(self, client, auth, mocker):
mocker.patch("src.utils.shell", side_effect=["dir/", b""])
mocker.patch("src.utils.isdir", return_value=True)
headers = {**auth, "accept": "application/octet-stream"}
response = client.get("/filesystem/tmp/dir/", headers=headers)
assert response.status_code == 200
assert (
response.headers["Content-Disposition"] == "attachment; filename=dir.tar.gz"
)
assert response.headers["Content-Type"] == "application/x-tar"
def test_unsupported_accept_header_path_returns_400(self, client, auth):
headers = {**auth, "accept": "text/html"}
response = client.get("/filesystem/tmp/", headers=headers)
assert response.status_code == 400
assert response.json == {
"code": 400,
"message": "unsupported 'accept' HTTP header",
"reason": "Bad Request",
}
class TestFilesystemPOST:
def test_valid_file_returns_201(self, client, auth, mocker):
mocker.patch("src.utils.shell")
response = client.post(
"/filesystem/tmp/",
headers=auth,
data={"files": (io.BytesIO(b"text"), "file.txt")},
content_type="multipart/form-data",
)
assert response.status_code == 201
def test_path_not_a_directory_returns_400(self, client, auth, mocker):
stderr = "/tmp/file.txt: Not a directory"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.post(
"/filesystem/tmp/file.txt",
headers=auth,
data={"files": (io.BytesIO(b"text"), "file.txt")},
content_type="multipart/form-data",
)
assert response.status_code == 400
def test_create_existing_file_returns_400(self, client, auth, mocker):
mocker.patch("src.utils.shell", return_value="file.txt")
response = client.post(
"/filesystem/tmp/file.txt",
headers=auth,
data={"files": (io.BytesIO(b"text"), "file.txt")},
content_type="multipart/form-data",
)
assert response.status_code == 400
def test_permission_denied_returns_403(self, client, auth, mocker):
stderr = "/tmp/root/: Permission denied"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.post(
"/filesystem/tmp/root/",
headers=auth,
data={"files": (io.BytesIO(b"text"), "file.txt")},
content_type="multipart/form-data",
)
assert response.status_code == 403
assert response.json == {
"code": 403,
"message": "permission denied",
"reason": "Forbidden",
}
def test_missing_path_returns_404(self, client, auth, mocker):
stderr = "/tmp/missing/: No such file or directory"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.post(
"/filesystem/tmp/missing/",
headers=auth,
data={"files": (io.BytesIO(b"text"), "file.txt")},
content_type="multipart/form-data",
)
assert response.status_code == 404
assert response.json == {
"code": 404,
"message": "no such file or directory",
"reason": "Not Found",
}
class TestFilesystemPUT:
def test_valid_file_returns_204(self, client, auth, mocker):
mocker.patch("src.utils.shell", return_value="file.txt")
response = client.put(
"/filesystem/tmp/file.txt",
headers=auth,
data={"files": (io.BytesIO(b"text"), "file.txt")},
content_type="multipart/form-data",
)
assert response.status_code == 204
def test_path_not_a_directory_returns_400(self, client, auth, mocker):
stderr = "/tmp/file.txt: Not a directory"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.put(
"/filesystem/tmp/file.txt",
headers=auth,
data={"files": (io.BytesIO(b"text"), "file.txt")},
content_type="multipart/form-data",
)
assert response.status_code == 400
def test_permission_denied_returns_403(self, client, auth, mocker):
stderr = "/tmp/root/: Permission denied"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.put(
"/filesystem/tmp/root/",
headers=auth,
data={"files": (io.BytesIO(b"text"), "file.txt")},
content_type="multipart/form-data",
)
assert response.status_code == 403
assert response.json == {
"code": 403,
"message": "permission denied",
"reason": "Forbidden",
}
def test_missing_path_returns_404(self, client, auth, mocker):
stderr = "/tmp/missing/: No such file or directory"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.put(
"/filesystem/tmp/missing/",
headers=auth,
data={"files": (io.BytesIO(b"text"), "file.txt")},
content_type="multipart/form-data",
)
assert response.status_code == 404
assert response.json == {
"code": 404,
"message": "no such file or directory",
"reason": "Not Found",
}
def test_update_missing_file_returns_404(self, client, auth, mocker):
mocker.patch("src.utils.shell", return_value="")
response = client.put(
"/filesystem/tmp/",
headers=auth,
data={"files": (io.BytesIO(b"text"), "file.txt")},
content_type="multipart/form-data",
)
assert response.status_code == 404
assert response.json == {
"code": 404,
"message": "file does not exist",
"reason": "Not Found",
}
class TestFilesystemDELETE:
def test_valid_file_returns_204(self, client, auth, mocker):
mocker.patch("src.utils.shell")
response = client.delete("/filesystem/tmp/file.txt", headers=auth)
assert response.status_code == 204
def test_path_is_a_directory_returns_400(self, client, auth, mocker):
stderr = "/tmp/dir/: is a directory"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.delete("/filesystem/tmp/dir/", headers=auth)
assert response.status_code == 400
def test_permission_denied_returns_403(self, client, auth, mocker):
stderr = "/tmp/root/: Permission denied"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.delete("/filesystem/tmp/root/", headers=auth)
assert response.status_code == 403
assert response.json == {
"code": 403,
"message": "permission denied",
"reason": "Forbidden",
}
def test_delete_missing_file_returns_404(self, client, auth, mocker):
stderr = "/tmp/file.txt: No such file or directory"
err = subprocess.CalledProcessError(cmd="", returncode=1, stderr=stderr)
mocker.patch("src.utils.shell", side_effect=err)
response = client.delete("/filesystem/tmp/file.txt", headers=auth)
assert response.status_code == 404
assert response.json == {
"code": 404,
"message": "no such file or directory",
"reason": "Not Found",
}
| 40.90942
| 88
| 0.611726
| 1,264
| 11,291
| 5.337816
| 0.099684
| 0.083
| 0.071143
| 0.085371
| 0.882615
| 0.851045
| 0.8174
| 0.802134
| 0.788054
| 0.779754
| 0
| 0.022829
| 0.251262
| 11,291
| 275
| 89
| 41.058182
| 0.775254
| 0
| 0
| 0.666667
| 0
| 0
| 0.21557
| 0.0426
| 0
| 0
| 0
| 0
| 0.164609
| 1
| 0.102881
| false
| 0.004115
| 0.020576
| 0
| 0.144033
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
789c67af1b551f800ad053f527ba9da4e8483e9e
| 7,199
|
py
|
Python
|
lib/python/frugal/tests/transport/test_http_transport.py
|
ariasheets-wk/frugal
|
81d41af7fb573c1f97afea99a1b4dfa6ccae29e8
|
[
"Apache-2.0"
] | 144
|
2017-08-17T15:51:58.000Z
|
2022-01-14T21:36:55.000Z
|
lib/python/frugal/tests/transport/test_http_transport.py
|
ariasheets-wk/frugal
|
81d41af7fb573c1f97afea99a1b4dfa6ccae29e8
|
[
"Apache-2.0"
] | 930
|
2017-08-17T17:53:30.000Z
|
2022-03-28T14:04:49.000Z
|
lib/python/frugal/tests/transport/test_http_transport.py
|
ariasheets-wk/frugal
|
81d41af7fb573c1f97afea99a1b4dfa6ccae29e8
|
[
"Apache-2.0"
] | 77
|
2017-08-17T15:54:31.000Z
|
2021-12-25T15:18:34.000Z
|
# Copyright 2017 Workiva
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from base64 import b64encode
from struct import pack_into
import unittest
from mock import Mock
from mock import patch
from thrift.transport.TTransport import TTransportException
from frugal.exceptions import TTransportExceptionType
from frugal.transport.http_transport import THttpTransport
@patch('frugal.transport.http_transport.requests')
class TestTHttpTransport(unittest.TestCase):
def test_request(self, mock_requests):
url = 'http://localhost:8080/frugal'
headers = {'foo': 'bar'}
resp = Mock(status_code=200)
response = b'response'
buff = bytearray(4)
pack_into('!I', buff, 0, len(response))
resp.content = b64encode(buff + response)
mock_requests.post.return_value = resp
def get_headers():
return {'baz': 'qux'}
tr = THttpTransport(url, headers=headers, get_headers=get_headers,
response_capacity=500)
tr.open()
self.assertTrue(tr.isOpen())
data = b'helloworld'
buff = bytearray(4)
pack_into('!I', buff, 0, len(data))
encoded_frame = b64encode(buff + data)
tr.write(data)
tr.flush()
mock_requests.post.assert_called_once_with(
url, data=encoded_frame, timeout=None,
headers={'foo': 'bar', 'baz': 'qux', 'Content-Length': '20',
'Content-Type': 'application/x-frugal',
'Content-Transfer-Encoding': 'base64',
'User-Agent': 'Python/TBaseHttpTransport',
'x-frugal-payload-limit': '500'})
resp = tr.read(len(response))
self.assertEqual(response, resp)
tr.close()
self.assertTrue(tr.isOpen()) # open/close are no-ops
def test_request_timeout(self, mock_requests):
url = 'http://localhost:8080/frugal'
headers = {'foo': 'bar'}
resp = Mock(status_code=200)
response = b'response'
buff = bytearray(4)
pack_into('!I', buff, 0, len(response))
resp.content = b64encode(buff + response)
mock_requests.post.return_value = resp
def get_headers():
return {'baz': 'qux'}
tr = THttpTransport(url, headers=headers, get_headers=get_headers,
response_capacity=500)
tr.open()
self.assertTrue(tr.isOpen())
data = b'helloworld'
buff = bytearray(4)
pack_into('!I', buff, 0, len(data))
encoded_frame = b64encode(buff + data)
tr.set_timeout(5000)
tr.write(data)
tr.flush()
mock_requests.post.assert_called_once_with(
url, data=encoded_frame, timeout=5,
headers={'foo': 'bar', 'baz': 'qux', 'Content-Length': '20',
'Content-Type': 'application/x-frugal',
'Content-Transfer-Encoding': 'base64',
'User-Agent': 'Python/TBaseHttpTransport',
'x-frugal-payload-limit': '500'})
resp = tr.read(len(response))
self.assertEqual(response, resp)
tr.close()
self.assertTrue(tr.isOpen()) # open/close are no-ops
def test_flush_no_body(self, mock_requests):
url = 'http://localhost:8080/frugal'
tr = THttpTransport(url)
tr.flush()
self.assertFalse(mock_requests.post.called)
def test_flush_bad_response(self, mock_requests):
url = 'http://localhost:8080/frugal'
resp = Mock(status_code=500)
mock_requests.post.return_value = resp
tr = THttpTransport(url)
data = b'helloworld'
buff = bytearray(4)
pack_into('!I', buff, 0, len(data))
encoded_frame = b64encode(buff + data)
tr.write(data)
with self.assertRaises(TTransportException):
tr.flush()
mock_requests.post.assert_called_once_with(
url, data=encoded_frame, timeout=None,
headers={'Content-Length': '20',
'Content-Type': 'application/x-frugal',
'Content-Transfer-Encoding': 'base64',
'User-Agent': 'Python/TBaseHttpTransport'})
def test_flush_bad_oneway_response(self, mock_requests):
url = 'http://localhost:8080/frugal'
resp = Mock(status_code=200)
buff = bytearray(4)
pack_into('!I', buff, 0, 10)
resp.content = b64encode(buff)
mock_requests.post.return_value = resp
tr = THttpTransport(url)
data = b'helloworld'
buff = bytearray(4)
pack_into('!I', buff, 0, len(data))
encoded_frame = b64encode(buff + data)
tr.write(data)
with self.assertRaises(TTransportException):
tr.flush()
mock_requests.post.assert_called_once_with(
url, data=encoded_frame, timeout=None,
headers={'Content-Length': '20',
'Content-Type': 'application/x-frugal',
'Content-Transfer-Encoding': 'base64',
'User-Agent': 'Python/TBaseHttpTransport'})
def test_flush_oneway(self, mock_requests):
url = 'http://localhost:8080/frugal'
resp = Mock(status_code=200)
buff = bytearray(4)
pack_into('!I', buff, 0, 0)
resp.content = b64encode(buff)
mock_requests.post.return_value = resp
tr = THttpTransport(url)
data = b'helloworld'
buff = bytearray(4)
pack_into('!I', buff, 0, len(data))
encoded_frame = b64encode(buff + data)
tr.write(data)
tr.flush()
mock_requests.post.assert_called_once_with(
url, data=encoded_frame, timeout=None,
headers={'Content-Length': '20',
'Content-Type': 'application/x-frugal',
'Content-Transfer-Encoding': 'base64',
'User-Agent': 'Python/TBaseHttpTransport'})
resp = tr.read(10)
self.assertEqual(b'', resp)
def test_write_limit_exceeded(self, mock_requests):
url = 'http://localhost:8080/frugal'
resp = Mock(status_code=200)
buff = bytearray(4)
pack_into('!I', buff, 0, 0)
resp.content = b64encode(buff)
mock_requests.post.return_value = resp
tr = THttpTransport(url, request_capacity=5)
data = b'helloworld'
with self.assertRaises(TTransportException) as cm:
tr.write(data)
self.assertEqual(TTransportExceptionType.REQUEST_TOO_LARGE,
cm.exception.type)
self.assertFalse(mock_requests.post.called)
| 33.483721
| 74
| 0.600778
| 815
| 7,199
| 5.184049
| 0.198773
| 0.056805
| 0.049231
| 0.042604
| 0.740592
| 0.740592
| 0.723077
| 0.723077
| 0.713136
| 0.713136
| 0
| 0.027519
| 0.283234
| 7,199
| 214
| 75
| 33.640187
| 0.791279
| 0.081678
| 0
| 0.825806
| 0
| 0
| 0.151561
| 0.050621
| 0
| 0
| 0
| 0
| 0.116129
| 1
| 0.058065
| false
| 0
| 0.051613
| 0.012903
| 0.129032
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
78ab888022afe1cd8275b2262817ee9c767760bd
| 81
|
py
|
Python
|
pyscnet/BuildNet/__init__.py
|
MingBit/SCNetEnrich
|
898606c1823f2beae8368032684fa90fde94adf3
|
[
"MIT"
] | 5
|
2019-08-22T12:34:43.000Z
|
2021-12-21T20:54:38.000Z
|
pyscnet/BuildNet/__init__.py
|
MingBit/SCNetEnrich
|
898606c1823f2beae8368032684fa90fde94adf3
|
[
"MIT"
] | 1
|
2019-09-06T20:01:43.000Z
|
2019-09-08T18:10:39.000Z
|
pyscnet/BuildNet/__init__.py
|
MingBit/SCNetEnrich
|
898606c1823f2beae8368032684fa90fde94adf3
|
[
"MIT"
] | 2
|
2020-07-29T14:04:41.000Z
|
2020-12-21T18:49:11.000Z
|
from .gne_dockercaller import rundocker
from .gne_synchrony import get_synchrony
| 27
| 40
| 0.876543
| 11
| 81
| 6.181818
| 0.636364
| 0.205882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098765
| 81
| 2
| 41
| 40.5
| 0.931507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
78c2ae1ad3889d1d31f3d69eba3b07970ff89192
| 217
|
py
|
Python
|
test/run_pub_actor.py
|
F2011B/pyzac
|
927b8755e2eb51eab91cbd98a700fa898701fe91
|
[
"Apache-2.0"
] | null | null | null |
test/run_pub_actor.py
|
F2011B/pyzac
|
927b8755e2eb51eab91cbd98a700fa898701fe91
|
[
"Apache-2.0"
] | 11
|
2018-11-10T16:47:45.000Z
|
2019-03-07T18:51:38.000Z
|
test/run_pub_actor.py
|
F2011B/pyzac
|
927b8755e2eb51eab91cbd98a700fa898701fe91
|
[
"Apache-2.0"
] | 1
|
2018-11-10T21:12:55.000Z
|
2018-11-10T21:12:55.000Z
|
from pyzac import *
@pyzac_decorator(pub_addr="tcp://127.0.0.1:2000")
def publisher():
return 20
@pyzac_decorator(pub_addr="tcp://127.0.0.1:2001")
def publishertwo():
return 3
publisher()
publishertwo()
| 13.5625
| 49
| 0.691244
| 33
| 217
| 4.424242
| 0.545455
| 0.191781
| 0.232877
| 0.287671
| 0.410959
| 0.410959
| 0.410959
| 0.410959
| 0.410959
| 0
| 0
| 0.122995
| 0.138249
| 217
| 15
| 50
| 14.466667
| 0.657754
| 0
| 0
| 0
| 0
| 0
| 0.184332
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| true
| 0
| 0.111111
| 0.222222
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
78d2c2987da074279ceb16818ab2cc3ca4390e7c
| 199
|
py
|
Python
|
vestlus/forms/__init__.py
|
lehvitus/vestlus
|
6d9c8b1de7821e544e0c7c99f42d60f8f3805557
|
[
"BSD-3-Clause"
] | 12
|
2020-07-02T23:36:02.000Z
|
2020-12-15T07:29:20.000Z
|
vestlus/forms/__init__.py
|
lehvitus/vestlus
|
6d9c8b1de7821e544e0c7c99f42d60f8f3805557
|
[
"BSD-3-Clause"
] | null | null | null |
vestlus/forms/__init__.py
|
lehvitus/vestlus
|
6d9c8b1de7821e544e0c7c99f42d60f8f3805557
|
[
"BSD-3-Clause"
] | null | null | null |
# vestlus:forms
from .channel import ChannelForm
from .message import MessageForm
from .message import PrivateMessageForm
from .message import GroupMessageForm
from .membership import MembershipForm
| 28.428571
| 39
| 0.854271
| 22
| 199
| 7.727273
| 0.545455
| 0.194118
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110553
| 199
| 6
| 40
| 33.166667
| 0.960452
| 0.065327
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1535fc0a28ed02f6cbfe83c048a4158283283108
| 6,291
|
py
|
Python
|
tests/test_undos/test_coreutils/test_mv.py
|
joshmeranda/undo
|
f54581223c0c157702dda6124691bb40fa2e2b31
|
[
"MIT"
] | null | null | null |
tests/test_undos/test_coreutils/test_mv.py
|
joshmeranda/undo
|
f54581223c0c157702dda6124691bb40fa2e2b31
|
[
"MIT"
] | null | null | null |
tests/test_undos/test_coreutils/test_mv.py
|
joshmeranda/undo
|
f54581223c0c157702dda6124691bb40fa2e2b31
|
[
"MIT"
] | null | null | null |
import os
import shutil
import unittest
import undo.resolve as resolve
import undo.expand as expand
import tests.test_undos.test_coreutils.common as common
class TestMv(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
if os.path.exists(common.COREUTILS_TEST_ENV_DIR):
shutil.rmtree(common.COREUTILS_TEST_ENV_DIR)
os.mkdir(common.COREUTILS_TEST_ENV_DIR)
os.mknod(os.path.join(
common.COREUTILS_TEST_ENV_DIR,
"OUTER"
))
os.mkdir(os.path.join(
common.COREUTILS_TEST_ENV_DIR,
"DIR"
))
os.mknod(os.path.join(
common.COREUTILS_TEST_ENV_DIR,
"DIR",
"INNER"
))
os.mknod(os.path.join(
common.COREUTILS_TEST_ENV_DIR,
"DIR",
"ANOTHER_INNER"
))
cwd_bak = os.getcwd()
os.chdir(common.COREUTILS_TEST_ENV_DIR)
cls.addClassCleanup(shutil.rmtree, common.COREUTILS_TEST_ENV_DIR)
cls.addClassCleanup(os.chdir, cwd_bak)
def test_rename(self):
command = "mv ORIGINAL OUTER"
expected = []
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, False, "sh")]
self.assertListEqual(expected, actual)
expected = ["mv OUTER ORIGINAL"]
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, True, "sh")]
self.assertListEqual(expected, actual)
def test_rename_precise(self):
command = "mv --no-clobber ORIGINAL OUTER"
expected = ["mv OUTER ORIGINAL"]
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, False, "sh")]
self.assertListEqual(expected, actual)
def test_move_single(self):
command = "mv INNER DIR"
expected = []
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, False, "sh")]
self.assertListEqual(expected, actual)
expected = ["mv DIR/INNER INNER"]
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, True, "sh")]
self.assertListEqual(expected, actual)
def test_move_single_precise(self):
command = "mv --no-clobber INNER DIR"
expected = ["mv DIR/INNER INNER"]
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, False, "sh")]
self.assertListEqual(expected, actual)
def test_move_multiple(self):
command = "mv INNER ANOTHER_INNER DIR"
expected = []
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, False, "sh")]
self.assertListEqual(expected, actual)
expected = ["mv DIR/INNER INNER; mv DIR/ANOTHER_INNER ANOTHER_INNER"]
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, True, "sh")]
self.assertListEqual(expected, actual)
def test_move_multiple_precise(self):
command = "mv --no-clobber INNER ANOTHER_INNER DIR"
expected = ["mv DIR/INNER INNER; mv DIR/ANOTHER_INNER ANOTHER_INNER"]
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, False, "sh")]
self.assertListEqual(expected, actual)
def test_move_single_with_target_directory(self):
command = "mv -t DIR INNER"
expected = []
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, False, "sh")]
self.assertListEqual(expected, actual)
expected = ["mv DIR/INNER INNER"]
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, True, "sh")]
self.assertListEqual(expected, actual)
def test_move_single_with_target_directory_precise(self):
command = "mv --no-clobbe -t DIR INNER"
expected = ["mv DIR/INNER INNER"]
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, False, "sh")]
self.assertListEqual(expected, actual)
def test_move_multiple_with_target_directory(self):
command = "mv -t DIR INNER ANOTHER_INNER"
expected = []
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, False, "sh")]
self.assertListEqual(expected, actual)
expected = ["mv DIR/INNER INNER; mv DIR/ANOTHER_INNER ANOTHER_INNER"]
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, True, "sh")]
self.assertListEqual(expected, actual)
def test_move_multiple_with_target_directory_precise(self):
command = "mv --no-clobber -t DIR INNER ANOTHER_INNER"
expected = ["mv DIR/INNER INNER; mv DIR/ANOTHER_INNER ANOTHER_INNER"]
actual = [expand.expand(undo, env, ("%", "%"), "; ")
for env, undo in
resolve.resolve(command, [common.COREUTILS_UNDO_DIR], False, False, "sh")]
self.assertListEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
| 33.822581
| 92
| 0.575425
| 684
| 6,291
| 5.121345
| 0.093567
| 0.102769
| 0.077077
| 0.094205
| 0.889523
| 0.876963
| 0.850414
| 0.789323
| 0.776763
| 0.755638
| 0
| 0
| 0.291687
| 6,291
| 185
| 93
| 34.005405
| 0.786131
| 0
| 0
| 0.671756
| 0
| 0
| 0.113514
| 0
| 0
| 0
| 0
| 0
| 0.114504
| 1
| 0.083969
| false
| 0
| 0.045802
| 0
| 0.137405
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
15bc7c6b3d4060f0ecdc3f4d306a193680a49fe5
| 136
|
py
|
Python
|
Sequences/moreprint.py
|
zahraaliaghazadeh/python
|
2f2d0141a916c99e8724f803bd4e5c7246a7a02e
|
[
"MIT"
] | null | null | null |
Sequences/moreprint.py
|
zahraaliaghazadeh/python
|
2f2d0141a916c99e8724f803bd4e5c7246a7a02e
|
[
"MIT"
] | null | null | null |
Sequences/moreprint.py
|
zahraaliaghazadeh/python
|
2f2d0141a916c99e8724f803bd4e5c7246a7a02e
|
[
"MIT"
] | null | null | null |
name = "Tim"
age = 10
print(name, age, "Python", 2020)
print(name, age, "Python", 2020, sep=", ")
# it will separate with , and space
| 17
| 42
| 0.625
| 21
| 136
| 4.047619
| 0.666667
| 0.211765
| 0.282353
| 0.423529
| 0.517647
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0.191176
| 136
| 7
| 43
| 19.428571
| 0.681818
| 0.242647
| 0
| 0
| 0
| 0
| 0.17
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
ec7ffc1de7e9802f38cecdf8c552e6e15a942757
| 3,801
|
py
|
Python
|
RasPiBaseStation/ExampleData.py
|
jgOhYeah/Farm-PJON-LoRa-network
|
e506f4703674668a80574b8858dbc40104abb5bb
|
[
"MIT"
] | 1
|
2021-02-26T10:55:40.000Z
|
2021-02-26T10:55:40.000Z
|
RasPiBaseStation/ExampleData.py
|
jgOhYeah/Farm-PJON-LoRa-network
|
e506f4703674668a80574b8858dbc40104abb5bb
|
[
"MIT"
] | 1
|
2021-07-01T12:34:51.000Z
|
2021-07-03T02:28:25.000Z
|
RasPiBaseStation/ExampleData.py
|
jgOhYeah/Farm-PJON-LoRa-network
|
e506f4703674668a80574b8858dbc40104abb5bb
|
[
"MIT"
] | null | null | null |
data = [
bytearray(b'\x00\x00\x00\x00\x00'),
bytearray(b'\xff\x16:Wos\x02\x00C\xbd\x01\x02\xd1hS'),
bytearray(b'\xff\xe6_R\x93\\\x9f\x03h\xc0\x01\xb0af\xf8'),
bytearray(b'\xff&\x0f\x07ZP_\x00a\xae\x01\xd9}q\xeb'),
bytearray(b'\xff&\x0f\x07ZP:\x00a\xb2\x01n\xd8\xd4\x8b'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa2\x01\xcf\x8bf\xfd'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa3\x01\xd6\x90W\xbc'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa4\x01\x99\xd1\xc1{'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa5\x01\x80\xca\xf0:'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa6\x01\xab\xe7\xa3\xf9'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa7\x01\xb2\xfc\x92\xb8'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa8\x015d\x8ew'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xa9\x01'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xaa\x01\x07R\xec\xf5'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xab\x01\x1eI\xdd\xb4'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xac\x01Q\x08Ks'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xad\x01H\x13z2'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xaf\x01z%\x18\xb0'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xb5\x01\xca\x08\xe2k'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xb6\x01\xe1%\xb1\xa8'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xbc\x01\x1b\xcaY"'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xbf\x010\xe7\n\xe1'),
bytearray(b'\xff&\x0f\x07ZP\x00\x00a\xc5\x01\xe57\x9b\x9d'),
bytearray(b'\xff&\x0f\x07ZP\x0e\x01a\xb1\x01\xa9\xe8\xfek'),
bytearray(b'\xff&\x0f\x07ZP\x10\x15a\xbe\x01)\xfc;\xa0'),
bytearray(b'\xff&\x0f\x07ZP\x8a\x01a\xae\x01 \xd4\xe6\xa7'),
bytearray(b'\xff&\x0f\x07ZP\x9e\x04a\xa3\x017\xc4Y\x9a'),
bytearray(b'\xff&\x0f\x07ZP\xaf\x00a\xb4\x01!d\xdbm'),
bytearray(b'\xff&\x0f\x07Zp\xbb \xeb\x90\x05_N\x88\xb9'),
bytearray(b'\xff&\x0f\x07ZP\xf5\ra\xa7\x01\xc0B\x95H'),
bytearray(b'\xff&\x0f\x07ZP\xfd\x00a\xa5\x010nd\xd6'),
bytearray(b'\xff&\x0f\x07ZP2\x00a\xa8\x01\xee\x85e\x91'),
bytearray(b'\xff&\x0f\x07ZP4\x00a\xa3\x01\x821I\xfa'),
bytearray(b'\xff&\x0f\x07ZP4\x00a\xa8\x01a\xc5\x901'),
bytearray(b'\xff&\x0f\x07ZP5\x00a\xaa\x01n\x93\xdb\x03'),
bytearray(b'\xff&\x0f\x07ZP5\x00a\xb8\x01\x16g\xab\xd0'),
bytearray(b'\xff&\x0f\x07ZP6\x00a\xb9\x01H\xdc\xe0A'),
bytearray(b'\xff&\x0f\x07ZP7\x00a\xaf\x01i$|&'),
bytearray(b'\xff&\x0f\x07ZP7\x00a\xb3\x01\x8fS!{'),
bytearray(b'\xff&\x0f\x07ZP7\x00a\xb5\x01\xd9\t\x86\xfd'),
bytearray(b'\xff&\x0f\x07ZP8\x00a\xae\x01\xf2o\xda\xb6'),
bytearray(b'\xff&\x0f\x07ZP8\x00a\xb4\x01BB m'),
bytearray(b'\xff&\x0f\x07ZPc\x08a\xa9\x017Y\x9aD'),
bytearray(b'\xff&\x0f\x07ZPm\x00a\xa5\x01\xe1hC\xc6'),
bytearray(b'\xff&\x0f\x07ZPW\x01a\xb5\x01\xf8\x87\x96\xd5'),
bytearray(b'\xff&\x10_JV}\x00F\x01r\x013\xf7\xa7R'),
bytearray(b'\xff&\x10_JV\x7f\x00F\x01r\x01~?\x06Y'),
bytearray(b'\xff&\x10_JV\x80\x00F\x01r\x01\x9a\xfb\xad\x95'),
bytearray(b'\xff&\x10_JV\x81\x00F\x01r\x01Q\xa7~0'),
bytearray(b'\xff&\x10_JV\x82\x00F\x01r\x01\xd73\x0c\x9e'),
bytearray(b'\xff&\x10_JV\x83\x00F\x01r\x01\x1co\xdf;'),
bytearray(b'\xff&\x10_JV\x84\x00F\x01B\x01\x01j\xef\x83'),
bytearray(b'\xff&\x10_JV\x84\x00F\x01r\x01\x01j\xef\x83'),
bytearray(b'\xff&\x10_JV\x85\x00F\x01r\x01\xca6<&'),
bytearray(b'\xff&\x10_JV\x86\x00F\x00r\x01M`$\xbf'),
bytearray(b'\xff&\x10_JV\x86\x00F\x01r\x01L\xa2N\x88'),
bytearray(b'\xff&\x10_JV+\x00F\x01r\x01\xb5c\xd5\xfc'),
bytearray(b'\xff&\x10_JV~\x00F\x01r\x01\xb5c\xd5\xfc'),
bytearray(b'\xff&\x10_JVx\x00F\x01r\x01c:6\xe1'),
bytearray(b'\xff&\x10_JVy\x06F7r\x01~?\x06Y'),
bytearray(b'\xff&\x10UJV~\x00F\x01r\x01\xb5c\xd5\xfc'),
bytearray(b'\xff&\xd5UJV}\x00N\x01ra3\xf7\xa7R'),
bytearray(b'\xff>?\x9aC\x19G\x04-\xbf\x02\xf7\xfb\xff\xe5'),
bytearray(b'\xffF\x0f\x0bZd:\x8ak\x1a\x085\x88W\xb3'),
bytearray(b'J\x06\x07\xff\xff\xf3~'),
bytearray(b'J\x06\x08\x90\xff\xc6\x00\xc6'),
bytearray(b'J\x06\x08\x90\xff\xc6\x01@'),
bytearray(b"\xff&\x0f\x07ZP\x00\x00a\xb1\x01\xaed\'o")
]
| 54.3
| 63
| 0.714023
| 733
| 3,801
| 3.678035
| 0.28513
| 0.252226
| 0.303783
| 0.255193
| 0.552671
| 0.434718
| 0.329377
| 0.11276
| 0.070475
| 0.034866
| 0
| 0.201771
| 0.019469
| 3,801
| 70
| 64
| 54.3
| 0.521599
| 0
| 0
| 0
| 0
| 0.057143
| 0.711205
| 0.696739
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ec95fb419665c9147b447646699648e826747d7d
| 65
|
py
|
Python
|
src/admin_panel/__init__.py
|
sahilsehgal1995/lenme-api
|
65826619b039c5c4035b6e0c133c32014977489e
|
[
"MIT"
] | null | null | null |
src/admin_panel/__init__.py
|
sahilsehgal1995/lenme-api
|
65826619b039c5c4035b6e0c133c32014977489e
|
[
"MIT"
] | null | null | null |
src/admin_panel/__init__.py
|
sahilsehgal1995/lenme-api
|
65826619b039c5c4035b6e0c133c32014977489e
|
[
"MIT"
] | null | null | null |
from src.user.schemas import *
from src.products.schemas import *
| 32.5
| 34
| 0.8
| 10
| 65
| 5.2
| 0.6
| 0.269231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107692
| 65
| 2
| 34
| 32.5
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
ecabba1a581b685ea13612eb7df9a6cb0a44471c
| 149
|
py
|
Python
|
src/stackoverflow/58862981/main.py
|
mrdulin/python-codelab
|
3d960a14a96b3a673b7dc2277d202069b1f8e778
|
[
"MIT"
] | null | null | null |
src/stackoverflow/58862981/main.py
|
mrdulin/python-codelab
|
3d960a14a96b3a673b7dc2277d202069b1f8e778
|
[
"MIT"
] | null | null | null |
src/stackoverflow/58862981/main.py
|
mrdulin/python-codelab
|
3d960a14a96b3a673b7dc2277d202069b1f8e778
|
[
"MIT"
] | 3
|
2020-02-19T08:02:04.000Z
|
2021-06-08T13:27:51.000Z
|
class AADatabase:
    """Database helper whose role check always reports a non-primary node."""

    @classmethod
    def is_primary(cls):
        # This database never acts as the primary.
        return False

    @classmethod
    def run(cls):
        # Delegate to the role check so subclasses can override it.
        return cls.is_primary()
| 16.555556
| 31
| 0.61745
| 17
| 149
| 5.294118
| 0.588235
| 0.311111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.302013
| 149
| 8
| 32
| 18.625
| 0.865385
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.285714
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
ecb0f7708caf19dcd5be44126835c1fe61fea19a
| 131
|
py
|
Python
|
app/sendmail/__init__.py
|
csud-reservation/flask-backend
|
fe68807dd47b643991b16663b2145687faaf45e3
|
[
"MIT"
] | 1
|
2017-09-28T07:58:31.000Z
|
2017-09-28T07:58:31.000Z
|
app/sendmail/__init__.py
|
csud-reservation/flask-backend
|
fe68807dd47b643991b16663b2145687faaf45e3
|
[
"MIT"
] | 16
|
2017-04-18T21:26:25.000Z
|
2017-08-31T15:18:12.000Z
|
app/sendmail/__init__.py
|
csud-reservation/flask-backend
|
fe68807dd47b643991b16663b2145687faaf45e3
|
[
"MIT"
] | 1
|
2017-04-18T19:21:32.000Z
|
2017-04-18T19:21:32.000Z
|
from flask import Blueprint
# Blueprint for the sendmail feature; its templates are resolved under
# 'templates/sendmail'.
sendmail = Blueprint('sendmail', __name__, template_folder='templates/sendmail')
# Deliberately imported *after* the blueprint is created: `views` registers
# routes on `sendmail`, so the object must already exist (standard Flask
# circular-import pattern) — do not reorder.
from . import views
| 21.833333
| 80
| 0.793893
| 15
| 131
| 6.6
| 0.666667
| 0.343434
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114504
| 131
| 5
| 81
| 26.2
| 0.853448
| 0
| 0
| 0
| 0
| 0
| 0.198473
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
eccee7e942f554956198e0b3ca7b95243d4314fa
| 9,757
|
py
|
Python
|
main.py
|
btanner/differential_value_iteration
|
5b734397cd158b6783f5fb44106774ba2a28049f
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
btanner/differential_value_iteration
|
5b734397cd158b6783f5fb44106774ba2a28049f
|
[
"Apache-2.0"
] | 24
|
2021-08-16T23:21:39.000Z
|
2021-11-25T20:46:17.000Z
|
main.py
|
btanner/differential_value_iteration
|
5b734397cd158b6783f5fb44106774ba2a28049f
|
[
"Apache-2.0"
] | 2
|
2021-08-06T21:15:49.000Z
|
2021-08-16T04:29:52.000Z
|
"""Sample program that runs a sweep and records results."""
from pathlib import Path
from typing import Sequence
import numpy as np
from absl import app
from absl import flags
from differential_value_iteration import utils
from differential_value_iteration.algorithms import algorithms
from differential_value_iteration.environments import garet
from differential_value_iteration.environments import micro
FLAGS = flags.FLAGS
flags.DEFINE_string(name='plot_dir', default='plots', help='path to plot dir')
flags.DEFINE_integer('max_iters', 100000, 'Maximum iterations per algorithm.')
flags.DEFINE_float('epsilon', 1e-7, 'Tolerance for convergence.')
flags.DEFINE_bool('mrp', True, 'Run mrp experiments.')
flags.DEFINE_bool('mdp', True, 'Run mdp experiments.')
def main(argv):
  """Parse flags, prepare the plot directory, and run the requested sweeps.

  Args:
    argv: remaining positional command-line args from absl; unused here.
  """
  del argv  # this program takes no positional arguments
  # Step sizes swept for the value update (alpha) and, where applicable,
  # the reward-rate update (beta); same grid for both.
  step_sizes = [1.0, 0.999, 0.99, 0.9, 0.7, 0.5, 0.3, 0.1, 0.01, 0.001]
  alphas = list(step_sizes)
  betas = list(step_sizes)
  max_iters = FLAGS.max_iters
  epsilon = FLAGS.epsilon
  plot_dir = FLAGS.plot_dir
  # endswith() also copes with an empty --plot_dir; the original indexed
  # plot_dir[-1], which raises IndexError on an empty string.
  if not plot_dir.endswith('/'):
    plot_dir += '/'
  Path(plot_dir).mkdir(parents=True, exist_ok=True)
  if FLAGS.mrp:
    run_mrps(alphas=alphas, betas=betas, max_iters=max_iters,
             epsilon=epsilon, plot_dir=plot_dir)
  if FLAGS.mdp:
    run_mdps(alphas=alphas, betas=betas, max_iters=max_iters,
             epsilon=epsilon, plot_dir=plot_dir)
def run_mrps(
    alphas: Sequence[float],
    betas: Sequence[float],
    max_iters: int,
    epsilon: float,
    plot_dir: str):
  """Run every evaluation algorithm, sync then async, over the micro MRPs."""
  envs = [
      micro.create_mrp1(dtype=np.float32),
      micro.create_mrp2(dtype=np.float32),
      micro.create_mrp3(dtype=np.float32),
  ]
  for env in envs:
    init_v = np.zeros(env.num_states)
    init_r_bar_scalar = 0
    init_r_bar_vec = np.zeros(env.num_states)
    # For each algorithm family: sweep the synchronous variant first,
    # then the asynchronous one, and plot each result grid.
    for mode in ('sync', 'async'):
      results = exp_RVI_Evaluation(env, 'exec_' + mode, alphas, init_v,
                                   max_iters, epsilon, ref_idx=0)
      utils.draw(results, plot_dir + env.name + '_RVI_Evaluation_' + mode,
                 alphas)
    for mode in ('sync', 'async'):
      results = exp_DVI_Evaluation(env, 'exec_' + mode, alphas, betas, init_v,
                                   init_r_bar_scalar, max_iters, epsilon)
      utils.draw(results, plot_dir + env.name + '_DVI_Evaluation_' + mode,
                 alphas, betas)
    for mode in ('sync', 'async'):
      results = exp_MDVI_Evaluation(env, 'exec_' + mode, alphas, betas,
                                    init_v, init_r_bar_vec, max_iters,
                                    epsilon)
      utils.draw(results, plot_dir + env.name + '_MDVI_Evaluation_' + mode,
                 alphas, betas)
def run_mdps(alphas: Sequence[float], betas: Sequence[float], max_iters: int,
             epsilon: float, plot_dir: str):
  """Run every control algorithm, sync then async, over a GARET MDP and mdp2."""
  garet_env = garet.create(seed=42,
                           num_states=10,
                           num_actions=2,
                           branching_factor=3)
  for env in (garet_env, micro.mdp2):
    init_v = np.zeros(env.num_states)
    init_r_bar_scalar = 0
    init_r_bar_vec = np.zeros(env.num_states)
    # Same pattern as run_mrps: per family, sweep sync then async and plot.
    for mode in ('sync', 'async'):
      results = exp_RVI_Control(env, 'exec_' + mode, alphas, init_v,
                                max_iters, epsilon, ref_idx=0)
      utils.draw(results, plot_dir + env.name + '_RVI_Control_' + mode,
                 alphas)
    for mode in ('sync', 'async'):
      results = exp_DVI_Control(env, 'exec_' + mode, alphas, betas, init_v,
                                init_r_bar_scalar, max_iters, epsilon)
      utils.draw(results, plot_dir + env.name + '_DVI_Control_' + mode,
                 alphas, betas)
    for mode in ('sync', 'async'):
      results = exp_MDVI_Control1(env, 'exec_' + mode, alphas, betas, init_v,
                                  init_r_bar_vec, max_iters, epsilon)
      utils.draw(results, plot_dir + env.name + '_MDVI_Control1_' + mode,
                 alphas, betas)
    for mode in ('sync', 'async'):
      results = exp_MDVI_Control2(env, 'exec_' + mode, alphas, betas, init_v,
                                  init_r_bar_vec, max_iters, epsilon)
      utils.draw(results, plot_dir + env.name + '_MDVI_Control2_' + mode,
                 alphas, betas)
def exp_RVI_Evaluation(env, update_rule, alphas, init_v, max_iters, epsilon,
                       ref_idx=0):
  """Sweep RVI Evaluation over alphas; return a per-alpha convergence vector."""
  flags_vec = np.zeros(len(alphas))
  for i, alpha in enumerate(alphas):
    solver = algorithms.RVI_Evaluation(env, init_v, alpha, ref_idx)
    print(f'{env.name} RVI Evaluation {update_rule} alpha:{alpha}', end=' ')
    converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
    print(f'Converged? {converged}')
    flags_vec[i] = converged
  return flags_vec
def exp_RVI_Control(env, update_rule, alphas, init_v, max_iters, epsilon,
                    ref_idx=0):
  """Sweep RVI Control over alphas; return a per-alpha convergence vector."""
  flags_vec = np.zeros(len(alphas))
  for i, alpha in enumerate(alphas):
    solver = algorithms.RVI_Control(env, init_v, alpha, ref_idx)
    print(f'{env.name} RVI Control {update_rule} alpha:{alpha}', end=' ')
    converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
    print(f'Converged? {converged}')
    flags_vec[i] = converged
  return flags_vec
def exp_DVI_Evaluation(env, update_rule, alphas, betas, init_v, init_r_bar,
                       max_iters, epsilon):
  """Sweep DVI Evaluation over the (alpha, beta) grid; return convergence flags."""
  flags_grid = np.zeros((len(alphas), len(betas)))
  for i, alpha in enumerate(alphas):
    for j, beta in enumerate(betas):
      solver = algorithms.DVI_Evaluation(env, init_v, init_r_bar, alpha, beta)
      msg = f'{env.name} DVI Evaluation {update_rule} alpha:{alpha} beta:{beta}'
      print(msg, end=' ')
      converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
      print(f'Converged? {converged}')
      flags_grid[i, j] = converged
  return flags_grid
def exp_DVI_Control(env, update_rule, alphas, betas, init_v, init_r_bar,
                    max_iters, epsilon):
  """Sweep DVI Control over the (alpha, beta) grid; return convergence flags."""
  flags_grid = np.zeros((len(alphas), len(betas)))
  for i, alpha in enumerate(alphas):
    for j, beta in enumerate(betas):
      solver = algorithms.DVI_Control(env, init_v, init_r_bar, alpha, beta)
      msg = f'{env.name} DVI Control {update_rule} alpha:{alpha} beta:{beta}'
      print(msg, end=' ')
      converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
      print(f'Converged? {converged}')
      flags_grid[i, j] = converged
  return flags_grid
def exp_MDVI_Evaluation(env, update_rule, alphas, betas, init_v, init_r_bar,
                        max_iters, epsilon):
  """Sweep MDVI Evaluation over the (alpha, beta) grid; return convergence flags."""
  flags_grid = np.zeros((len(alphas), len(betas)))
  for i, alpha in enumerate(alphas):
    for j, beta in enumerate(betas):
      solver = algorithms.MDVI_Evaluation(env, init_v, init_r_bar, alpha, beta)
      msg = f'{env.name} MDVI Evaluation {update_rule} alpha:{alpha} beta:{beta}'
      print(msg, end=' ')
      converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
      print(f'Converged? {converged}')
      flags_grid[i, j] = converged
  return flags_grid
def exp_MDVI_Control1(env, update_rule, alphas, betas, init_v, init_r_bar,
                      max_iters, epsilon):
  """Sweep MDVI Control (variant 1) over the (alpha, beta) grid."""
  flags_grid = np.zeros((len(alphas), len(betas)))
  for i, alpha in enumerate(alphas):
    for j, beta in enumerate(betas):
      solver = algorithms.MDVI_Control1(env, init_v, init_r_bar, alpha, beta)
      msg = f'{env.name} MDVI Control1 {update_rule} alpha:{alpha} beta:{beta}'
      print(msg, end=' ')
      converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
      print(f'Converged? {converged}')
      flags_grid[i, j] = converged
  return flags_grid
def exp_MDVI_Control2(env, update_rule, alphas, betas, init_v, init_r_bar,
                      max_iters, epsilon):
  """Sweep MDVI Control (variant 2) over the (alpha, beta) grid."""
  flags_grid = np.zeros((len(alphas), len(betas)))
  for i, alpha in enumerate(alphas):
    for j, beta in enumerate(betas):
      solver = algorithms.MDVI_Control2(env, init_v, init_r_bar, alpha, beta)
      msg = f'{env.name} MDVI Control2 {update_rule} alpha:{alpha} beta:{beta}'
      print(msg, end=' ')
      converged = utils.run_alg(solver, update_rule, max_iters, epsilon)
      print(f'Converged? {converged}')
      flags_grid[i, j] = converged
  return flags_grid
if __name__ == '__main__':
  app.run(main)  # absl parses command-line flags, then invokes main(argv)
| 42.982379
| 80
| 0.664241
| 1,341
| 9,757
| 4.550336
| 0.100671
| 0.048509
| 0.076205
| 0.032776
| 0.841855
| 0.818912
| 0.773189
| 0.773189
| 0.74238
| 0.74238
| 0
| 0.013099
| 0.225377
| 9,757
| 226
| 81
| 43.172566
| 0.794258
| 0.005432
| 0
| 0.505
| 0
| 0
| 0.120128
| 0.008765
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.045
| 0
| 0.13
| 0.07
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
01c4745ae2465498eec2a476c766698d1f857fc5
| 2,256
|
py
|
Python
|
data_processor/imgs_to_arr.py
|
BoyuanChen/visual_behavior_modeling
|
8b6eb0516c562306c5d775632223ad0de775f170
|
[
"MIT"
] | 9
|
2019-12-04T12:50:43.000Z
|
2021-02-28T13:45:30.000Z
|
data_processor/imgs_to_arr.py
|
BoyuanChen/visual_behavior_modeling
|
8b6eb0516c562306c5d775632223ad0de775f170
|
[
"MIT"
] | null | null | null |
data_processor/imgs_to_arr.py
|
BoyuanChen/visual_behavior_modeling
|
8b6eb0516c562306c5d775632223ad0de775f170
|
[
"MIT"
] | 2
|
2020-07-09T20:35:15.000Z
|
2020-11-16T14:03:10.000Z
|
import os
import shutil  # required by mkdir(); missing in the original (NameError when the folder already exists)
from PIL import Image
import numpy as np
from tqdm import tqdm


def mkdir(folder):
    """(Re)create `folder`: delete any existing directory of that name first."""
    if os.path.exists(folder):
        shutil.rmtree(folder)
    os.makedirs(folder)


def _imgs_to_npy(src_dir, dst_dir):
    """Convert every image file in src_dir to a .npy array in a fresh dst_dir."""
    mkdir(dst_dir)
    for fname in tqdm(os.listdir(src_dir)):
        arr = np.array(Image.open(os.path.join(src_dir, fname)))
        # Keep the original stem, swap the extension for .npy.
        np.save(os.path.join(dst_dir, fname.split('.')[0] + '.npy'), arr)


# Dataset root on the original author's machine — adjust for other hosts.
_BASE = '/home/cml/bo/ToM_Base/sim_tom/rgb/tom_simple_rgb/data_processor'


def main():
    """Convert both train and test splits (data and target images) to arrays."""
    for split in ('train', 'test'):
        root = _BASE + '/augmented_' + split + '_data_imgs'
        _imgs_to_npy(root + '/rgb_data_imgs', root + '/rgb_data_arr')
        _imgs_to_npy(root + '/rgb_target_imgs', root + '/rgb_target_arr')


if __name__ == '__main__':
    main()
| 29.684211
| 128
| 0.772606
| 388
| 2,256
| 4.141753
| 0.126289
| 0.052271
| 0.074673
| 0.08463
| 0.920971
| 0.920971
| 0.920971
| 0.920971
| 0.920971
| 0.920971
| 0
| 0.001968
| 0.098848
| 2,256
| 76
| 129
| 29.684211
| 0.78849
| 0
| 0
| 0.6
| 0
| 0
| 0.374445
| 0.365572
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025
| false
| 0
| 0.1
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
01d7ca23b6afa966af7ebe5976fa1c6f4689b516
| 64
|
py
|
Python
|
.idea/VirtualEnvironment/Lib/site-packages/tests/outcomes/imports/test_import_package_5/random_module/main.py
|
Vladpetr/NewsPortal
|
cd4127fbc09d9c8f5e65c8ae699856c6d380a320
|
[
"Apache-2.0"
] | null | null | null |
.idea/VirtualEnvironment/Lib/site-packages/tests/outcomes/imports/test_import_package_5/random_module/main.py
|
Vladpetr/NewsPortal
|
cd4127fbc09d9c8f5e65c8ae699856c6d380a320
|
[
"Apache-2.0"
] | 5
|
2021-04-08T22:02:15.000Z
|
2022-02-10T14:53:45.000Z
|
.idea/VirtualEnvironment/Lib/site-packages/tests/outcomes/imports/test_import_package_5/random_module/main.py
|
Vladpetr/NewsPortal
|
cd4127fbc09d9c8f5e65c8ae699856c6d380a320
|
[
"Apache-2.0"
] | null | null | null |
import in1.in2.main2 as inner_main
import in1.file as inner_file

# Print the sum of the attributes exposed by the two nested modules.
print(inner_main.x + inner_file.y)
| 16
| 25
| 0.703125
| 16
| 64
| 2.8125
| 0.6875
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075472
| 0.171875
| 64
| 3
| 26
| 21.333333
| 0.773585
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
1779e6795459b2a40c58d6a05b53394862c01bb3
| 40
|
py
|
Python
|
tests/conftest.py
|
EJEmmett/functimer
|
1ab5f996947e0cfa0ac3a62490d451fa7277170b
|
[
"MIT"
] | 1
|
2021-04-22T05:37:38.000Z
|
2021-04-22T05:37:38.000Z
|
tests/conftest.py
|
EJEmmett/functimer
|
1ab5f996947e0cfa0ac3a62490d451fa7277170b
|
[
"MIT"
] | 1
|
2021-05-12T05:24:23.000Z
|
2021-05-12T05:24:23.000Z
|
tests/conftest.py
|
EJEmmett/functimer
|
1ab5f996947e0cfa0ac3a62490d451fa7277170b
|
[
"MIT"
] | 2
|
2021-04-22T19:06:47.000Z
|
2021-05-07T01:05:52.000Z
|
from tests.test_timer import mock_timed
| 20
| 39
| 0.875
| 7
| 40
| 4.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 40
| 1
| 40
| 40
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
179adb3d6ab0cec52730216efe94151ae604749d
| 27,344
|
py
|
Python
|
tests/backend_test.py
|
onebitaway/khal
|
3c11e1a9eade1b91bea08c38f6acbc68d4f39d47
|
[
"MIT"
] | null | null | null |
tests/backend_test.py
|
onebitaway/khal
|
3c11e1a9eade1b91bea08c38f6acbc68d4f39d47
|
[
"MIT"
] | null | null | null |
tests/backend_test.py
|
onebitaway/khal
|
3c11e1a9eade1b91bea08c38f6acbc68d4f39d47
|
[
"MIT"
] | null | null | null |
import pytest
import pytz
from datetime import date, datetime, timedelta, time
import icalendar
from khal.khalendar import backend
from khal.khalendar.event import LocalizedEvent
from khal.khalendar.exceptions import OutdatedDbVersionError, UpdateFailed
from .aux import _get_text
# Timezones used throughout these tests.
BERLIN = pytz.timezone('Europe/Berlin')
LONDON = pytz.timezone('Europe/London')
SAMOA = pytz.timezone('Pacific/Samoa')
SYDNEY = pytz.timezone('Australia/Sydney')
# Locale dicts in the shape the khal backend expects: the timezone used for
# display ('local_timezone') and the one assumed for naive datetimes
# ('default_timezone').
LOCALE_BERLIN = {'local_timezone': BERLIN, 'default_timezone': BERLIN}
LOCALE_SAMOA = {'local_timezone': SAMOA, 'default_timezone': SAMOA}
LOCALE_SYDNEY = {'local_timezone': SYDNEY, 'default_timezone': SYDNEY}
# Calendar name shared by every in-memory SQLiteDb created in this module.
calname = 'home'
def test_new_db_version():
    """A db whose stored schema version is older than DB_VERSION is rejected."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    # Bump the module-level constant so the freshly created db looks outdated,
    # and restore it afterwards: the original mutated backend.DB_VERSION
    # permanently, leaking into every subsequent test in the session.
    backend.DB_VERSION += 1
    try:
        with pytest.raises(OutdatedDbVersionError):
            dbi._check_table_version()
    finally:
        backend.DB_VERSION -= 1
def test_event_rrule_recurrence_id():
    """A RECURRENCE-ID override shifts exactly one occurrence of the series."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    assert dbi.list() == list()
    start = BERLIN.localize(datetime(2014, 6, 30, 0, 0))
    end = BERLIN.localize(datetime(2014, 8, 26, 0, 0))
    assert list(dbi.get_localized([calname], start, end)) == list()
    dbi.update(_get_text('event_rrule_recuid'), href='12345.ics', etag='abcd')
    assert dbi.list() == [('12345.ics', 'abcd')]
    events = sorted(dbi.get_localized([calname], start, end),
                    key=lambda e: e.start)
    assert len(events) == 6
    # Only the 2014-07-07 occurrence was overridden to start at 9:00.
    expected = [(6, 30, 7), (7, 7, 9), (7, 14, 7), (7, 21, 7), (7, 28, 7),
                (8, 4, 7)]
    assert [e.start for e in events] == [
        BERLIN.localize(datetime(2014, m, d, h, 0)) for m, d, h in expected]
def test_event_different_timezones():
    """A London event is visible through Berlin queries; in Sydney it spans
    midnight and therefore appears on two consecutive days."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    dbi.update(_get_text('event_dt_london'), href='12345.ics', etag='abcd')

    def events_between(tz, start, end):
        # Query the localized view with a tz-aware window.
        return list(dbi.get_localized(
            [calname], tz.localize(start), tz.localize(end)))

    events = events_between(BERLIN, datetime(2014, 4, 9, 0, 0),
                            datetime(2014, 4, 9, 23, 59))
    assert len(events) == 1
    event = events[0]
    assert event.start_local == LONDON.localize(datetime(2014, 4, 9, 14))
    assert event.end_local == LONDON.localize(datetime(2014, 4, 9, 19))
    # no event scheduled on the next day
    events = events_between(BERLIN, datetime(2014, 4, 10, 0, 0),
                            datetime(2014, 4, 10, 23, 59))
    assert len(events) == 0
    # now setting the local_timezone to Sydney
    dbi.locale = LOCALE_SYDNEY
    events = events_between(SYDNEY, datetime(2014, 4, 9, 0, 0),
                            datetime(2014, 4, 9, 23, 59))
    assert len(events) == 1
    event = events[0]
    assert event.start_local == SYDNEY.localize(datetime(2014, 4, 9, 23))
    assert event.end_local == SYDNEY.localize(datetime(2014, 4, 10, 4))
    # the event spans midnight Sydney, therefore it should also show up on
    # the next day
    events = events_between(SYDNEY, datetime(2014, 4, 10, 0, 0),
                            datetime(2014, 4, 10, 23, 59))
    assert len(events) == 1
    assert event.start_local == SYDNEY.localize(datetime(2014, 4, 9, 23))
    assert event.end_local == SYDNEY.localize(datetime(2014, 4, 10, 4))
def test_event_rrule_recurrence_id_invalid_tzid():
    """A bogus TZID in RECURRENCE-ID must not break series expansion."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    dbi.update(_get_text('event_rrule_recuid_invalid_tzid'),
               href='12345.ics', etag='abcd')
    events = sorted(dbi.get_localized(
        [calname],
        BERLIN.localize(datetime(2014, 4, 30, 0, 0)),
        BERLIN.localize(datetime(2014, 9, 26, 0, 0))))
    assert len(events) == 6
    expected = [(6, 30, 7), (7, 7, 9), (7, 14, 7), (7, 21, 7), (7, 28, 7),
                (8, 4, 7)]
    assert [e.start for e in events] == [
        BERLIN.localize(datetime(2014, m, d, h, 0)) for m, d, h in expected]
# Same recurring event as the 'event_rrule_recuid' fixture, but with the
# RECURRENCE-ID override VEVENT listed *before* the master VEVENT.
event_rrule_recurrence_id_reverse = """
BEGIN:VCALENDAR
BEGIN:VEVENT
UID:event_rrule_recurrence_id
SUMMARY:Arbeit
RECURRENCE-ID:20140707T050000Z
DTSTART;TZID=Europe/Berlin:20140707T090000
DTEND;TZID=Europe/Berlin:20140707T140000
END:VEVENT
BEGIN:VEVENT
UID:event_rrule_recurrence_id
SUMMARY:Arbeit
RRULE:FREQ=WEEKLY;COUNT=6
DTSTART;TZID=Europe/Berlin:20140630T070000
DTEND;TZID=Europe/Berlin:20140630T120000
END:VEVENT
END:VCALENDAR
"""
def test_event_rrule_recurrence_id_reverse():
    """Components may be stored in any order: an override VEVENT listed
    before its master must still expand correctly."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    assert dbi.list() == list()
    start = BERLIN.localize(datetime(2014, 6, 30, 0, 0))
    end = BERLIN.localize(datetime(2014, 8, 26, 0, 0))
    assert list(dbi.get_localized([calname], start, end)) == list()
    dbi.update(event_rrule_recurrence_id_reverse, href='12345.ics',
               etag='abcd')
    assert dbi.list() == [('12345.ics', 'abcd')]
    events = sorted(dbi.get_localized([calname], start, end),
                    key=lambda e: e.start)
    assert len(events) == 6
    expected = [(6, 30, 7), (7, 7, 9), (7, 14, 7), (7, 21, 7), (7, 28, 7),
                (8, 4, 7)]
    assert [e.start for e in events] == [
        BERLIN.localize(datetime(2014, m, d, h, 0)) for m, d, h in expected]
def test_event_rrule_recurrence_id_update_with_exclude():
    """Re-uploading an event drops the removed override and honours the
    newly added EXDATE (one occurrence gone, none shifted)."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    dbi.update(_get_text('event_rrule_recuid'), href='12345.ics', etag='abcd')
    dbi.update(_get_text('event_rrule_recuid_update'), href='12345.ics',
               etag='abcd')
    events = sorted(
        dbi.get_localized([calname],
                          BERLIN.localize(datetime(2014, 4, 30, 0, 0)),
                          BERLIN.localize(datetime(2014, 9, 26, 0, 0))),
        key=lambda e: e.start)
    assert len(events) == 5
    # 2014-07-14 is excluded; all remaining occurrences start at 7:00.
    expected = [(6, 30), (7, 7), (7, 21), (7, 28), (8, 4)]
    assert [e.start for e in events] == [
        BERLIN.localize(datetime(2014, m, d, 7, 0)) for m, d in expected]
def test_no_valid_timezone():
    """A local datetime with a missing TZID falls back to the locale's
    default timezone (Berlin)."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    dbi.update(_get_text('event_dt_local_missing_tz'), href='12345.ics',
               etag='abcd')
    events = sorted(list(dbi.get_localized(
        [calname],
        BERLIN.localize(datetime(2014, 4, 9, 0, 0)),
        BERLIN.localize(datetime(2014, 4, 10, 0, 0)))))
    assert len(events) == 1
    assert events[0].start == BERLIN.localize(datetime(2014, 4, 9, 9, 30))
def test_event_delete():
    """Deleting an href removes every expanded occurrence of the event."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    assert dbi.list() == list()

    def occurrences(until_month):
        # All localized occurrences from 2014-06-30 to the 26th of the
        # given month.
        return list(dbi.get_localized(
            [calname],
            BERLIN.localize(datetime(2014, 6, 30, 0, 0)),
            BERLIN.localize(datetime(2014, until_month, 26, 0, 0))))

    assert occurrences(8) == list()
    dbi.update(event_rrule_recurrence_id_reverse, href='12345.ics',
               etag='abcd')
    assert dbi.list() == [('12345.ics', 'abcd')]
    assert len(occurrences(9)) == 6
    dbi.delete('12345.ics')
    assert len(occurrences(9)) == 0
# Recurring event whose override uses RANGE=THISANDPRIOR — a range kind the
# backend explicitly refuses to handle (see test_this_and_prior).
event_rrule_this_and_prior = """
BEGIN:VCALENDAR
BEGIN:VEVENT
UID:event_rrule_recurrence_id
SUMMARY:Arbeit
RRULE:FREQ=WEEKLY;UNTIL=20140806T060000Z
DTSTART;TZID=Europe/Berlin:20140630T070000
DTEND;TZID=Europe/Berlin:20140630T120000
END:VEVENT
BEGIN:VEVENT
UID:event_rrule_recurrence_id
SUMMARY:Arbeit
RECURRENCE-ID;RANGE=THISANDPRIOR:20140707T050000Z
DTSTART;TZID=Europe/Berlin:20140707T090000
DTEND;TZID=Europe/Berlin:20140707T140000
END:VEVENT
END:VCALENDAR
"""
def test_this_and_prior():
    """RANGE=THISANDPRIOR is unsupported, so the update must raise."""
    db = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    with pytest.raises(UpdateFailed):
        db.update(event_rrule_this_and_prior, href='12345.ics', etag='abcd')
# Template for a weekly series with a RANGE=THISANDFUTURE override; the
# override's DTSTART/DTEND are filled in via str.format placeholders {0}/{1}.
event_rrule_this_and_future_temp = """
BEGIN:VCALENDAR
BEGIN:VEVENT
UID:event_rrule_recurrence_id
SUMMARY:Arbeit
RRULE:FREQ=WEEKLY;UNTIL=20140806T060000Z
DTSTART;TZID=Europe/Berlin:20140630T070000
DTEND;TZID=Europe/Berlin:20140630T120000
END:VEVENT
BEGIN:VEVENT
UID:event_rrule_recurrence_id
SUMMARY:Arbeit (lang)
RECURRENCE-ID;RANGE=THISANDFUTURE:20140707T050000Z
DTSTART;TZID=Europe/Berlin:{0}
DTEND;TZID=Europe/Berlin:{1}
END:VEVENT
END:VCALENDAR
"""
# Concrete instance: override starts 09:00 and ends 18:00 on the same day.
event_rrule_this_and_future = \
    event_rrule_this_and_future_temp.format('20140707T090000', '20140707T180000')
def test_event_rrule_this_and_future():
    """A THISANDFUTURE override rewrites the targeted occurrence and every
    later one (new times and new summary), leaving earlier ones untouched."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    dbi.update(event_rrule_this_and_future, href='12345.ics', etag='abcd')
    assert dbi.list() == [('12345.ics', 'abcd')]
    events = sorted(
        dbi.get_localized([calname],
                          BERLIN.localize(datetime(2014, 4, 30, 0, 0)),
                          BERLIN.localize(datetime(2014, 9, 26, 0, 0))),
        key=lambda x: x.start)
    assert len(events) == 6
    expected = [((6, 30, 7), (6, 30, 12)),
                ((7, 7, 9), (7, 7, 18)),
                ((7, 14, 9), (7, 14, 18)),
                ((7, 21, 9), (7, 21, 18)),
                ((7, 28, 9), (7, 28, 18)),
                ((8, 4, 9), (8, 4, 18))]
    for event, ((sm, sd, sh), (em, ed, eh)) in zip(events, expected):
        assert event.start == BERLIN.localize(datetime(2014, sm, sd, sh, 0))
        assert event.end == BERLIN.localize(datetime(2014, em, ed, eh, 0))
    assert str(events[0].summary) == 'Arbeit'
    # Plain loop replaces the original's enumerate() whose index was unused.
    for event in events[1:]:
        # Accessing .raw before .summary is preserved from the original test;
        # presumably it forces the raw icalendar to be built — confirm before
        # removing.
        event.raw
        assert str(event.summary) == 'Arbeit (lang)'
# Override shifted by one day and stretched across two days (ends the
# following afternoon).
event_rrule_this_and_future_multi_day_shift = \
    event_rrule_this_and_future_temp.format('20140708T090000', '20140709T150000')
def test_event_rrule_this_and_future_multi_day_shift():
    """A THISANDFUTURE override that moves the start by a day and spans two
    days propagates both shifts to all later occurrences."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    dbi.update(event_rrule_this_and_future_multi_day_shift,
               href='12345.ics', etag='abcd')
    assert dbi.list() == [('12345.ics', 'abcd')]
    events = sorted(
        dbi.get_localized([calname],
                          BERLIN.localize(datetime(2014, 4, 30, 0, 0)),
                          BERLIN.localize(datetime(2014, 9, 26, 0, 0))),
        key=lambda x: x.start)
    assert len(events) == 6
    expected = [((6, 30, 7), (6, 30, 12)),
                ((7, 8, 9), (7, 9, 15)),
                ((7, 15, 9), (7, 16, 15)),
                ((7, 22, 9), (7, 23, 15)),
                ((7, 29, 9), (7, 30, 15)),
                ((8, 5, 9), (8, 6, 15))]
    for event, ((sm, sd, sh), (em, ed, eh)) in zip(events, expected):
        assert event.start == BERLIN.localize(datetime(2014, sm, sd, sh, 0))
        assert event.end == BERLIN.localize(datetime(2014, em, ed, eh, 0))
    assert str(events[0].summary) == 'Arbeit'
    for event in events[1:]:
        assert str(event.summary) == 'Arbeit (lang)'
# All-day variant of the THISANDFUTURE template; {} placeholders take the
# override's DTSTART and DTEND dates (VALUE=DATE, so plain yyyymmdd ints).
event_rrule_this_and_future_allday_temp = """
BEGIN:VCALENDAR
BEGIN:VEVENT
UID:event_rrule_recurrence_id_allday
SUMMARY:Arbeit
RRULE:FREQ=WEEKLY;UNTIL=20140806
DTSTART;VALUE=DATE:20140630
DTEND;VALUE=DATE:20140701
END:VEVENT
BEGIN:VEVENT
UID:event_rrule_recurrence_id_allday
SUMMARY:Arbeit (lang)
RECURRENCE-ID;RANGE=THISANDFUTURE;VALUE=DATE:20140707
DTSTART;VALUE=DATE:{}
DTEND;VALUE=DATE:{}
END:VEVENT
END:VCALENDAR
"""
# Concrete instance: override pushed one day later than the recurrence date.
event_rrule_this_and_future_allday = \
    event_rrule_this_and_future_allday_temp.format(20140708, 20140709)
def test_event_rrule_this_and_future_allday():
    """An all-day THISANDFUTURE override shifts that date and all later ones."""
    dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
    dbi.update(event_rrule_this_and_future_allday,
               href='rrule_this_and_future_allday.ics', etag='abcd')
    assert dbi.list() == [('rrule_this_and_future_allday.ics', 'abcd')]
    events = list(dbi.get_floating([calname],
                                   datetime(2014, 4, 30, 0, 0),
                                   datetime(2014, 9, 27, 0, 0)))
    assert len(events) == 6
    expected = [(6, 30), (7, 8), (7, 15), (7, 22), (7, 29), (8, 5)]
    for event, (month, day) in zip(events, expected):
        # All-day occurrences start and end on the same date here.
        assert event.start == date(2014, month, day)
        assert event.end == date(2014, month, day)
    assert str(events[0].summary) == 'Arbeit'
    for event in events[1:]:
        assert str(event.summary) == 'Arbeit (lang)'
def test_event_rrule_this_and_future_allday_prior():
event_rrule_this_and_future_allday_prior = \
event_rrule_this_and_future_allday_temp.format(20140705, 20140706)
dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
dbi.update(event_rrule_this_and_future_allday_prior,
href='rrule_this_and_future_allday.ics', etag='abcd')
assert dbi.list() == [('rrule_this_and_future_allday.ics', 'abcd')]
events = list(dbi.get_floating([calname],
datetime(2014, 4, 30, 0, 0),
datetime(2014, 9, 27, 0, 0)))
assert len(events) == 6
assert events[0].start == date(2014, 6, 30)
assert events[1].start == date(2014, 7, 5)
assert events[2].start == date(2014, 7, 12)
assert events[3].start == date(2014, 7, 19)
assert events[4].start == date(2014, 7, 26)
assert events[5].start == date(2014, 8, 2)
assert events[0].end == date(2014, 6, 30)
assert events[1].end == date(2014, 7, 5)
assert events[2].end == date(2014, 7, 12)
assert events[3].end == date(2014, 7, 19)
assert events[4].end == date(2014, 7, 26)
assert events[5].end == date(2014, 8, 2)
assert str(events[0].summary) == 'Arbeit'
for event in events[1:]:
assert str(event.summary) == 'Arbeit (lang)'
event_rrule_multi_this_and_future_allday = """BEGIN:VCALENDAR
BEGIN:VEVENT
UID:event_multi_rrule_recurrence_id_allday
SUMMARY:Arbeit
RRULE:FREQ=WEEKLY;UNTIL=20140806
DTSTART;VALUE=DATE:20140630
DTEND;VALUE=DATE:20140701
END:VEVENT
BEGIN:VEVENT
UID:event_multi_rrule_recurrence_id_allday
SUMMARY:Arbeit (neu)
RECURRENCE-ID;RANGE=THISANDFUTURE;VALUE=DATE:20140721
DTSTART;VALUE=DATE:20140717
DTEND;VALUE=DATE:20140718
END:VEVENT
BEGIN:VEVENT
UID:event_multi_rrule_recurrence_id_allday
SUMMARY:Arbeit (lang)
RECURRENCE-ID;RANGE=THISANDFUTURE;VALUE=DATE:20140707
DTSTART;VALUE=DATE:20140712
DTEND;VALUE=DATE:20140714
END:VEVENT
END:VCALENDAR"""
def test_event_rrule_multi_this_and_future_allday():
dbi = backend.SQLiteDb(calname, ':memory:', locale=LOCALE_BERLIN)
dbi.update(event_rrule_multi_this_and_future_allday,
href='event_rrule_multi_this_and_future_allday.ics', etag='abcd')
assert dbi.list() == [('event_rrule_multi_this_and_future_allday.ics', 'abcd')]
events = sorted(dbi.get_floating([calname],
datetime(2014, 4, 30, 0, 0),
datetime(2014, 9, 27, 0, 0)))
assert len(events) == 6
assert events[0].start == date(2014, 6, 30)
assert events[1].start == date(2014, 7, 12)
assert events[2].start == date(2014, 7, 17)
assert events[3].start == date(2014, 7, 19)
assert events[4].start == date(2014, 7, 24)
assert events[5].start == date(2014, 7, 31)
assert events[0].end == date(2014, 6, 30)
assert events[1].end == date(2014, 7, 13)
assert events[2].end == date(2014, 7, 17)
assert events[3].end == date(2014, 7, 20)
assert events[4].end == date(2014, 7, 24)
assert events[5].end == date(2014, 7, 31)
assert str(events[0].summary) == 'Arbeit'
for event in [events[1], events[3]]:
assert str(event.summary) == 'Arbeit (lang)'
for event in [events[2], events[4], events[5]]:
assert str(event.summary) == 'Arbeit (neu)'
master = """BEGIN:VEVENT
UID:event_rrule_recurrence_id
SUMMARY:Arbeit
RRULE:FREQ=WEEKLY;UNTIL=20140806T060000Z
DTSTART;TZID=Europe/Berlin:20140630T070000
DTEND;TZID=Europe/Berlin:20140630T120000
END:VEVENT"""
recuid_this_future = icalendar.Event.from_ical("""BEGIN:VEVENT
UID:event_rrule_recurrence_id
SUMMARY:Arbeit
RECURRENCE-ID;RANGE=THISANDFUTURE:20140707T050000Z
DTSTART;TZID=Europe/Berlin:20140707T090000
DTEND;TZID=Europe/Berlin:20140707T140000
END:VEVENT""")
recuid_this_future_duration = icalendar.Event.from_ical("""BEGIN:VEVENT
UID:event_rrule_recurrence_id
SUMMARY:Arbeit
RECURRENCE-ID;RANGE=THISANDFUTURE:20140707T050000Z
DTSTART;TZID=Europe/Berlin:20140707T090000
DURATION:PT4H30M
END:VEVENT""")
def test_calc_shift_deltas():
assert (timedelta(hours=2), timedelta(hours=5)) == \
backend.calc_shift_deltas(recuid_this_future)
assert (timedelta(hours=2), timedelta(hours=4, minutes=30)) == \
backend.calc_shift_deltas(recuid_this_future_duration)
event_a = """BEGIN:VEVENT
UID:123
SUMMARY:event a
RRULE:FREQ=WEEKLY;UNTIL=20140806T060000Z
DTSTART;TZID=Europe/Berlin:20140630T070000
DTEND;TZID=Europe/Berlin:20140630T120000
END:VEVENT"""
event_b = """BEGIN:VEVENT
UID:123
SUMMARY:event b
RRULE:FREQ=WEEKLY;UNTIL=20140806T060000Z
DTSTART;TZID=Europe/Berlin:20140630T070000
DTEND;TZID=Europe/Berlin:20140630T120000
END:VEVENT"""
def test_two_calendars_same_uid(tmpdir):
dbpath = str(tmpdir) + '/khal.db'
dba = backend.SQLiteDb(calname, dbpath, locale=LOCALE_BERLIN)
dbb = backend.SQLiteDb('work', dbpath, locale=LOCALE_BERLIN)
assert dba.list() == []
assert dbb.list() == []
dba.update(event_a, href='12345.ics', etag='abcd')
assert dba.list() == [('12345.ics', 'abcd')]
assert dbb.list() == []
dbb.update(event_b, href='12345.ics', etag='abcd')
assert dba.list() == [('12345.ics', 'abcd')]
assert dbb.list() == [('12345.ics', 'abcd')]
events_a = list(dba.get_localized(['home'],
BERLIN.localize(datetime(2014, 6, 30, 0, 0)),
BERLIN.localize(datetime(2014, 7, 26, 0, 0))))
events_b = list(dba.get_localized(['work'],
BERLIN.localize(datetime(2014, 6, 30, 0, 0)),
BERLIN.localize(datetime(2014, 7, 26, 0, 0))))
assert len(events_a) == 4
assert len(events_b) == 4
events_c = list(dba.get_localized(['work', 'home'],
BERLIN.localize(datetime(2014, 6, 30, 0, 0)),
BERLIN.localize(datetime(2014, 7, 26, 0, 0))))
assert len(events_c) == 8
assert [event.calendar for event in events_c].count('home') == 4
assert [event.calendar for event in events_c].count('work') == 4
dba.delete('12345.ics')
events_a = list(dba.get_localized(['home'],
BERLIN.localize(datetime(2014, 6, 30, 0, 0)),
BERLIN.localize(datetime(2014, 7, 26, 0, 0))))
events_b = list(dba.get_localized(['work'],
BERLIN.localize(datetime(2014, 6, 30, 0, 0)),
BERLIN.localize(datetime(2014, 7, 26, 0, 0))))
assert len(events_a) == 0
assert len(events_b) == 4
events_c = list(dba.get_localized(['work', 'home'],
BERLIN.localize(datetime(2014, 6, 30, 0, 0)),
BERLIN.localize(datetime(2014, 7, 26, 0, 0))))
assert [event.calendar for event in events_c].count('home') == 0
assert [event.calendar for event in events_c].count('work') == 4
assert dba.list() == []
assert dbb.list() == [('12345.ics', 'abcd')]
def test_update_one_should_not_affect_others(tmpdir):
"""test if an THISANDFUTURE param effects other events as well"""
dbpath = str(tmpdir) + '/khal.db'
db = backend.SQLiteDb(calname, dbpath, locale=LOCALE_BERLIN)
db.update(_get_text('event_d_15'), href='first')
events = db.get_floating([calname], datetime(2015, 4, 9, 0, 0), datetime(2015, 4, 10, 0, 0))
assert len(list(events)) == 1
db.update(event_rrule_multi_this_and_future_allday, href='second')
events = list(db.get_floating([calname],
datetime(2015, 4, 9, 0, 0),
datetime(2015, 4, 10, 0, 0)))
assert len(events) == 1
def test_zuluv_events(tmpdir):
"""test if events in Zulu time are correctly recognized as locaized events"""
dbpath = str(tmpdir) + '/khal.db'
db = backend.SQLiteDb(calname, dbpath, locale=LOCALE_BERLIN)
db.update(_get_text('event_dt_simple_zulu'), href='event_zulu')
events = db.get_localized([calname],
BERLIN.localize(datetime(2014, 4, 9, 0, 0)),
BERLIN.localize(datetime(2014, 4, 10, 0, 0)))
events = list(events)
assert len(events) == 1
event = events[0]
assert type(event) == LocalizedEvent
assert event.start_local == BERLIN.localize(datetime(2014, 4, 9, 11, 30))
event_rdate_period = """BEGIN:VEVENT
SUMMARY:RDATE period
DTSTART:19961230T020000Z
DTEND:19961230T060000Z
UID:rdate_period
RDATE;VALUE=PERIOD:19970101T180000Z/19970102T070000Z,19970109T180000Z/PT5H30M
END:VEVENT"""
supported_events = [
event_a, event_b, event_rrule_this_and_future,
event_rrule_this_and_future_allday,
event_rrule_this_and_future_multi_day_shift
]
def test_check_support():
ical = icalendar.Calendar.from_ical(event_rrule_this_and_prior)
for cal_str in supported_events:
ical = icalendar.Calendar.from_ical(cal_str)
[backend.check_support(event, '', '') for event in ical.walk()]
ical = icalendar.Calendar.from_ical(event_rrule_this_and_prior)
with pytest.raises(UpdateFailed):
[backend.check_support(event, '', '') for event in ical.walk()]
ical = icalendar.Calendar.from_ical(event_rdate_period)
with pytest.raises(UpdateFailed):
[backend.check_support(event, '', '') for event in ical.walk()]
card = """BEGIN:VCARD
VERSION:3.0
FN:Unix
BDAY:19710311
END:VCARD
"""
card_no_year = """BEGIN:VCARD
VERSION:3.0
FN:Unix
BDAY:--0311
END:VCARD
"""
card_does_not_parse = """BEGIN:VCARD
VERSION:3.0
FN:Unix
BDAY:x
END:VCARD
"""
card_no_fn = """BEGIN:VCARD
VERSION:3.0
N:Ritchie;Dennis;MacAlistair;;
BDAY:19410909
END:VCARD
"""
day = date(1971, 3, 11)
start = datetime.combine(day, time.min)
end = datetime.combine(day, time.max)
def test_birthdays(tmpdir):
dbpath = str(tmpdir) + '/khal.db'
db = backend.SQLiteDb_Birthdays(calname, dbpath, locale=LOCALE_BERLIN)
assert list(db.get_floating([calname], start, end)) == list()
db.update(card, 'unix.vcf')
events = list(db.get_floating([calname], start, end))
assert len(events) == 1
assert events[0].summary == 'Unix\'s birthday'
def test_birthdays_no_year(tmpdir):
dbpath = str(tmpdir) + '/khal.db'
db = backend.SQLiteDb_Birthdays(calname, dbpath, locale=LOCALE_BERLIN)
assert list(db.get_floating([calname], start, end)) == list()
db.update(card_no_year, 'unix.vcf')
events = list(db.get_floating([calname], start, end))
assert len(events) == 1
assert events[0].summary == 'Unix\'s birthday'
def test_birthdays_no_fn(tmpdir):
dbpath = str(tmpdir) + '/khal.db'
db = backend.SQLiteDb_Birthdays('home', dbpath, locale=LOCALE_BERLIN)
assert list(db.get_floating(['home'],
datetime(1941, 9, 9, 0, 0),
datetime(1941, 9, 9, 23, 59, 59, 9999))) == list()
db.update(card_no_fn, 'unix.vcf')
events = list(db.get_floating(['home'],
datetime(1941, 9, 9, 0, 0),
datetime(1941, 9, 9, 23, 59, 59, 9999)))
assert len(events) == 1
assert events[0].summary == 'Dennis MacAlistair Ritchie\'s birthday'
def test_birthday_does_not_parse(tmpdir):
dbpath = str(tmpdir) + '/khal.db'
db = backend.SQLiteDb_Birthdays(calname, dbpath, locale=LOCALE_BERLIN)
assert list(db.get_floating([calname], start, end)) == list()
db.update(card_does_not_parse, 'unix.vcf')
events = list(db.get_floating(calname, start, end))
assert len(events) == 0
| 39.628986
| 96
| 0.651697
| 3,738
| 27,344
| 4.631889
| 0.077047
| 0.07416
| 0.116669
| 0.136652
| 0.849255
| 0.8347
| 0.801317
| 0.748932
| 0.719533
| 0.705787
| 0
| 0.104219
| 0.2052
| 27,344
| 689
| 97
| 39.686502
| 0.692449
| 0.022089
| 0
| 0.604569
| 0
| 0
| 0.183085
| 0.099412
| 0
| 0
| 0
| 0
| 0.29174
| 1
| 0.040422
| false
| 0
| 0.01406
| 0
| 0.054482
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bd5d7682bddfa3f1aa002397dd25210156821230
| 25
|
py
|
Python
|
ansys/mapdl/core/_commands/misc/__init__.py
|
da1910/pymapdl
|
305b70b30e61a78011e974ff4cb409ee21f89e13
|
[
"MIT"
] | null | null | null |
ansys/mapdl/core/_commands/misc/__init__.py
|
da1910/pymapdl
|
305b70b30e61a78011e974ff4cb409ee21f89e13
|
[
"MIT"
] | null | null | null |
ansys/mapdl/core/_commands/misc/__init__.py
|
da1910/pymapdl
|
305b70b30e61a78011e974ff4cb409ee21f89e13
|
[
"MIT"
] | null | null | null |
from .misc import verify
| 12.5
| 24
| 0.8
| 4
| 25
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 25
| 1
| 25
| 25
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bd671ad173b004454d49ec95ca9b404d57cbec0a
| 31
|
py
|
Python
|
aiogram_dialog_extras/models/__init__.py
|
SamWarden/aiogram_dialog_extras
|
dede383df2b4f34d77fe40459333fdb4e6b8727b
|
[
"MIT"
] | 1
|
2022-02-21T19:28:48.000Z
|
2022-02-21T19:28:48.000Z
|
aiogram_dialog_extras/models/__init__.py
|
SamWarden/aiogram_dialog_extras
|
dede383df2b4f34d77fe40459333fdb4e6b8727b
|
[
"MIT"
] | null | null | null |
aiogram_dialog_extras/models/__init__.py
|
SamWarden/aiogram_dialog_extras
|
dede383df2b4f34d77fe40459333fdb4e6b8727b
|
[
"MIT"
] | null | null | null |
from .text import PositionalVM
| 15.5
| 30
| 0.83871
| 4
| 31
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
bdd085956eec23522e5781a88c137bce77256890
| 5,978
|
py
|
Python
|
tests/test_log_format.py
|
jnbellinger/lta
|
b7cb3c65e0f167e56abb67f8283083aafd700e42
|
[
"MIT"
] | 1
|
2019-07-30T16:03:26.000Z
|
2019-07-30T16:03:26.000Z
|
tests/test_log_format.py
|
jnbellinger/lta
|
b7cb3c65e0f167e56abb67f8283083aafd700e42
|
[
"MIT"
] | 80
|
2019-01-10T21:46:43.000Z
|
2022-03-24T22:40:54.000Z
|
tests/test_log_format.py
|
jnbellinger/lta
|
b7cb3c65e0f167e56abb67f8283083aafd700e42
|
[
"MIT"
] | 1
|
2018-12-10T21:13:11.000Z
|
2018-12-10T21:13:11.000Z
|
# test_log_format.py
"""Unit tests for lta/log_format.py."""
import sys
from requests.exceptions import HTTPError
from .test_util import ObjectLiteral
from lta.log_format import StructuredFormatter
class LiteralRecord(ObjectLiteral):
"""
LiteralRecord is a literal LogRecord.
This class creates an ObjectLiteral that also implements the (getMessage)
method which is often called on LogRecord objects.
This is useful for creating LogRecord literals to be used as return
values from mocked API calls.
"""
def getMessage(self):
"""Format the log message."""
return self.msg % self.args
def test_constructor_default() -> None:
"""Test that StructuredFormatter can be created without any parameters."""
sf = StructuredFormatter()
assert sf.component_type is None
assert sf.component_name is None
assert sf.indent is None
assert sf.separators == (',', ':')
def test_constructor_supplied() -> None:
"""Test that StructuredFormatter can be created with parameters."""
sf = StructuredFormatter(component_type="Picker", component_name="test-picker", ndjson=False)
assert sf.component_type == "Picker"
assert sf.component_name == "test-picker"
assert sf.indent == 4
assert sf.separators == (', ', ': ')
def test_format_default() -> None:
"""Test that StructuredFormatter (no params) provides proper output."""
sf = StructuredFormatter()
log_record = LiteralRecord(
name="lta.picker",
msg="ConnectionError trying to PATCH /status/picker with heartbeat",
args=[],
levelname="ERROR",
levelno=40,
pathname="/home/pmeade/github/lta/lta/picker.py",
filename="picker.py",
module="picker",
exc_info=None,
exc_text=None,
stack_info=None,
lineno=102,
funcName="patch_status_heartbeat",
created=1547003161.046467,
msecs=46.46706581115723,
relativeCreated=93.13035011291504,
thread=140013641434880,
threadName="MainThread",
processName="MainProcess",
process=8147
)
json_text = sf.format(log_record)
assert json_text.startswith("{")
assert json_text.endswith("}")
assert json_text.find("\n") == -1
assert json_text.find("component_type") == -1
assert json_text.find("component_name") == -1
assert json_text.find("timestamp") != -1
def test_format_supplied() -> None:
"""Test that StructuredFormatter (with params) provides proper output."""
sf = StructuredFormatter(component_type="Picker", component_name="test-picker", ndjson=False)
log_record = LiteralRecord(
name="lta.picker",
msg="ConnectionError trying to PATCH /status/picker with heartbeat",
args=[],
levelname="ERROR",
levelno=40,
pathname="/home/pmeade/github/lta/lta/picker.py",
filename="picker.py",
module="picker",
exc_info=None,
exc_text=None,
stack_info=None,
lineno=102,
funcName="patch_status_heartbeat",
created=1547003161.046467,
msecs=46.46706581115723,
relativeCreated=93.13035011291504,
thread=140013641434880,
threadName="MainThread",
processName="MainProcess",
process=8147
)
json_text = sf.format(log_record)
assert json_text.startswith("{")
assert json_text.endswith("}")
assert json_text.find("\n") != -1
assert json_text.find("component_type") != -1
assert json_text.find("component_name") != -1
assert json_text.find("timestamp") != -1
def test_missing_exc_info() -> None:
"""Test that StructuredFormatter (no params) provides proper output."""
sf = StructuredFormatter()
log_record = LiteralRecord(
name="lta.picker",
msg="ConnectionError trying to PATCH /status/picker with heartbeat",
args=[],
levelname="ERROR",
levelno=40,
pathname="/home/pmeade/github/lta/lta/picker.py",
filename="picker.py",
module="picker",
exc_text=None,
stack_info=None,
lineno=102,
funcName="patch_status_heartbeat",
created=1547003161.046467,
msecs=46.46706581115723,
relativeCreated=93.13035011291504,
thread=140013641434880,
threadName="MainThread",
processName="MainProcess",
process=8147
)
json_text = sf.format(log_record)
assert json_text.startswith("{")
assert json_text.endswith("}")
assert json_text.find("\n") == -1
assert json_text.find("component_type") == -1
assert json_text.find("component_name") == -1
assert json_text.find("timestamp") != -1
def test_exc_info_tuple() -> None:
"""Test that StructuredFormatter (no params) provides proper output."""
sf = StructuredFormatter()
log_record = LiteralRecord(
name="lta.picker",
msg="ConnectionError trying to PATCH /status/picker with heartbeat",
args=[],
levelname="ERROR",
levelno=40,
pathname="/home/pmeade/github/lta/lta/picker.py",
filename="picker.py",
module="picker",
exc_text=None,
stack_info=None,
lineno=102,
funcName="patch_status_heartbeat",
created=1547003161.046467,
msecs=46.46706581115723,
relativeCreated=93.13035011291504,
thread=140013641434880,
threadName="MainThread",
processName="MainProcess",
process=8147
)
try:
raise HTTPError("451 Unavailable For Legal Reasons")
except HTTPError:
log_record.exc_info = sys.exc_info()
json_text = sf.format(log_record)
assert json_text.startswith("{")
assert json_text.endswith("}")
assert json_text.find("\n") == -1
assert json_text.find("component_type") == -1
assert json_text.find("component_name") == -1
assert json_text.find("timestamp") != -1
| 33.027624
| 97
| 0.648545
| 664
| 5,978
| 5.700301
| 0.203313
| 0.059181
| 0.088771
| 0.07609
| 0.782563
| 0.753236
| 0.746367
| 0.723646
| 0.723646
| 0.723646
| 0
| 0.06719
| 0.233188
| 5,978
| 180
| 98
| 33.211111
| 0.758508
| 0.123285
| 0
| 0.793103
| 0
| 0
| 0.181466
| 0.045657
| 0
| 0
| 0
| 0
| 0.22069
| 1
| 0.048276
| false
| 0
| 0.027586
| 0
| 0.089655
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bdeb271aad9ed9df56dd5d571f775c95937cc62a
| 128
|
py
|
Python
|
tests/unit/streaming_hmm/utils.py
|
cylance/perturbed-sequence-model
|
d0904262d8344c9e18905ca82d36f1de685b4840
|
[
"Apache-2.0"
] | 4
|
2019-03-07T09:34:47.000Z
|
2021-01-19T13:41:47.000Z
|
tests/unit/streaming_hmm/utils.py
|
cylance/perturbed-sequence-model
|
d0904262d8344c9e18905ca82d36f1de685b4840
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/streaming_hmm/utils.py
|
cylance/perturbed-sequence-model
|
d0904262d8344c9e18905ca82d36f1de685b4840
|
[
"Apache-2.0"
] | 2
|
2019-07-08T03:30:24.000Z
|
2019-12-10T22:32:18.000Z
|
import numpy as np
import pytest
def get_argmax_of_matrix_as_tuple(mat):
return np.unravel_index(mat.argmax(), mat.shape)
| 18.285714
| 52
| 0.78125
| 22
| 128
| 4.272727
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132813
| 128
| 6
| 53
| 21.333333
| 0.846847
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
da3635eacadebc19d35f706b9e0d5b1a1e492bb3
| 4,170
|
py
|
Python
|
src/TrussModel.py
|
somu15/Small_Pf_code
|
35f3d28faab2aa80f2332499f5e7ab19b040eabe
|
[
"MIT"
] | null | null | null |
src/TrussModel.py
|
somu15/Small_Pf_code
|
35f3d28faab2aa80f2332499f5e7ab19b040eabe
|
[
"MIT"
] | null | null | null |
src/TrussModel.py
|
somu15/Small_Pf_code
|
35f3d28faab2aa80f2332499f5e7ab19b040eabe
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 17 15:24:18 2020
@author: dhulls
"""
from anastruct import SystemElements
import numpy as np
class TrussModel:
def HF(self, young1=None, young2=None, area1=None, area2=None, P1=None, P2=None, P3=None, P4=None, P5=None, P6=None):
ss = SystemElements()
# young1 = 2.1e11
# area1 = 2e-3
# young2 = 2.1e11
# area2 = 1e-3
ss.add_truss_element(location=[[0, 0], [4,0]], EA=(area1*young1))
ss.add_truss_element(location=[[4, 0], [8,0]], EA=(area1*young1))
ss.add_truss_element(location=[[8, 0], [12,0]], EA=(area1*young1))
ss.add_truss_element(location=[[12, 0], [16,0]], EA=(area1*young1))
ss.add_truss_element(location=[[16, 0], [20,0]], EA=(area1*young1))
ss.add_truss_element(location=[[20, 0], [24,0]], EA=(area1*young1))
ss.add_truss_element(location=[[2, 2], [6,2]], EA=(area1*young1))
ss.add_truss_element(location=[[6, 2], [10,2]], EA=(area1*young1))
ss.add_truss_element(location=[[10, 2], [14,2]], EA=(area1*young1))
ss.add_truss_element(location=[[14, 2], [18,2]], EA=(area1*young1))
ss.add_truss_element(location=[[18, 2], [22,2]], EA=(area1*young1))
ss.add_truss_element(location=[[0, 0], [2,2]], EA=(area2*young2))
ss.add_truss_element(location=[[2,2], [4,0]], EA=(area2*young2))
ss.add_truss_element(location=[[4,0], [6,2]], EA=(area2*young2))
ss.add_truss_element(location=[[6,2], [8,0]], EA=(area2*young2))
ss.add_truss_element(location=[[8,0], [10,2]], EA=(area2*young2))
ss.add_truss_element(location=[[10,2], [12,0]], EA=(area2*young2))
ss.add_truss_element(location=[[12,0], [14,2]], EA=(area2*young2))
ss.add_truss_element(location=[[14,2], [16,0]], EA=(area2*young2))
ss.add_truss_element(location=[[16,0], [18,2]], EA=(area2*young2))
ss.add_truss_element(location=[[18,2], [20,0]], EA=(area2*young2))
ss.add_truss_element(location=[[20,0], [22,2]], EA=(area2*young2))
ss.add_truss_element(location=[[22,2], [24,0]], EA=(area2*young2))
ss.add_support_hinged(node_id=1)
ss.add_support_roll(node_id=7, direction='x')
# P1 = -5e4
# P2 = -5e4
# P3 = -5e4
# P4 = -5e4
# P5 = -5e4
# P6 = -5e4
ss.point_load(node_id=8, Fy=P1)
ss.point_load(node_id=9, Fy=P2)
ss.point_load(node_id=10, Fy=P3)
ss.point_load(node_id=11, Fy=P4)
ss.point_load(node_id=12, Fy=P5)
ss.point_load(node_id=13, Fy=P6)
ss.solve()
# ss.show_structure()
# ss.show_displacement(factor=10)
K = ss.get_node_results_system(node_id=4)['uy']
return np.array(K)
def LF(self, young1=None, young2=None, area1=None, area2=None, P1=None, P2=None, P3=None, P4=None, P5=None, P6=None):
ss = SystemElements()
# young1 = 2.1e11
# area1 = 2e-3
# young2 = 2.1e11
# area2 = 1e-3
ss.add_truss_element(location=[[0, 0], [12,0]], EA=(area1*young1))
ss.add_truss_element(location=[[12, 0], [24,0]], EA=(area1*young1))
ss.add_truss_element(location=[[6, 2], [18,2]], EA=(area1*young1))
ss.add_truss_element(location=[[0, 0], [6,2]], EA=(area2*young2))
ss.add_truss_element(location=[[6,2], [12,0]], EA=(area2*young2))
ss.add_truss_element(location=[[12,0], [18,2]], EA=(area2*young2))
ss.add_truss_element(location=[[18,2], [24,0]], EA=(area2*young2))
ss.add_support_hinged(node_id=1)
ss.add_support_roll(node_id=3, direction='x')
# P1 = -5e4
# P2 = -5e4
# P3 = -5e4
# P4 = -5e4
# P5 = -5e4
# P6 = -5e4
ss.point_load(node_id=4, Fy=np.sum([P1,P2,P3]))
ss.point_load(node_id=5, Fy=np.sum([P4,P5,P6]))
ss.solve()
# ss.show_structure()
# ss.show_displacement(factor=10)
K = ss.get_node_results_system(node_id=4)['uy']
return np.array(K)
| 39.714286
| 121
| 0.566427
| 638
| 4,170
| 3.545455
| 0.144201
| 0.075155
| 0.132626
| 0.225464
| 0.901857
| 0.871795
| 0.854996
| 0.840407
| 0.840407
| 0.598585
| 0
| 0.111566
| 0.239089
| 4,170
| 104
| 122
| 40.096154
| 0.601324
| 0.105516
| 0
| 0.181818
| 0
| 0
| 0.001624
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036364
| false
| 0
| 0.036364
| 0
| 0.127273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
da8cf5ee6766a184a85375a8b1a0da356b6a0ab7
| 21
|
py
|
Python
|
nes/processors/cpu/instructions/jump/__init__.py
|
Hexadorsimal/pynes
|
dbb3d40c1240fa27f70fa798bcec09188755eec2
|
[
"MIT"
] | 1
|
2017-05-13T18:57:09.000Z
|
2017-05-13T18:57:09.000Z
|
nes/processors/cpu/instructions/jump/__init__.py
|
Hexadorsimal/py6502
|
dbb3d40c1240fa27f70fa798bcec09188755eec2
|
[
"MIT"
] | 7
|
2020-10-24T17:16:56.000Z
|
2020-11-01T14:10:23.000Z
|
nes/processors/cpu/instructions/jump/__init__.py
|
Hexadorsimal/pynes
|
dbb3d40c1240fa27f70fa798bcec09188755eec2
|
[
"MIT"
] | null | null | null |
from .jmp import Jmp
| 10.5
| 20
| 0.761905
| 4
| 21
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
16f4d195ee3aa3ead33f327e99521686ead40993
| 118
|
py
|
Python
|
appengine_config.py
|
sreejithb/cows_and_bulls
|
1c411e788432cfb73bcaefd16823414425677652
|
[
"Apache-2.0"
] | null | null | null |
appengine_config.py
|
sreejithb/cows_and_bulls
|
1c411e788432cfb73bcaefd16823414425677652
|
[
"Apache-2.0"
] | null | null | null |
appengine_config.py
|
sreejithb/cows_and_bulls
|
1c411e788432cfb73bcaefd16823414425677652
|
[
"Apache-2.0"
] | null | null | null |
from google.appengine.ext import vendor
vendor.add('lib')
vendor.add('lib/nltk')
vendor.add('lib/nltk-3.2.1.egg-info')
| 29.5
| 39
| 0.745763
| 22
| 118
| 4
| 0.636364
| 0.306818
| 0.409091
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026786
| 0.050847
| 118
| 4
| 40
| 29.5
| 0.758929
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 0.193277
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
16f4d6d0c44227a924efac04e6497c75e1853893
| 138
|
py
|
Python
|
Python_MiniGame_Fighter/venv/Lib/site-packages/pygame/tests/camera_test.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | 1
|
2020-08-03T00:19:05.000Z
|
2020-08-03T00:19:05.000Z
|
Python_MiniGame_Fighter/venv/Lib/site-packages/pygame/tests/camera_test.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | 1
|
2021-08-23T20:43:21.000Z
|
2021-08-23T20:43:21.000Z
|
Python_MiniGame_Fighter/venv/Lib/site-packages/pygame/tests/camera_test.py
|
JE-Chen/je_old_repo
|
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
|
[
"MIT"
] | null | null | null |
import unittest
import math
import pygame
from pygame.compat import long_
class CameraModuleTest(unittest.TestCase):
pass
| 13.8
| 43
| 0.753623
| 16
| 138
| 6.4375
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210145
| 138
| 9
| 44
| 15.333333
| 0.944954
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.166667
| 0.666667
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
e55f3b02bdf00d14c48cd492a0b1d0aa4649ddff
| 3,398
|
py
|
Python
|
test/crop_image_partial.py
|
mskim99/Pix2Vox_modify
|
0cc28e2c9a4a86c25e570317d2dd296bb8565ff7
|
[
"MIT"
] | null | null | null |
test/crop_image_partial.py
|
mskim99/Pix2Vox_modify
|
0cc28e2c9a4a86c25e570317d2dd296bb8565ff7
|
[
"MIT"
] | null | null | null |
test/crop_image_partial.py
|
mskim99/Pix2Vox_modify
|
0cc28e2c9a4a86c25e570317d2dd296bb8565ff7
|
[
"MIT"
] | null | null | null |
import cv2
import numpy as np
f_index = 6
m_index = 57
c_index = 276
s_index = 298
a_index = 1010
c_x = 136
c_y = 720
s_x = 116
s_y = 692
a_x = 132
a_y = 134
resolution = 256
'''
c_img = cv2.imread('I:/DK_Data_Process/i_1-2_Slices/f_' + str(f_index).zfill(3) + '/coronal/f_' + str(f_index).zfill(3) + '_Coronal_' + str(c_index).zfill(5) + '.bmp')
if type(c_img) is np.ndarray:
c_cropped_img = c_img[c_y: c_y + resolution, c_x: c_x + resolution]
cv2.imwrite('I:/DK_Data_Process/i_1-2_Slices_Cropped/f_' + str(f_index).zfill(3) + '_Coronal_cropped.bmp', c_cropped_img)
'''
'''
c_img = cv2.imread('I:/DK_Data_Process/i_1-2_Slices/f_' + str(f_index).zfill(3) + '/coronal/f_' + str(f_index).zfill(2) + '_Coronal_' + str(c_index).zfill(5) + '.bmp')
if type(c_img) is np.ndarray:
c_cropped_img = c_img[c_y: c_y + resolution, c_x: c_x + resolution]
cv2.imwrite('I:/DK_Data_Process/i_1-2_Slices_Cropped/f_' + str(f_index).zfill(3) + '_Coronal_cropped.bmp', c_cropped_img)
'''
'''
a_img = cv2.imread('I:/DK_Data_Process/i_1-2_Slices/f_' + str(f_index).zfill(3) + '/axial/f_' + str(f_index).zfill(3) + '_Axial_' + str(a_index).zfill(5) + '.bmp')
if type(a_img) is np.ndarray:
a_cropped_img = a_img[a_y: a_y + resolution, a_x: a_x + resolution]
cv2.imwrite('I:/DK_Data_Process/i_1-2_Slices_Cropped/f_' + str(f_index).zfill(3) + '_Axial_cropped.bmp', a_cropped_img)
'''
'''
a_img = cv2.imread('I:/DK_Data_Process/i_1-2_Slices/f_' + str(f_index).zfill(3) + '/axial/f_' + str(f_index).zfill(3) + '_Axial_' + str(a_index).zfill(5) + '.bmp')
if type(a_img) is np.ndarray:
a_cropped_img = a_img[a_y: a_y + resolution, a_x: a_x + resolution]
cv2.imwrite('I:/DK_Data_Process/i_1-2_Slices_Cropped/f_' + str(f_index).zfill(3) + '_Axial_cropped.bmp', a_cropped_img)
'''
s_img = cv2.imread('I:/DK_Data_Process/i_1-2_Slices/f_' + str(f_index).zfill(3) + '/sagittal/f_' + str(f_index).zfill(3) + '_Sagittal_' + str(s_index).zfill(5) + '.bmp')
if type(s_img) is np.ndarray:
s_cropped_img = s_img[s_y: s_y + resolution, s_x: s_x + resolution]
cv2.imwrite('I:/DK_Data_Process/i_1-2_Slices_Cropped/f_' + str(f_index).zfill(3) + '_Sagittal_cropped.bmp', s_cropped_img)
'''
s_img = cv2.imread('I:/DK_Data_Process/i_1-2_Slices/m_' + str(m_index).zfill(3) + '/sagittal/m_' + str(m_index).zfill(2) + '_Sagittal_' + str(s_index).zfill(5) + '.bmp')
if type(s_img) is np.ndarray:
s_cropped_img = s_img[s_y: s_y + resolution, s_x: s_x + resolution]
cv2.imwrite('I:/DK_Data_Process/i_1-2_Slices_Cropped/m_' + str(m_index).zfill(3) + '_Sagittal_cropped.bmp', s_cropped_img)
'''
'''
c_img = cv2.imread('I:/DK_Data_Process/i_1-2_Slices/m_' + str(m_index).zfill(3) + '/coronal/m_' + str(m_index).zfill(2) + '_Coronal_' + str(c_index).zfill(5) + '.bmp')
if type(c_img) is np.ndarray:
c_cropped_img = c_img[c_y: c_y + resolution, c_x: c_x + resolution]
cv2.imwrite('I:/DK_Data_Process/i_1-2_Slices_Cropped/m_' + str(m_index).zfill(3) + '_Coronal_cropped.bmp', c_cropped_img)
'''
'''
a_img = cv2.imread('I:/DK_Data_Process/i_1-2_Slices/m_' + str(m_index).zfill(3) + '/axial/m_' + str(m_index).zfill(2) + '_Axial_' + str(a_index).zfill(5) + '.bmp')
if type(a_img) is np.ndarray:
a_cropped_img = a_img[a_y: a_y + resolution, a_x: a_x + resolution]
cv2.imwrite('I:/DK_Data_Process/i_1-2_Slices_Cropped/m_' + str(m_index).zfill(3) + '_Axial_cropped.bmp', a_cropped_img)
'''
| 49.970588
| 169
| 0.688935
| 657
| 3,398
| 3.16895
| 0.074581
| 0.153698
| 0.105668
| 0.107589
| 0.947166
| 0.947166
| 0.929395
| 0.907781
| 0.907781
| 0.877041
| 0
| 0.0385
| 0.120954
| 3,398
| 68
| 170
| 49.970588
| 0.65852
| 0
| 0
| 0
| 0
| 0
| 0.208829
| 0.164686
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e5b563dc7359212d36bd1f5207000da3badf73ca
| 59
|
py
|
Python
|
Exercício feitos pela primeira vez/ex001colorido.py
|
Claayton/pythonExerciciosLinux
|
696cdb16983638418bd0d0d4fe44dc72662b9c97
|
[
"MIT"
] | 1
|
2021-01-23T15:43:34.000Z
|
2021-01-23T15:43:34.000Z
|
Exercício feitos pela primeira vez/ex001colorido.py
|
Claayton/pythonExerciciosLinux
|
696cdb16983638418bd0d0d4fe44dc72662b9c97
|
[
"MIT"
] | null | null | null |
Exercício feitos pela primeira vez/ex001colorido.py
|
Claayton/pythonExerciciosLinux
|
696cdb16983638418bd0d0d4fe44dc72662b9c97
|
[
"MIT"
] | null | null | null |
# Ex001b — colored "Hello World": ANSI SGR code 0;35 (magenta) wraps the
# greeting, followed by a reset sequence, then a plain second line.
print('\033[0;35mOlá Mundo!\033[m')
print('xD')
| 19.666667
| 39
| 0.627119
| 11
| 59
| 3.363636
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 0.050847
| 59
| 3
| 40
| 19.666667
| 0.446429
| 0.101695
| 0
| 0
| 0
| 0
| 0.528302
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
e5e6a7d0a9cb3e060f6899f51265ff5f39abde94
| 132
|
py
|
Python
|
aim/ql/tree/__init__.py
|
VkoHov/aim
|
92567b48437a8c71b4bde3b034fc0e5c61479cf9
|
[
"Apache-2.0"
] | 1
|
2021-07-19T19:21:30.000Z
|
2021-07-19T19:21:30.000Z
|
aim/ql/tree/__init__.py
|
VkoHov/aim
|
92567b48437a8c71b4bde3b034fc0e5c61479cf9
|
[
"Apache-2.0"
] | 2
|
2021-08-25T16:17:16.000Z
|
2022-02-10T05:49:55.000Z
|
aim/ql/tree/__init__.py
|
paulmchen/aim
|
53212cdce7a80cb8dadfaf7869a31fbf4ee6ce5b
|
[
"Apache-2.0"
] | 1
|
2021-01-29T02:10:14.000Z
|
2021-01-29T02:10:14.000Z
|
from aim.ql.tree.abstract_syntax_tree import AbstractSyntaxTree
from aim.ql.tree.binary_expression_tree import BinaryExpressionTree
| 44
| 67
| 0.893939
| 18
| 132
| 6.333333
| 0.611111
| 0.122807
| 0.157895
| 0.22807
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 132
| 2
| 68
| 66
| 0.919355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e5fc209ca08202941e867302ba87d42897fb69b7
| 5,944
|
py
|
Python
|
src/feature_extract.py
|
JiJingYu/Sensor-Specific-Hyperspectral-Image-Feature-Learning
|
de0ddec567fb8b47b37cffc6215c51533ac35a56
|
[
"Apache-2.0"
] | 1
|
2017-08-14T03:21:00.000Z
|
2017-08-14T03:21:00.000Z
|
src/feature_extract.py
|
JiJingYu/Sensor-Specific-Hyperspectral-Image-Feature-Learning
|
de0ddec567fb8b47b37cffc6215c51533ac35a56
|
[
"Apache-2.0"
] | null | null | null |
src/feature_extract.py
|
JiJingYu/Sensor-Specific-Hyperspectral-Image-Feature-Learning
|
de0ddec567fb8b47b37cffc6215c51533ac35a56
|
[
"Apache-2.0"
] | 1
|
2021-02-16T00:04:52.000Z
|
2021-02-16T00:04:52.000Z
|
import os
import sys
import stat
import h5py
import time
import shutil
import subprocess
import numpy as np
import scipy.io as sio
from data_analysis import find_caffe
# import caffe
import data_analysis.get_feature_from_model as feature
caffe_root = find_caffe.caffe_root
def mkdir_if_not_exist(the_dir):
    """Create *the_dir* (and any missing parents) if it does not exist.

    Uses ``os.makedirs(..., exist_ok=True)`` so there is no race between
    the existence check and the creation (the original check-then-create
    was TOCTOU-prone).  Still raises ``FileExistsError`` if the path
    exists but is not a directory, matching the original behavior.
    """
    os.makedirs(the_dir, exist_ok=True)
def _extract_ip1_features(data_dir, mat_name, result_dir, result_file_template, deploy_name):
    """Extract ip1-layer features using 10 trained Caffe model snapshots.

    Parameters
    ----------
    data_dir : str
        Directory holding the preprocessed ``.mat`` dataset (``~`` allowed).
    mat_name : str
        Name of the ``.mat`` archive containing 'data' and 'labels' entries.
    result_dir : str
        Output directory (created when missing); the model/proto files are
        resolved relative to it.
    result_file_template : str
        ``str.format`` template for the output file name, formatted with
        the snapshot index.
    deploy_name : str
        File name of the Caffe deploy ``.prototxt``.
    """
    expanded_dir = os.path.expanduser(data_dir)
    # Load the archive once; the original parsed the same .mat twice per
    # iteration (once for 'data', once for 'labels').
    archive = sio.loadmat(expanded_dir + '/' + mat_name)
    samples, labels = archive['data'], archive['labels']
    mkdir_if_not_exist(result_dir)
    iters = 2000000  # training iteration count baked into the snapshot names
    for i in range(10):
        pretrained_model = result_dir + '/../model/5x5_mean_std_models_time_{}_iter_{}.caffemodel.h5'.format(i, iters)
        deploy_file = result_dir + '/../proto/' + deploy_name
        extractor = feature.GetFeatureFromCaffe(deploy_file=deploy_file, pretrained_model=pretrained_model)
        extractor.set_data(samples, labels)
        extractor.get_ip1()
        result = {'data': extractor.ip1_data, 'labels': extractor.label}
        sio.savemat(result_dir + result_file_template.format(i), result)


def get_indian_pines_features_from_indian_pines_model():
    """Indian Pines features from Indian Pines-trained models."""
    _extract_ip1_features(
        '../hyperspectral_datas/indian_pines/data/',
        'indian_pines_5x5_mean_std.mat',
        '../result/indian_pines/bn_net_200/feature',
        '/ip_feature_ip_model_{}.mat',
        'indian_pines_5x5_mean_std_deploy.prototxt')


def get_salina_features_from_salina_model():
    """Salina features from Salina-trained models."""
    # NOTE(review): the original used '~/hyperspectral_datas/...' here while
    # the other three functions use '../' — preserved as-is; confirm intent.
    _extract_ip1_features(
        '~/hyperspectral_datas/salina/data/',
        'salina_5x5_mean_std.mat',
        '../result/salina/bn_net_200/feature',
        '/salina_feature_salina_5x5_mean_std_model_{}.mat',
        'salina_5x5_mean_std_deploy.prototxt')


def get_indian_pines_features_from_salina_model():
    """Indian Pines features from Salina-trained models (cross-sensor)."""
    _extract_ip1_features(
        '../hyperspectral_datas/indian_pines/data/',
        'indian_pines_5x5_mean_std.mat',
        '../result/salina/bn_net_200/feature',
        '/ip_feature_salina_model_{}.mat',
        'salina_5x5_mean_std_deploy.prototxt')


def get_salina_features_from_indian_pines_model():
    """Salina features from Indian Pines-trained models (cross-sensor)."""
    _extract_ip1_features(
        '../hyperspectral_datas/salina/data/',
        'salina_5x5_mean_std.mat',
        '../result/indian_pines/bn_net_200/feature',
        '/salina_feature_ip_model_{}.mat',
        'indian_pines_5x5_mean_std_deploy.prototxt')
if __name__ == '__main__':
    # Time the four extraction passes end to end.
    t0 = time.time()
    for job in (get_indian_pines_features_from_indian_pines_model,
                get_salina_features_from_salina_model,
                get_indian_pines_features_from_salina_model,
                get_salina_features_from_indian_pines_model):
        job()
    print(time.time() - t0)
| 47.174603
| 125
| 0.672948
| 777
| 5,944
| 4.731017
| 0.105534
| 0.097933
| 0.089771
| 0.032644
| 0.9037
| 0.9037
| 0.894178
| 0.871056
| 0.855822
| 0.855822
| 0
| 0.027471
| 0.222241
| 5,944
| 125
| 126
| 47.552
| 0.767683
| 0.002019
| 0
| 0.652632
| 0
| 0
| 0.197639
| 0.182799
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0.042105
| 0.115789
| 0
| 0.210526
| 0.010526
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f90cf0f97c31a2ef1009840a342d830f1f3064e0
| 109
|
py
|
Python
|
test.py
|
igelbox/blender-ogf
|
47ae0efcba8420560aaabeed92e5ac3934befebf
|
[
"BSD-2-Clause"
] | 5
|
2015-12-23T17:36:20.000Z
|
2021-12-29T07:16:10.000Z
|
test.py
|
igelbox/blender-ogf
|
47ae0efcba8420560aaabeed92e5ac3934befebf
|
[
"BSD-2-Clause"
] | 1
|
2020-03-08T04:37:19.000Z
|
2020-03-16T21:36:29.000Z
|
test.py
|
igelbox/blender-ogf
|
47ae0efcba8420560aaabeed92e5ac3934befebf
|
[
"BSD-2-Clause"
] | 3
|
2019-05-13T18:30:07.000Z
|
2021-12-29T07:18:51.000Z
|
#!/usr/bin/python
# Smoke test: run the OGF importer against a sample file.
from io_scene_ogf.ogf_import import load, ImportContext

context = ImportContext('test.ogf')
load(context)
| 15.571429
| 55
| 0.779817
| 16
| 109
| 5.125
| 0.6875
| 0.414634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091743
| 109
| 6
| 56
| 18.166667
| 0.828283
| 0.146789
| 0
| 0
| 0
| 0
| 0.086957
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
005dbb89ca4180bf1f665f682415ccf5bbcffe60
| 129
|
py
|
Python
|
tests/pytests/pyre/complex-facility.py
|
willic3/pythia
|
2657b95a0c07fd3c914ab6b5f7ec89a8edba004c
|
[
"BSD-3-Clause"
] | 1
|
2015-11-30T08:01:39.000Z
|
2015-11-30T08:01:39.000Z
|
tests/pytests/pyre/complex-facility.py
|
willic3/pythia
|
2657b95a0c07fd3c914ab6b5f7ec89a8edba004c
|
[
"BSD-3-Clause"
] | 27
|
2018-05-24T18:31:25.000Z
|
2021-10-16T03:57:52.000Z
|
tests/pytests/pyre/complex-facility.py
|
willic3/pythia
|
2657b95a0c07fd3c914ab6b5f7ec89a8edba004c
|
[
"BSD-3-Clause"
] | 7
|
2019-07-19T02:30:56.000Z
|
2021-06-02T22:00:01.000Z
|
#!/usr/bin/env python
def simple():
    """Construct and return a ComplexFacility test component.

    The import is deliberately local, matching the original (presumably so
    TestComponents is only resolved when the fixture is actually used).
    """
    from TestComponents import ComplexFacility
    facility = ComplexFacility()
    return facility
# End of file
| 14.333333
| 46
| 0.72093
| 15
| 129
| 6.2
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193798
| 129
| 8
| 47
| 16.125
| 0.894231
| 0.248062
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
00d4022f46a8206bf28e6ae7526a0393ab54a27e
| 1,157
|
py
|
Python
|
tests/mock.py
|
asyncee/pycamunda
|
f4834d224ff99fcf80874efeaedf68a8a2efa926
|
[
"MIT"
] | null | null | null |
tests/mock.py
|
asyncee/pycamunda
|
f4834d224ff99fcf80874efeaedf68a8a2efa926
|
[
"MIT"
] | null | null | null |
tests/mock.py
|
asyncee/pycamunda
|
f4834d224ff99fcf80874efeaedf68a8a2efa926
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import requests
def raise_requests_exception_mock(*args, **kwargs):
    """Stand-in for an HTTP call that always fails.

    Accepts and ignores any arguments so it can be patched over an
    arbitrary ``requests`` function; always raises ``RequestException``.
    """
    raise requests.exceptions.RequestException
def _build_response_mock(ok):
    """Create a requests.Response-like stub whose truthiness follows *ok*.

    The stub exposes exactly what the original duplicated classes did:
    ``ok``, ``text``, ``content``, ``__bool__`` and ``json()``.
    """
    class Response:
        text = 'text'
        content = 'content'

        def __bool__(self):
            return bool(self.ok)

        def json(self):
            return {
                'message': 'an error message',
                'count': 1,
                'bpmn20Xml': '<my>test</xml>'
            }

    # Keep ``ok`` a class attribute, as in the original stubs.
    Response.ok = ok
    return Response()


def not_ok_response_mock(*args, **kwargs):
    """Mock a failed HTTP response (``ok`` is False); ignores all arguments."""
    return _build_response_mock(False)


def response_mock(*args, **kwargs):
    """Mock a successful HTTP response (``ok`` is True); ignores all arguments."""
    return _build_response_mock(True)
def count_response_mock(*args, **kwargs):
    """Mock a successful HTTP response whose json() reports a count of 1.

    Any positional or keyword arguments are accepted and ignored so the
    mock can be patched over an arbitrary requests call.
    """
    class Response:
        ok = True

        def json(self):
            # Minimal payload: just the count field.
            return {'count': 1}

        def __bool__(self):
            # Truthiness mirrors the ok flag.
            return bool(self.ok)

    return Response()
| 19.948276
| 51
| 0.501296
| 115
| 1,157
| 4.86087
| 0.295652
| 0.085868
| 0.100179
| 0.118068
| 0.749553
| 0.749553
| 0.749553
| 0.683363
| 0.536673
| 0.536673
| 0
| 0.011127
| 0.378565
| 1,157
| 57
| 52
| 20.298246
| 0.766342
| 0.01815
| 0
| 0.74359
| 0
| 0
| 0.113757
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25641
| false
| 0
| 0.025641
| 0.153846
| 0.769231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
dacc8f25df92894254d1538366466ec37b36c84e
| 139
|
py
|
Python
|
src/MQTT/Message/Formatters/JsonFormatter.py
|
dashford/sentinel-client
|
f70acbf3f5ba2cde6e7bd83dc729d0f0d727aedf
|
[
"MIT"
] | null | null | null |
src/MQTT/Message/Formatters/JsonFormatter.py
|
dashford/sentinel-client
|
f70acbf3f5ba2cde6e7bd83dc729d0f0d727aedf
|
[
"MIT"
] | 1
|
2021-03-25T21:49:04.000Z
|
2021-03-25T21:49:04.000Z
|
src/MQTT/Message/Formatters/JsonFormatter.py
|
dashford/sentinel-client
|
f70acbf3f5ba2cde6e7bd83dc729d0f0d727aedf
|
[
"MIT"
] | null | null | null |
import json
class JsonFormatter:
    """Serialise message payloads into JSON strings for MQTT publishing."""

    def __init__(self):
        # No state is needed; kept for interface compatibility.
        pass

    def format(self, message):
        """Return *message* encoded as a JSON document string."""
        serialised = json.dumps(message)
        return serialised
| 13.9
| 34
| 0.640288
| 16
| 139
| 5.3125
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.280576
| 139
| 9
| 35
| 15.444444
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.166667
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
973880ed2438fa3b7ce62e170a617d7cfc748f20
| 3,164
|
py
|
Python
|
study/vanilla/models.py
|
NunoEdgarGFlowHub/wavetorch
|
e6171b69e66e16a2ba4c76869d4e0b15a3e9861f
|
[
"MIT"
] | 470
|
2019-04-30T00:49:21.000Z
|
2022-03-20T08:31:59.000Z
|
study/vanilla/models.py
|
geofiber/wavetorch
|
927ad02dc9db83f72b8df1d91418a6681e60fd56
|
[
"MIT"
] | 8
|
2019-04-30T01:06:36.000Z
|
2021-07-18T06:24:56.000Z
|
study/vanilla/models.py
|
geofiber/wavetorch
|
927ad02dc9db83f72b8df1d91418a6681e60fd56
|
[
"MIT"
] | 76
|
2019-04-30T09:40:39.000Z
|
2022-03-08T18:38:13.000Z
|
import torch
import torch.nn as nn
from torch.nn import functional as F
class CustomRNN(nn.Module):
    """Hand-rolled Elman-style RNN with a linear readout.

    Input ``x`` is a 2-D tensor iterated one time step at a time along
    dim 1; as written the per-step slice transposes to shape (1, batch),
    so this only type-checks when ``input_size == 1`` — confirm against
    callers.  Output has shape (batch, T, output_size).
    """

    def __init__(self, input_size, output_size, hidden_size, batch_first=True, W_scale=1e-1, f_hidden=None):
        """f_hidden: optional name of a torch.nn.functional activation
        applied to the hidden state (e.g. 'tanh'); None disables it.
        batch_first is accepted for API parity but unused here."""
        super(CustomRNN, self).__init__()
        self.input_size = input_size
        self.output_size = output_size
        self.hidden_size = hidden_size
        self.f_hidden = f_hidden
        # Uniform init in [-0.5, 0.5) scaled by W_scale.
        self.W1 = nn.Parameter((torch.rand(hidden_size, input_size) - 0.5) * W_scale)
        self.W2 = nn.Parameter((torch.rand(hidden_size, hidden_size) - 0.5) * W_scale)
        self.W3 = nn.Parameter((torch.rand(output_size, hidden_size) - 0.5) * W_scale)
        self.b_h = nn.Parameter(torch.zeros(hidden_size))

    def forward(self, x):
        # Fix: allocate the initial hidden state on x's device/dtype; the
        # original torch.zeros(...) always allocated on CPU and broke for
        # CUDA inputs.
        h1 = x.new_zeros(x.shape[0], self.hidden_size)
        ys = []
        for xi in x.chunk(x.size(1), dim=1):
            h1 = (torch.matmul(self.W2, h1.t()) + torch.matmul(self.W1, xi.t())).t() + self.b_h
            if self.f_hidden is not None:
                h1 = getattr(F, self.f_hidden)(h1)
            ys.append(torch.matmul(self.W3, h1.t()).t())
        return torch.stack(ys, dim=1)
class CustomRes(nn.Module):
    """Hand-rolled RNN with an additive skip on the hidden state.

    Identical to CustomRNN except that after emitting y_t the previous
    hidden state is added back, so the next step sees h_t + h_{t-1}.
    Same shape constraints as CustomRNN (effectively input_size == 1).
    """

    def __init__(self, input_size, output_size, hidden_size, batch_first=True, W_scale=1e-1, f_hidden=None):
        """f_hidden: optional torch.nn.functional activation name;
        batch_first is accepted for API parity but unused here."""
        super(CustomRes, self).__init__()
        self.input_size = input_size
        self.output_size = output_size
        self.hidden_size = hidden_size
        self.f_hidden = f_hidden
        # Uniform init in [-0.5, 0.5) scaled by W_scale.
        self.W1 = torch.nn.Parameter((torch.rand(hidden_size, input_size) - 0.5) * W_scale)
        self.W2 = torch.nn.Parameter((torch.rand(hidden_size, hidden_size) - 0.5) * W_scale)
        self.W3 = torch.nn.Parameter((torch.rand(output_size, hidden_size) - 0.5) * W_scale)
        self.b_h = torch.nn.Parameter(torch.zeros(hidden_size))

    def forward(self, x):
        # Fix: allocate the initial hidden state on x's device/dtype; the
        # original torch.zeros(...) broke for CUDA inputs.
        h1 = x.new_zeros(x.shape[0], self.hidden_size)
        ys = []
        for xi in x.chunk(x.size(1), dim=1):
            hprev = h1
            h1 = (torch.matmul(self.W2, h1.t()) + torch.matmul(self.W1, xi.t())).t() + self.b_h
            if self.f_hidden is not None:
                h1 = getattr(F, self.f_hidden)(h1)
            ys.append(torch.matmul(self.W3, h1.t()).t())
            # Residual applied AFTER the readout: y_t sees h_t alone, while
            # the next step sees h_t + h_{t-1} — preserved from the
            # original; confirm this ordering is intentional.
            h1 = h1 + hprev
        return torch.stack(ys, dim=1)
class CustomLSTM(nn.Module):
    """Single-layer LSTM followed by a fixed linear readout (W3).

    forward() takes a 2-D (batch, T) input and unsqueezes a trailing
    feature axis, so in practice input_size is expected to be 1 —
    confirm against callers.
    """

    def __init__(self, input_size, output_size, hidden_size, batch_first=True, W_scale=1e-1):
        """W_scale is accepted for API parity with the sibling models but
        is not applied to W3 here (preserved from the original)."""
        super(CustomLSTM, self).__init__()
        self.input_size = input_size
        self.output_size = output_size
        self.hidden_size = hidden_size
        self.lstm = nn.LSTM(input_size, hidden_size, batch_first=batch_first)
        self.W3 = torch.nn.Parameter((torch.rand(output_size, hidden_size) - 0.5))

    def forward(self, x):
        # (batch, T) -> (batch, T, 1) so the LSTM sees one feature per step.
        features, _state = self.lstm(x.unsqueeze(2))
        # Project hidden features to class scores: (batch, T, output_size).
        return torch.matmul(features, self.W3.t())
| 39.061728
| 108
| 0.611884
| 486
| 3,164
| 3.771605
| 0.150206
| 0.125477
| 0.091653
| 0.076378
| 0.834152
| 0.821058
| 0.821058
| 0.791598
| 0.791598
| 0.757774
| 0
| 0.027754
| 0.24842
| 3,164
| 81
| 109
| 39.061728
| 0.743061
| 0.050253
| 0
| 0.606557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098361
| false
| 0
| 0.04918
| 0
| 0.245902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
975c1f54f27bcab1c58a9d751466c9d1baeae037
| 4,118
|
py
|
Python
|
imcsdk/mometa/memory/MemoryPersistentMemoryDimms.py
|
ecoen66/imcsdk
|
b10eaa926a5ee57cea7182ae0adc8dd1c818b0ab
|
[
"Apache-2.0"
] | 31
|
2016-06-14T07:23:59.000Z
|
2021-09-12T17:17:26.000Z
|
imcsdk/mometa/memory/MemoryPersistentMemoryDimms.py
|
sthagen/imcsdk
|
1831eaecb5960ca03a8624b1579521749762b932
|
[
"Apache-2.0"
] | 109
|
2016-05-25T03:56:56.000Z
|
2021-10-18T02:58:12.000Z
|
imcsdk/mometa/memory/MemoryPersistentMemoryDimms.py
|
sthagen/imcsdk
|
1831eaecb5960ca03a8624b1579521749762b932
|
[
"Apache-2.0"
] | 67
|
2016-05-17T05:53:56.000Z
|
2022-03-24T15:52:53.000Z
|
"""This module contains the general information for MemoryPersistentMemoryDimms ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class MemoryPersistentMemoryDimmsConsts:
    """Constant values accepted by MemoryPersistentMemoryDimms properties."""
    # Valid socket_id selectors; "ALL" addresses every CPU socket at once
    # (see the allowed-values list in MemoryPersistentMemoryDimms.prop_meta).
    SOCKET_ID_1 = "1"
    SOCKET_ID_2 = "2"
    SOCKET_ID_3 = "3"
    SOCKET_ID_4 = "4"
    SOCKET_ID_ALL = "ALL"
class MemoryPersistentMemoryDimms(ManagedObject):
    """This is MemoryPersistentMemoryDimms class."""

    consts = MemoryPersistentMemoryDimmsConsts()
    # Properties that participate in the object's name/DN.
    naming_props = set(['socketId'])

    # Managed-object metadata keyed by platform family ("classic" vs
    # "modular"); both entries are identical for this class.
    mo_meta = {
        "classic": MoMeta("MemoryPersistentMemoryDimms", "memoryPersistentMemoryDimms", "pmemory-dimms-[socket_id]", VersionMeta.Version404b, "InputOutput", 0x3f, [], ["admin", "read-only", "user"], ['memoryPersistentMemoryLogicalConfiguration'], [], [None]),
        "modular": MoMeta("MemoryPersistentMemoryDimms", "memoryPersistentMemoryDimms", "pmemory-dimms-[socket_id]", VersionMeta.Version404b, "InputOutput", 0x3f, [], ["admin", "read-only", "user"], ['memoryPersistentMemoryLogicalConfiguration'], [], [None])
    }

    # Per-property metadata (XML attribute name, type, access mode, length
    # limits and allowed values), again keyed by platform family.
    prop_meta = {
        "classic": {
            "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
            "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
            "socket_id": MoPropertyMeta("socket_id", "socketId", "string", VersionMeta.Version404b, MoPropertyMeta.NAMING, 0x8, 0, 510, None, ["1", "2", "3", "4", "ALL"], []),
            "socket_local_dimm_numbers": MoPropertyMeta("socket_local_dimm_numbers", "socketLocalDimmNumbers", "string", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x10, 0, 510, None, [], []),
            "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
            "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version404b, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
        },
        "modular": {
            "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
            "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
            "socket_id": MoPropertyMeta("socket_id", "socketId", "string", VersionMeta.Version404b, MoPropertyMeta.NAMING, 0x8, 0, 510, None, ["1", "2", "3", "4", "ALL"], []),
            "socket_local_dimm_numbers": MoPropertyMeta("socket_local_dimm_numbers", "socketLocalDimmNumbers", "string", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x10, 0, 510, None, [], []),
            "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version404b, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
            "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version404b, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
        },
    }

    # Maps XML attribute names to Python attribute names.
    prop_map = {
        "classic": {
            "dn": "dn",
            "rn": "rn",
            "socketId": "socket_id",
            "socketLocalDimmNumbers": "socket_local_dimm_numbers",
            "status": "status",
            "childAction": "child_action",
        },
        "modular": {
            "dn": "dn",
            "rn": "rn",
            "socketId": "socket_id",
            "socketLocalDimmNumbers": "socket_local_dimm_numbers",
            "status": "status",
            "childAction": "child_action",
        },
    }

    def __init__(self, parent_mo_or_dn, socket_id, **kwargs):
        # socket_id is the naming property: it becomes part of the RN
        # "pmemory-dimms-[socket_id]" declared in mo_meta above.
        self._dirty_mask = 0
        self.socket_id = socket_id
        self.socket_local_dimm_numbers = None
        self.status = None
        self.child_action = None
        ManagedObject.__init__(self, "MemoryPersistentMemoryDimms", parent_mo_or_dn, **kwargs)
| 50.839506
| 259
| 0.63696
| 380
| 4,118
| 6.7
| 0.215789
| 0.050275
| 0.131972
| 0.197958
| 0.7337
| 0.7337
| 0.7337
| 0.7337
| 0.7337
| 0.7337
| 0
| 0.036061
| 0.19864
| 4,118
| 80
| 260
| 51.475
| 0.735455
| 0.03254
| 0
| 0.474576
| 0
| 0
| 0.268882
| 0.127644
| 0
| 0
| 0.010574
| 0
| 0
| 1
| 0.016949
| false
| 0
| 0.050847
| 0
| 0.271186
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c11d750154b201d79834a22e2f1a986b3d527cf0
| 138
|
py
|
Python
|
scripts/reactor/autogen_aquaItem3.py
|
hsienjan/SideQuest-Server
|
3e88debaf45615b759d999255908f99a15283695
|
[
"MIT"
] | null | null | null |
scripts/reactor/autogen_aquaItem3.py
|
hsienjan/SideQuest-Server
|
3e88debaf45615b759d999255908f99a15283695
|
[
"MIT"
] | null | null | null |
scripts/reactor/autogen_aquaItem3.py
|
hsienjan/SideQuest-Server
|
3e88debaf45615b759d999255908f99a15283695
|
[
"MIT"
] | null | null | null |
# ParentID: 2302006
# Character field ID when accessed: 230010400
# ObjectID: 1000016
# Object Position Y: 559
# Object Position X: -1813
| 23
| 45
| 0.753623
| 18
| 138
| 5.777778
| 0.888889
| 0.269231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 0.166667
| 138
| 5
| 46
| 27.6
| 0.643478
| 0.92029
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c14553999e73da9a84d6c4acc861791c7ee7584c
| 4,605
|
py
|
Python
|
tests/avalon_test_framework/tests/work_order_tests/test_submit_getresult.py
|
jinengandhi-intel/avalon
|
1a5482c517197dba681dc47863f5d97a65a4c930
|
[
"Apache-2.0"
] | null | null | null |
tests/avalon_test_framework/tests/work_order_tests/test_submit_getresult.py
|
jinengandhi-intel/avalon
|
1a5482c517197dba681dc47863f5d97a65a4c930
|
[
"Apache-2.0"
] | 2
|
2020-05-26T10:51:20.000Z
|
2020-07-27T06:48:04.000Z
|
tests/avalon_test_framework/tests/work_order_tests/test_submit_getresult.py
|
jinengandhi-intel/avalon
|
1a5482c517197dba681dc47863f5d97a65a4c930
|
[
"Apache-2.0"
] | 2
|
2020-07-14T04:10:07.000Z
|
2020-07-17T07:35:37.000Z
|
import pytest
import logging
from src.libs.avalon_test_base import AvalonBase
from src.libs.verification_libs \
import verify_test, check_negative_test_responses,\
validate_response_code
from src.libs.pre_processing_libs \
import ResultStatus
from conftest import env
logger = logging.getLogger(__name__)
@pytest.mark.usefixtures("setup_teardown")
class TestClass():
    """API tests for the work-order get-result endpoint.

    Every test submits the canned request described by
    ``env['work_order_getresult_input_file']`` and checks the response.
    The repeated submit/assert boilerplate of the original methods is
    factored into private helpers; test names and pytest markers are
    unchanged so collection and filtering behave as before.
    """
    test_obj = AvalonBase()
    pytestmark = pytest.mark.setup_teardown_data(
        test_obj, "WorkOrderGetResult")

    def _submit(self, **kwargs):
        """Submit the work-order get-result request; return the response."""
        return self.test_obj.run_test(
            env['work_order_getresult_input_file'], **kwargs)

    def _assert_rejected_with(self, message, **kwargs):
        """Submit the request and assert it is rejected with *message*."""
        result_response = self._submit(**kwargs)
        assert (
            check_negative_test_responses(
                result_response, message)
            is ResultStatus.SUCCESS.value)

    def _assert_response_code(self, code):
        """Submit the request and assert the response carries *code*."""
        result_response = self._submit()
        assert (validate_response_code(result_response, code)
                is ResultStatus.SUCCESS.value)

    @pytest.mark.listener
    @pytest.mark.sdk
    @pytest.mark.proxy
    def test_workordergetresult_success(self):
        result_response = self._submit()
        assert (
            verify_test(
                result_response, 0,
                self.test_obj.setup_output['pre_test_output'],
                self.test_obj.setup_output['pre_test_workorder_output'])
            is ResultStatus.SUCCESS.value)

    @pytest.mark.sdk
    @pytest.mark.proxy
    @pytest.mark.listener
    def test_workordergetresult_workorderid_different(self):
        self._assert_rejected_with("Invalid work order Id")

    @pytest.mark.sdk
    @pytest.mark.proxy
    @pytest.mark.listener
    def test_workordergetresult_workorderid_specialchar(self):
        self._assert_rejected_with("Invalid work order Id")

    @pytest.mark.sdk
    @pytest.mark.proxy
    @pytest.mark.listener
    def test_workordergetresult_workorderid_null(self):
        self._assert_rejected_with("Invalid work order Id")

    @pytest.mark.sdk
    @pytest.mark.proxy
    @pytest.mark.listener
    def test_workordergetresult_workorderid_nonhexstring(self):
        self._assert_rejected_with(
            "Work order Id not found in the database. "
            "Hence invalid parameter")

    @pytest.mark.sdk
    @pytest.mark.proxy
    @pytest.mark.listener
    def test_workordergetresult_workorderid_alphabetsonly(self):
        self._assert_rejected_with("Invalid work order Id")

    @pytest.mark.sdk
    @pytest.mark.proxy
    @pytest.mark.listener
    def test_workordergetresult_workorderid_withoutquotes(self):
        self._assert_rejected_with("Invalid work order Id")

    @pytest.mark.sdk
    @pytest.mark.proxy
    @pytest.mark.listener
    def test_workordergetresult_emptyparameter(self):
        self._assert_response_code(2)

    @pytest.mark.sdk
    @pytest.mark.proxy
    @pytest.mark.listener
    def test_workordergetresult_unknownparameter(self):
        self._assert_response_code(2)

    @pytest.mark.listener
    def test_workordergetresult_workorderId_empty(self):
        self._assert_rejected_with(
            "Invalid work order Id",
            direct_avalon_listener=True)
| 29.902597
| 72
| 0.656026
| 498
| 4,605
| 5.736948
| 0.154618
| 0.105005
| 0.046202
| 0.077004
| 0.774239
| 0.774239
| 0.764438
| 0.728736
| 0.728736
| 0.728736
| 0
| 0.000892
| 0.26949
| 4,605
| 153
| 73
| 30.098039
| 0.848395
| 0
| 0
| 0.725
| 0
| 0
| 0.124213
| 0.072747
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.083333
| false
| 0
| 0.05
| 0
| 0.158333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c1791e2cb934ee14d6a039c1910efeee4019e42b
| 3,803
|
py
|
Python
|
python/RawNet2/dataloader.py
|
ishine/RawNet
|
cddec5afa27049a4b507f3d48bb02b993ea838bb
|
[
"MIT"
] | 199
|
2019-04-22T09:02:09.000Z
|
2022-03-31T08:54:03.000Z
|
python/RawNet2/dataloader.py
|
ishine/RawNet
|
cddec5afa27049a4b507f3d48bb02b993ea838bb
|
[
"MIT"
] | 23
|
2019-05-23T01:44:18.000Z
|
2022-03-31T07:03:47.000Z
|
python/RawNet2/dataloader.py
|
ishine/RawNet
|
cddec5afa27049a4b507f3d48bb02b993ea838bb
|
[
"MIT"
] | 51
|
2019-04-18T07:21:01.000Z
|
2022-01-12T13:26:59.000Z
|
import numpy as np
import soundfile as sf
from torch.utils import data
class Dataset_VoxCeleb2(data.Dataset):
    """VoxCeleb2 utterance dataset.

    self.list_IDs : list of strings (each string: utt key, also the file
        path relative to base_dir)
    self.labels : dictionary (key: speaker key — first path component of
        the utt key — value: label integer)
    self.nb_samp : integer, the number of timesteps for each mini-batch
    cut : (boolean) adjust utterance duration for mini-batch construction
    return_label : (boolean)
    norm_scale : (boolean) normalize scale alike SincNet github repo
    """

    def __init__(self, list_IDs, base_dir, nb_samp=0, labels=None, cut=True, return_label=True, norm_scale=True):
        self.list_IDs = list_IDs
        self.nb_samp = nb_samp
        self.base_dir = base_dir
        # Fix: the original used a shared mutable default argument ({}).
        self.labels = {} if labels is None else labels
        self.cut = cut
        self.return_label = return_label
        self.norm_scale = norm_scale
        if self.cut and self.nb_samp == 0:
            raise ValueError('when adjusting utterance length, "nb_samp" should be input')

    def __len__(self):
        return len(self.list_IDs)

    def __getitem__(self, index):
        ID = self.list_IDs[index]
        try:
            X, _ = sf.read(self.base_dir + ID)
            X = X.astype(np.float64)
        except Exception as err:
            # Narrowed from a bare `except:`; keep the offending ID and
            # chain the original cause for debuggability.
            raise ValueError('%s' % ID) from err
        if self.norm_scale:
            X = self._normalize_scale(X).astype(np.float32)
        X = X.reshape(1, -1)  # because of LayerNorm for the input
        if self.cut:
            nb_time = X.shape[1]
            if nb_time > self.nb_samp:
                # Random contiguous crop of nb_samp samples.
                start_idx = np.random.randint(low=0, high=nb_time - self.nb_samp)
                X = X[:, start_idx: start_idx + self.nb_samp][0]
            elif nb_time < self.nb_samp:
                # Tile the utterance until long enough, then crop.
                nb_dup = int(self.nb_samp / nb_time) + 1
                X = np.tile(X, (1, nb_dup))[:, :self.nb_samp][0]
            else:
                X = X[0]
        if not self.return_label:
            return X
        y = self.labels[ID.split('/')[0]]
        return X, y

    def _normalize_scale(self, x):
        """Normalize sample scale alike SincNet."""
        return x / np.max(np.abs(x))
class TA_Dataset_VoxCeleb2(data.Dataset):
    """Test-time-augmentation variant of Dataset_VoxCeleb2.

    Instead of one crop per utterance, __getitem__ returns a LIST of
    overlapping windows covering the whole utterance.

    self.list_IDs : list of strings (each string: utt key)
    self.labels : dictionary (key: speaker key, value: label integer)
    self.nb_samp : integer, the number of timesteps for each window
    window_size : integer, overlap (in samples) between adjacent windows
    cut : (boolean) adjust utterance duration for mini-batch construction
    return_label : (boolean)
    norm_scale : (boolean) normalize scale alike SincNet github repo
    """

    def __init__(self, list_IDs, base_dir, nb_samp=0, window_size=0, labels=None, cut=True, return_label=True, norm_scale=True):
        self.list_IDs = list_IDs
        self.window_size = window_size
        self.nb_samp = nb_samp
        self.base_dir = base_dir
        # Fix: the original used a shared mutable default argument ({}).
        self.labels = {} if labels is None else labels
        self.cut = cut
        self.return_label = return_label
        self.norm_scale = norm_scale
        if self.cut and self.nb_samp == 0:
            raise ValueError('when adjusting utterance length, "nb_samp" should be input')

    def __len__(self):
        return len(self.list_IDs)

    def __getitem__(self, index):
        ID = self.list_IDs[index]
        try:
            X, _ = sf.read(self.base_dir + ID)
            X = X.astype(np.float64)
        except Exception as err:
            # Narrowed from a bare `except:`; chain the original cause.
            raise ValueError('%s' % ID) from err
        if self.norm_scale:
            X = self._normalize_scale(X).astype(np.float32)
        X = X.reshape(1, -1)
        list_X = []
        nb_time = X.shape[1]
        if nb_time < self.nb_samp:
            # Too short: tile until long enough, emit a single window.
            nb_dup = int(self.nb_samp / nb_time) + 1
            list_X.append(np.tile(X, (1, nb_dup))[:, :self.nb_samp][0])
        elif nb_time > self.nb_samp:
            # Slide windows of nb_samp samples with window_size overlap.
            step = self.nb_samp - self.window_size
            iteration = int((nb_time - self.window_size) / step) + 1
            for i in range(iteration):
                if i == 0:
                    list_X.append(X[:, :self.nb_samp][0])
                elif i < iteration - 1:
                    list_X.append(X[:, i * step: i * step + self.nb_samp][0])
                else:
                    # Last window is anchored to the utterance end.
                    list_X.append(X[:, -self.nb_samp:][0])
        else:
            list_X.append(X[0])
        if not self.return_label:
            return list_X
        y = self.labels[ID.split('/')[0]]
        return list_X, y

    def _normalize_scale(self, x):
        """Normalize sample scale alike SincNet."""
        return x / np.max(np.abs(x))
| 30.918699
| 135
| 0.677886
| 623
| 3,803
| 3.924559
| 0.17496
| 0.063804
| 0.0818
| 0.035992
| 0.862986
| 0.85317
| 0.85317
| 0.85317
| 0.8
| 0.777914
| 0
| 0.012708
| 0.193006
| 3,803
| 123
| 136
| 30.918699
| 0.783969
| 0.242966
| 0
| 0.632184
| 0
| 0
| 0.041681
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.091954
| false
| 0
| 0.034483
| 0.022989
| 0.241379
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c1a5b50a8633f2d58e508672e8ce811d00ecf2fc
| 7,179
|
py
|
Python
|
interpretation/deepseismic_interpretation/dutchf3/tests/test_dataloaders.py
|
elmajdma/seismic-deeplearning
|
bc084abe153509c40b45f8bf0f80dfda1049d7dc
|
[
"MIT"
] | 270
|
2019-12-17T13:40:51.000Z
|
2022-03-20T10:02:11.000Z
|
interpretation/deepseismic_interpretation/dutchf3/tests/test_dataloaders.py
|
elmajdma/seismic-deeplearning
|
bc084abe153509c40b45f8bf0f80dfda1049d7dc
|
[
"MIT"
] | 233
|
2019-12-18T17:59:36.000Z
|
2021-08-03T13:43:49.000Z
|
interpretation/deepseismic_interpretation/dutchf3/tests/test_dataloaders.py
|
elmajdma/seismic-deeplearning
|
bc084abe153509c40b45f8bf0f80dfda1049d7dc
|
[
"MIT"
] | 118
|
2019-12-17T13:41:43.000Z
|
2022-03-29T02:06:36.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""
Tests for TrainLoader and TestLoader classes when overriding the file names of the seismic and label data.
"""
import tempfile
import numpy as np
from deepseismic_interpretation.dutchf3.data import (
get_test_loader,
TrainPatchLoaderWithDepth,
TrainSectionLoaderWithDepth,
)
import pytest
import yacs.config
import os
# npy files dimensions (IL/XL/D: presumably inline / crossline / depth — TODO confirm)
IL = 5
XL = 10
D = 8
N_CLASSES = 2

CONFIG_FILE = "./experiments/interpretation/dutchf3_patch/configs/unet.yaml"

# Module-level config shared by most tests below; note the tests mutate it
# in place via merge_from_list.
with open(CONFIG_FILE, "rt") as f_read:
    config = yacs.config.load_cfg(f_read)
def generate_npy_files(path, data):
    """Persist *data* to *path* as a ``.npy`` file (thin ``np.save`` wrapper)."""
    np.save(path, data)
def assert_dimensions(test_section_loader):
    """Check that a section loader exposes volumes with the generated npy dims.

    The seismic volume gets 2 extra channels from add_section_depth_channels,
    so its channel axis (1) is skipped and the spatial axes shift to 2 and 3.
    """
    label_shape = test_section_loader.labels.shape
    assert label_shape[0] == IL
    assert label_shape[1] == XL
    assert label_shape[2] == D

    seismic_shape = test_section_loader.seismic.shape
    assert seismic_shape[0] == IL
    assert seismic_shape[2] == XL
    assert seismic_shape[3] == D
def test_TestSectionLoader_should_load_data_from_test1_set():
    """Loader built from config should pick up test1 volumes under DATASET.ROOT."""
    # This test re-reads the config instead of using the module-level one.
    with open(CONFIG_FILE, "rt") as f_read:
        cfg = yacs.config.load_cfg(f_read)

    with tempfile.TemporaryDirectory() as data_dir:
        test_once = os.path.join(data_dir, "test_once")
        splits = os.path.join(data_dir, "splits")
        os.makedirs(test_once)
        os.makedirs(splits)

        generate_npy_files(os.path.join(test_once, "test1_seismic.npy"), np.zeros([IL, XL, D]))
        generate_npy_files(os.path.join(test_once, "test1_labels.npy"), np.ones([IL, XL, D]))

        # The loader expects a split file to exist; an empty one suffices here.
        open(os.path.join(splits, "section_test1.txt"), "a").close()

        TestSectionLoader = get_test_loader(cfg)
        cfg.merge_from_list(["DATASET.ROOT", data_dir])
        test_set = TestSectionLoader(cfg, split="test1")

        assert_dimensions(test_set)
def test_TestSectionLoader_should_load_data_from_test2_set():
    """Loader built from the module-level config should pick up test2 volumes.

    Fix: removed the dead ``A = np.load(...)`` that re-read the seismic file
    immediately after writing it; the value was never used.
    """
    with tempfile.TemporaryDirectory() as data_dir:
        os.makedirs(os.path.join(data_dir, "test_once"))
        os.makedirs(os.path.join(data_dir, "splits"))

        seismic = np.zeros([IL, XL, D])
        generate_npy_files(os.path.join(data_dir, "test_once", "test2_seismic.npy"), seismic)

        labels = np.ones([IL, XL, D])
        generate_npy_files(os.path.join(data_dir, "test_once", "test2_labels.npy"), labels)

        # The loader expects a split file to exist; an empty one suffices here.
        txt_path = os.path.join(data_dir, "splits", "section_test2.txt")
        open(txt_path, "a").close()

        TestSectionLoader = get_test_loader(config)
        config.merge_from_list(["DATASET.ROOT", data_dir])
        test_set = TestSectionLoader(config, split="test2")

        assert_dimensions(test_set)
def test_TestSectionLoader_should_load_data_from_path_override_data():
    """Explicit seismic_path/label_path overrides should load custom volumes."""
    with tempfile.TemporaryDirectory() as data_dir:
        volume_dir = os.path.join(data_dir, "volume_name")
        splits_dir = os.path.join(data_dir, "splits")
        os.makedirs(volume_dir)
        os.makedirs(splits_dir)

        seismic_path = os.path.join(volume_dir, "seismic.npy")
        label_path = os.path.join(volume_dir, "labels.npy")
        generate_npy_files(seismic_path, np.zeros([IL, XL, D]))
        generate_npy_files(label_path, np.ones([IL, XL, D]))

        open(os.path.join(splits_dir, "section_volume_name.txt"), "a").close()

        TestSectionLoader = get_test_loader(config)
        config.merge_from_list(["DATASET.ROOT", data_dir])
        test_set = TestSectionLoader(
            config,
            split="volume_name",
            is_transform=True,
            augmentations=None,
            seismic_path=seismic_path,
            label_path=label_path,
        )

        assert_dimensions(test_set)
def test_TrainPatchLoaderWithDepth_should_fail_on_missing_seismic_file(tmpdir):
    """
    Check that constructing the loader raises when the seismic .npy file
    does not exist (only the labels file is generated here).
    """
    # Setup: expected directory layout with labels but no seismic volume.
    os.makedirs(os.path.join(tmpdir, "volume_name"))
    os.makedirs(os.path.join(tmpdir, "splits"))

    labels = np.ones([IL, XL, D])
    generate_npy_files(os.path.join(tmpdir, "volume_name", "labels.npy"), labels)
    txt_path = os.path.join(tmpdir, "splits", "patch_volume_name.txt")
    open(txt_path, "a").close()

    config.merge_from_list(["DATASET.ROOT", str(tmpdir)])

    # Test: seismic_path points at a file that was never created.
    with pytest.raises(Exception) as excinfo:
        _ = TrainPatchLoaderWithDepth(
            config,
            split="volume_name",
            is_transform=True,
            augmentations=None,
            seismic_path=os.path.join(tmpdir, "volume_name", "seismic.npy"),
            label_path=os.path.join(tmpdir, "volume_name", "labels.npy"),
        )
    assert "does not exist" in str(excinfo.value)
def test_TrainPatchLoaderWithDepth_should_fail_on_missing_label_file(tmpdir):
    """
    Check that constructing the loader raises when the label .npy file
    does not exist (only the seismic file is generated here).
    """
    # Setup: expected directory layout with seismic but no labels volume.
    os.makedirs(os.path.join(tmpdir, "volume_name"))
    os.makedirs(os.path.join(tmpdir, "splits"))

    seimic = np.zeros([IL, XL, D])
    generate_npy_files(os.path.join(tmpdir, "volume_name", "seismic.npy"), seimic)
    txt_path = os.path.join(tmpdir, "splits", "patch_volume_name.txt")
    open(txt_path, "a").close()

    config.merge_from_list(["DATASET.ROOT", str(tmpdir)])

    # Test: label_path points at a file that was never created.
    with pytest.raises(Exception) as excinfo:
        _ = TrainPatchLoaderWithDepth(
            config,
            split="volume_name",
            is_transform=True,
            augmentations=None,
            seismic_path=os.path.join(tmpdir, "volume_name", "seismic.npy"),
            label_path=os.path.join(tmpdir, "volume_name", "labels.npy"),
        )
    assert "does not exist" in str(excinfo.value)
def test_TrainPatchLoaderWithDepth_should_load_with_one_train_and_label_file(tmpdir):
    """
    Check for successful class instantiation w/ single npy file for train & label.
    """
    # Setup
    volume_dir = os.path.join(tmpdir, "volume_name")
    os.makedirs(volume_dir)
    os.makedirs(os.path.join(tmpdir, "splits"))

    seismic_path = os.path.join(volume_dir, "seismic.npy")
    label_path = os.path.join(volume_dir, "labels.npy")
    generate_npy_files(seismic_path, np.zeros([IL, XL, D]))
    generate_npy_files(label_path, np.ones([IL, XL, D]))

    open(os.path.join(tmpdir, "splits", "patch_volume_name.txt"), "a").close()

    config.merge_from_list(["DATASET.ROOT", str(tmpdir)])

    # Test
    train_set = TrainPatchLoaderWithDepth(
        config,
        split="volume_name",
        is_transform=True,
        augmentations=None,
        seismic_path=seismic_path,
        label_path=label_path,
    )
    # Depth axis grows by 2 * PATCH_SIZE (consistent with padding at both ends).
    assert train_set.labels.shape == (IL, XL, D + 2 * config.TRAIN.PATCH_SIZE)
    assert train_set.seismic.shape == (IL, XL, D + 2 * config.TRAIN.PATCH_SIZE)
| 33.704225
| 106
| 0.67391
| 965
| 7,179
| 4.778238
| 0.14715
| 0.049447
| 0.082412
| 0.065929
| 0.829104
| 0.813706
| 0.775971
| 0.737367
| 0.735198
| 0.706138
| 0
| 0.004857
| 0.196963
| 7,179
| 212
| 107
| 33.863208
| 0.79497
| 0.071458
| 0
| 0.592593
| 0
| 0
| 0.137362
| 0.022111
| 0
| 0
| 0
| 0
| 0.103704
| 1
| 0.059259
| false
| 0
| 0.044444
| 0
| 0.103704
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c1f2fdcd4f2e3479e3eedaa945741c4119828c19
| 3,315
|
py
|
Python
|
pyaz/netappfiles/volume/export_policy/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/netappfiles/volume/export_policy/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/netappfiles/volume/export_policy/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
from .... pyaz_utils import _call_az
def add(account_name, allowed_clients, cifs, nfsv3, nfsv41, pool_name, resource_group, rule_index, unix_read_only, unix_read_write, volume_name, add=None, force_string=None, remove=None, set=None):
    '''
    Add a new rule to the export policy for a volume.

    Required Parameters:
    - account_name -- Name of the ANF account.
    - allowed_clients -- None
    - cifs -- Indication that CIFS protocol is allowed
    - nfsv3 -- Indication that NFSv3 protocol is allowed
    - nfsv41 -- Indication that NFSv4.1 protocol is allowed
    - pool_name -- Name of the ANF pool.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - rule_index -- None
    - unix_read_only -- Indication of read only access
    - unix_read_write -- Indication of read and write access
    - volume_name -- Name of the ANF volume.

    Optional Parameters:
    - add -- Add an object to a list of objects by specifying a path and key value pairs. Example: --add property.listProperty <key=value, string or JSON string>
    - force_string -- When using 'set' or 'add', preserve string literals instead of attempting to convert to JSON.
    - remove -- Remove a property or an element from a list. Example: --remove property.list <indexToRemove> OR --remove propertyToRemove
    - set -- Update an object by specifying a property path and value to set. Example: --set property1.property2=<value>
    '''
    # NOTE: _call_az receives locals(), which presumably maps each parameter
    # name to an az CLI flag (verify in pyaz_utils) -- do not introduce local
    # variables before this call or they would leak into the CLI arguments.
    return _call_az("az netappfiles volume export-policy add", locals())
def list(account_name, pool_name, resource_group, volume_name):
    '''
    List the export policy rules for a volume.

    Required Parameters:
    - account_name -- Name of the ANF account.
    - pool_name -- Name of the ANF pool.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - volume_name -- Name of the ANF volume.
    '''
    # NOTE: the name shadows the builtin `list`; it mirrors the az subcommand
    # and must stay unchanged for the generated API. _call_az receives
    # locals(), so no locals may be added before this call.
    return _call_az("az netappfiles volume export-policy list", locals())
def remove(account_name, pool_name, resource_group, rule_index, volume_name, add=None, force_string=None, remove=None, set=None):
    '''
    Remove a rule from the export policy for a volume by rule index. The current rules can be obtained by performing the subgroup list command.

    Required Parameters:
    - account_name -- Name of the ANF account.
    - pool_name -- Name of the ANF pool.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - rule_index -- None
    - volume_name -- Name of the ANF volume.

    Optional Parameters:
    - add -- Add an object to a list of objects by specifying a path and key value pairs. Example: --add property.listProperty <key=value, string or JSON string>
    - force_string -- When using 'set' or 'add', preserve string literals instead of attempting to convert to JSON.
    - remove -- Remove a property or an element from a list. Example: --remove property.list <indexToRemove> OR --remove propertyToRemove
    - set -- Update an object by specifying a property path and value to set. Example: --set property1.property2=<value>
    '''
    # NOTE: the `remove` parameter intentionally shadows the function name
    # (generated API). _call_az receives locals(), so no locals may be added
    # before this call.
    return _call_az("az netappfiles volume export-policy remove", locals())
| 54.344262
| 197
| 0.716742
| 478
| 3,315
| 4.864017
| 0.194561
| 0.030968
| 0.03871
| 0.050323
| 0.784946
| 0.784946
| 0.723441
| 0.711398
| 0.692903
| 0.692903
| 0
| 0.004896
| 0.199095
| 3,315
| 60
| 198
| 55.25
| 0.87081
| 0.743288
| 0
| 0
| 0
| 0
| 0.18006
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.142857
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
de1199a2e59944e63c95571dff645e1ac1604aea
| 157
|
py
|
Python
|
seq_alignment/__init__.py
|
ryanlstevens/seq_alignment
|
3e961297b65fbc7c1a526b0214db701d7f20785d
|
[
"MIT"
] | 1
|
2021-09-03T13:37:51.000Z
|
2021-09-03T13:37:51.000Z
|
seq_alignment/__init__.py
|
ryanlstevens/seq_alignment
|
3e961297b65fbc7c1a526b0214db701d7f20785d
|
[
"MIT"
] | 1
|
2021-09-03T13:37:47.000Z
|
2021-09-03T13:37:47.000Z
|
seq_alignment/__init__.py
|
ryanlstevens/seq_alignment
|
3e961297b65fbc7c1a526b0214db701d7f20785d
|
[
"MIT"
] | null | null | null |
# Re-export the global and local similarity classes at the package level
from .global_similarity import global_similarity
from .local_similarity import local_similarity
| 52.333333
| 61
| 0.859873
| 21
| 157
| 6.238095
| 0.52381
| 0.343511
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11465
| 157
| 3
| 62
| 52.333333
| 0.942446
| 0.375796
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a9df2ac8d12328130872291372c11c2c45d47167
| 1,253
|
py
|
Python
|
test/test_combine_overlap_stats.py
|
MelbourneGenomics/iq
|
0b940c0b169fbbfb1750d275b244cbdc075385b1
|
[
"MIT",
"Unlicense"
] | null | null | null |
test/test_combine_overlap_stats.py
|
MelbourneGenomics/iq
|
0b940c0b169fbbfb1750d275b244cbdc075385b1
|
[
"MIT",
"Unlicense"
] | null | null | null |
test/test_combine_overlap_stats.py
|
MelbourneGenomics/iq
|
0b940c0b169fbbfb1750d275b244cbdc075385b1
|
[
"MIT",
"Unlicense"
] | null | null | null |
import StringIO
import unittest
import iq.combine_overlap_stats
class TestCombineOverlapStats(unittest.TestCase):
    """Tests for iq.combine_overlap_stats.combine on exon + CDS overlap rows."""

    def _combine_lines(self, exon_rows, cds_rows):
        # Drive combine() with in-memory streams and return the output lines.
        target = StringIO.StringIO()
        log = StringIO.StringIO()
        iq.combine_overlap_stats.combine(exon_rows, cds_rows, target, log)
        return target.getvalue().split('\n')

    def test_simple(self):
        lines = self._combine_lines(
            ['A1CF\t1\t2\t50.00\tALT1,ALT2', 'A2M\t3\t4\t75.00\t'],
            ['A2M\t5\t6\t83.33\tALT3'])
        assert len(lines) == 4
        assert lines[1] == 'A1CF\t0\t1\t0\t2\t0\t50.00\tALT1,ALT2'  # no cds data
        assert lines[2] == 'A2M\t5\t3\t6\t4\t83.33\t75.00\tALT3'  # data for both
        assert lines[3] == ''  # trailing newline yields an empty final element

    def test_case(self):
        # Mixed-case gene names: per the expected rows, matching is
        # case-insensitive while the exon-side casing is preserved in output.
        lines = self._combine_lines(
            ['a1CF\t1\t2\t50.00\tALT1,ALT2', 'A2m\t3\t4\t75.00\t'],
            ['A2M\t5\t6\t83.33\tALT3'])
        assert len(lines) == 4
        assert lines[1] == 'a1CF\t0\t1\t0\t2\t0\t50.00\tALT1,ALT2'  # no cds data
        assert lines[2] == 'A2m\t5\t3\t6\t4\t83.33\t75.00\tALT3'  # data for both
        assert lines[3] == ''
# Allow running this test module directly: python test_combine_overlap_stats.py
if __name__ == '__main__':
    unittest.main()
| 37.969697
| 80
| 0.607342
| 186
| 1,253
| 4.005376
| 0.290323
| 0.088591
| 0.053691
| 0.075168
| 0.805369
| 0.805369
| 0.805369
| 0.805369
| 0.805369
| 0.805369
| 0
| 0.111111
| 0.224262
| 1,253
| 32
| 81
| 39.15625
| 0.65535
| 0.040702
| 0
| 0.5
| 0
| 0
| 0.243943
| 0.203843
| 0
| 0
| 0
| 0
| 0.285714
| 1
| 0.071429
| false
| 0
| 0.107143
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a9e97a6beeb395d74f7690974e109cffb06044f0
| 186
|
py
|
Python
|
helper/main_class.py
|
AVM-Martin/CTF-EncryptionAlgorithm
|
1226f3db8d3358e1f50ae2f2722f66dee281507a
|
[
"WTFPL"
] | null | null | null |
helper/main_class.py
|
AVM-Martin/CTF-EncryptionAlgorithm
|
1226f3db8d3358e1f50ae2f2722f66dee281507a
|
[
"WTFPL"
] | null | null | null |
helper/main_class.py
|
AVM-Martin/CTF-EncryptionAlgorithm
|
1226f3db8d3358e1f50ae2f2722f66dee281507a
|
[
"WTFPL"
] | null | null | null |
class Cryptography():
    """Cipher that stores a key and passes text through unchanged.

    Appears intended as a base/stub: encrypt and decrypt are identity
    transforms, presumably to be overridden by concrete algorithms.
    """

    def __init__(self, key):
        # The key is stored as-is; this class never uses it itself.
        self.key = key

    def encrypt(self, text):
        """Return *text* unchanged (identity transform)."""
        return text

    def decrypt(self, text):
        """Return *text* unchanged (identity transform)."""
        return text

    def get_key(self):
        """Return the key supplied at construction."""
        return self.key
| 14.307692
| 25
| 0.688172
| 28
| 186
| 4.392857
| 0.392857
| 0.170732
| 0.227642
| 0.292683
| 0.341463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.198925
| 186
| 12
| 26
| 15.5
| 0.825503
| 0
| 0
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0
| 0
| 0.333333
| 0.888889
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
e71e76823fa303686b10dad2c011bc4e524a05b7
| 72
|
py
|
Python
|
tkdet/data/samplers/__init__.py
|
tkhe/tkdetection
|
54e6c112ef2930e755f457e38449736f5743a9ea
|
[
"MIT"
] | 1
|
2020-10-09T02:27:13.000Z
|
2020-10-09T02:27:13.000Z
|
tkdet/data/samplers/__init__.py
|
tkhe/tkdetection
|
54e6c112ef2930e755f457e38449736f5743a9ea
|
[
"MIT"
] | null | null | null |
tkdet/data/samplers/__init__.py
|
tkhe/tkdetection
|
54e6c112ef2930e755f457e38449736f5743a9ea
|
[
"MIT"
] | null | null | null |
from .distributed_sampler import *
from .grouped_batch_sampler import *
| 24
| 36
| 0.833333
| 9
| 72
| 6.333333
| 0.666667
| 0.45614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 72
| 2
| 37
| 36
| 0.890625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e722d90948d86d38931bf220e0f1c736d366fbf1
| 91
|
py
|
Python
|
python_work/Chapter8/making_pizzas.py
|
Elektra-2/python_crash_course_2nd
|
1c8beaddfe037faa3a36e7c384a6ea2f9d560060
|
[
"MIT"
] | 1
|
2020-08-25T18:42:30.000Z
|
2020-08-25T18:42:30.000Z
|
python_work/Chapter8/making_pizzas.py
|
Elektra-2/python_crash_course_2nd
|
1c8beaddfe037faa3a36e7c384a6ea2f9d560060
|
[
"MIT"
] | null | null | null |
python_work/Chapter8/making_pizzas.py
|
Elektra-2/python_crash_course_2nd
|
1c8beaddfe037faa3a36e7c384a6ea2f9d560060
|
[
"MIT"
] | null | null | null |
# Import the whole pizza module and call its make_pizza helper; the first
# argument is the size, the remaining arguments are toppings.
import pizza

pizza.make_pizza(16, 'pepperoni')
pizza.make_pizza(17, 'cheese', 'ham')
| 9.1
| 37
| 0.692308
| 13
| 91
| 4.692308
| 0.615385
| 0.295082
| 0.459016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051282
| 0.142857
| 91
| 9
| 38
| 10.111111
| 0.730769
| 0
| 0
| 0
| 0
| 0
| 0.209302
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
e7ab5741966205f04ebb377c9b3b8ed591fa3734
| 141
|
py
|
Python
|
setup.py
|
aymericvie/EvoNetworks
|
462622407610b6773fa5618040a6fd69a71029c0
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
aymericvie/EvoNetworks
|
462622407610b6773fa5618040a6fd69a71029c0
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
aymericvie/EvoNetworks
|
462622407610b6773fa5618040a6fd69a71029c0
|
[
"Apache-2.0"
] | null | null | null |
"""Install third-party dependencies with the current interpreter's pip.

NOTE(review): installing packages from setup.py is unusual -- consider
declaring these in install_requires / requirements.txt instead.
"""
import subprocess
import sys

# Use `python -m pip` so packages land in the environment running this
# script, and check_call so a failed install raises instead of being
# silently ignored (os.system discarded the exit status).
for package in ("numpy", "matplotlib", "networkx", "tqdm"):
    subprocess.check_call([sys.executable, "-m", "pip", "install", package])
| 23.5
| 35
| 0.758865
| 22
| 141
| 4.863636
| 0.409091
| 0.299065
| 0.411215
| 0.672897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099291
| 141
| 5
| 36
| 28.2
| 0.84252
| 0
| 0
| 0
| 0
| 0
| 0.531915
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e7ae54f652b4ff9464e02983f4b3359769726b76
| 198
|
py
|
Python
|
Server/Python/src/dbs/dao/MySQL/BlockParent/Insert.py
|
vkuznet/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 8
|
2015-08-14T04:01:32.000Z
|
2021-06-03T00:56:42.000Z
|
Server/Python/src/dbs/dao/MySQL/BlockParent/Insert.py
|
yuyiguo/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 162
|
2015-01-07T21:34:47.000Z
|
2021-10-13T09:42:41.000Z
|
Server/Python/src/dbs/dao/MySQL/BlockParent/Insert.py
|
yuyiguo/DBS
|
14df8bbe8ee8f874fe423399b18afef911fe78c7
|
[
"Apache-2.0"
] | 16
|
2015-01-22T15:27:29.000Z
|
2021-04-28T09:23:28.000Z
|
#!/usr/bin/env python
""" DAO Object for BlockParents table """
from dbs.dao.Oracle.BlockParent.Insert import Insert as OraBlockParentInsert
class Insert(OraBlockParentInsert):
    """MySQL BlockParent Insert DAO; reuses the Oracle implementation unchanged."""
    pass
| 22
| 76
| 0.737374
| 23
| 198
| 6.347826
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.171717
| 198
| 8
| 77
| 24.75
| 0.890244
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
99c05b35cfc05868503779a5c0ca6a11442475ea
| 36,055
|
py
|
Python
|
inventarios/forms.py
|
angiealejo/CoreM
|
211383f5fb40fe1f2364b2b80597de562507f687
|
[
"MIT"
] | 1
|
2018-03-23T03:58:09.000Z
|
2018-03-23T03:58:09.000Z
|
inventarios/forms.py
|
angiealejo/CoreM
|
211383f5fb40fe1f2364b2b80597de562507f687
|
[
"MIT"
] | null | null | null |
inventarios/forms.py
|
angiealejo/CoreM
|
211383f5fb40fe1f2364b2b80597de562507f687
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Django:
from django.forms import ModelForm
from django.forms import TextInput
from django.forms import Select
# from django.forms import SelectMultiple
from django.forms import ChoiceField
from django.forms import Textarea
from django.forms import CharField
from django.forms import Form
from django.forms import URLInput
# Modelos:
from .models import Almacen
from .models import Articulo
from .models import UdmArticulo
from .models import MovimientoCabecera
# from .models import MovimientoDetalle
from .models import MOVIMIENTO_ESTADO
from .models import MOVIMIENTO_CLASIFICACION
from .models import MOVIMIENTO_TIPO
# from .models import SeccionAlmacen
from trabajos.models import OrdenTrabajo
from seguridad.models import Profile
# Choice tuples for a warehouse's state: (stored value, display label).
# Not referenced elsewhere in this visible chunk -- presumably used by a
# form or template; verify before removing.
ALMACEN_ESTADO = (
    ('ACT', 'ACTIVO'),
    ('DES', 'DESHABILITADO'),
)
# ----------------- ALMACEN ----------------- #
class AlmacenForm(ModelForm):
    """Create/edit form for Almacen (warehouse)."""

    class Meta:
        model = Almacen
        fields = [
            'clave',
            'descripcion',
            'estado',
        ]
        # 'form-control input-sm' keeps inputs consistent with the app's
        # Bootstrap-style markup.
        widgets = {
            'clave': TextInput(attrs={'class': 'form-control input-sm'}),
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'estado': Select(attrs={'class': 'form-control input-sm'}),
        }
# ----------------- UDM ODOMETRO ----------------- #
class UdmArticuloForm(ModelForm):
    """Form for UdmArticulo (article unit of measure); exposes every model field."""

    class Meta:
        model = UdmArticulo
        fields = '__all__'
        widgets = {
            'clave': TextInput(attrs={'class': 'form-control input-sm'}),
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
        }
# ----------------- ARTICULO ----------------- #
class ArticuloFilterForm(ModelForm):
    """Filter form for Articulo listings.

    Only the first four fields get explicit widgets; the remaining listed
    fields fall back to Django's default widgets.
    """

    class Meta:
        model = Articulo
        fields = [
            'clave',
            'descripcion',
            'tipo',
            'clave_jde',
            'estado',
            'imagen',
            'marca',
            'modelo',
            'numero_parte',
        ]
        widgets = {
            'clave': TextInput(attrs={'class': 'form-control input-sm'}),
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'tipo': Select(attrs={'class': 'form-control input-sm'}),
            'clave_jde': TextInput(attrs={'class': 'form-control input-sm'}),
        }
class ArticuloForm(ModelForm):
    """Create/edit form for Articulo, including stock thresholds and media."""

    class Meta:
        model = Articulo
        fields = [
            'clave',
            'descripcion',
            'tipo',
            'udm',
            'observaciones',
            'url',
            'marca',
            'modelo',
            'numero_parte',
            'stock_seguridad',
            'stock_minimo',
            'stock_maximo',
            'clave_jde',
            'estado',
            'imagen',
        ]
        widgets = {
            'clave': TextInput(attrs={'class': 'form-control input-sm'}),
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'tipo': Select(attrs={'class': 'form-control input-sm'}),
            'udm': Select(attrs={'class': 'form-control input-sm'}),
            'observaciones': Textarea(
                attrs={'class': 'form-control input-sm'}),
            'url': URLInput(attrs={'class': 'form-control input-sm', 'placeholder':'http://www.website.com'}),
            # type=number renders the stock thresholds as numeric inputs.
            'stock_seguridad': TextInput(
                attrs={'class': 'form-control input-sm', 'type': 'number'}),
            'stock_minimo': TextInput(
                attrs={'class': 'form-control input-sm', 'type': 'number'}),
            'stock_maximo': TextInput(
                attrs={'class': 'form-control input-sm', 'type': 'number'}),
            'clave_jde': TextInput(attrs={'class': 'form-control input-sm'}),
            'estado': Select(attrs={'class': 'form-control input-sm'}),
            'marca': TextInput(attrs={'class': 'form-control input-sm'}),
            'modelo': TextInput(attrs={'class': 'form-control input-sm'}),
            'numero_parte': TextInput(attrs={'class': 'form-control input-sm'}),
        }
        # Display-name overrides for labels Django would otherwise derive
        # from the field names.
        labels = {
            'clave_jde': 'Clave JDE',
            'stock_seguridad': 'Stock de Seguridad',
            'numero_parte': 'No. Parte',
            'stock_minimo': 'Stock Mínimo',
            'stock_maximo': 'Stock Máximo',
            'url': 'URL'
        }
# ----------------- STOCK ----------------- #
class StockFilterForm(Form):
    """Filter form for stock listings: warehouse, article and quantity bounds.

    The choice lists for ``almacen`` and ``articulo`` are rebuilt from the
    database every time the form is instantiated.
    """

    almacen = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    articulo = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    cantidad_menorque = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    cantidad_mayorque = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )

    def __init__(self, *args, **kwargs):
        super(StockFilterForm, self).__init__(*args, **kwargs)
        self.fields['articulo'].choices = self.obtener_Articulos()
        self.fields['almacen'].choices = self.obtener_Almacenes()

    def obtener_Articulos(self):
        """Return (id, "(clave) descripcion") choices for every Articulo."""
        choices = [('', 'Todos')]
        for articulo in Articulo.objects.all():
            # A missing clave is rendered as "-".
            clave = '-' if articulo.clave is None else articulo.clave
            choices.append(
                (articulo.id, "(%s) %s" % (clave, articulo.descripcion)))
        return choices

    def obtener_Almacenes(self):
        """Return (id, "(clave) descripcion") choices for every Almacen."""
        choices = [('', 'Todos')]
        for almacen in Almacen.objects.all():
            choices.append(
                (almacen.id, "(%s) %s" % (almacen.clave, almacen.descripcion)))
        return choices
# ----------------- ENTRADAS ----------------- #
class EntradaSaldoFiltersForm(Form):
    """Filters for the opening-balance ('saldo') entry movement list."""

    descripcion = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    almacen_destino = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm select2'})
    )
    fecha_inicio = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    fecha_fin = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    estado = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )

    def __init__(self, *args, **kwargs):
        super(EntradaSaldoFiltersForm, self).__init__(*args, **kwargs)
        # Choice lists are refreshed from the DB on every instantiation.
        self.fields['almacen_destino'].choices = self.get_Almacenes()
        self.fields['estado'].choices = self.get_Estados(MOVIMIENTO_ESTADO)

    def get_Almacenes(self):
        """Return (id, descripcion) choices for every Almacen plus an empty default."""
        choices = [('', '-------')]
        for almacen in Almacen.objects.all():
            choices.append((almacen.id, "%s" % (almacen.descripcion)))
        return choices

    def get_Estados(self, _opciones):
        """Prefix the supplied choice tuples with an empty default option."""
        return [('', '-------')] + list(_opciones)
class EntradaSaldoForm(ModelForm):
    """Create form for an opening-balance ('saldo') entry movement."""

    def __init__(self, *args, **kwargs):
        super(EntradaSaldoForm, self).__init__(*args, **kwargs)
        # The model field is optional; this movement type requires it.
        self.fields['almacen_destino'].required = True

    class Meta:
        model = MovimientoCabecera
        fields = [
            'descripcion',
            'almacen_destino',
            'fecha',
        ]
        widgets = {
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'almacen_destino': Select(
                attrs={
                    'class': 'form-control input-sm'
                }
            ),
            'fecha': TextInput(attrs={'class': 'form-control input-sm',
                                      'data-date-format': 'yyyy-mm-dd'}),
        }
class EntradaCompraFiltersForm(Form):
    """Filters for the purchase ('compra') entry movement list."""

    descripcion = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    almacen_destino = ChoiceField(
        widget=Select(
            attrs={'class': 'form-control input-sm'}
        )
    )
    fecha_inicio = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    fecha_fin = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    # NOTE(review): the date-picker attrs on this text field look copy-pasted
    # from the fecha_* fields above -- confirm whether they are intended.
    proveedor = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    persona_recibe = ChoiceField(
        widget=Select(
            attrs={'class': 'form-control input-sm'}
        )
    )
    estado = ChoiceField(
        widget=Select(
            attrs={'class': 'form-control input-sm'}
        )
    )

    def __init__(self, *args, **kwargs):
        super(EntradaCompraFiltersForm, self).__init__(*args, **kwargs)
        # Choice lists are refreshed from the DB on every instantiation.
        self.fields['almacen_destino'].choices = self.get_Almacenes()
        self.fields['persona_recibe'].choices = self.get_Profiles()
        self.fields['estado'].choices = self.get_Estados(MOVIMIENTO_ESTADO)

    def get_Almacenes(self):
        # (id, descripcion) choices for every Almacen plus an empty default.
        almacen_destino = [('', '-------')]
        registros = Almacen.objects.all()
        for registro in registros:
            almacen_destino.append(
                (
                    registro.id,
                    "%s" % (registro.descripcion)
                )
            )
        return almacen_destino

    def get_Profiles(self):
        # (id, full name) choices for every user Profile plus an empty default.
        persona_recibe = [('', '-------')]
        registros = Profile.objects.all()
        for registro in registros:
            persona_recibe.append(
                (
                    registro.id,
                    registro.user.get_full_name()
                )
            )
        return persona_recibe

    def get_Estados(self, _opciones):
        # Prefix the supplied choice tuples with an empty default option.
        opciones = [('', '-------')]
        for registro in _opciones:
            opciones.append(registro)
        return opciones
class EntradaCompraForm(ModelForm):
    """Create form for a purchase ('compra') entry movement."""

    def __init__(self, *args, **kwargs):
        super(EntradaCompraForm, self).__init__(*args, **kwargs)
        # The model field is optional; this movement type requires it.
        self.fields['almacen_destino'].required = True

    class Meta:
        model = MovimientoCabecera
        fields = [
            'descripcion',
            'fecha',
            'almacen_destino',
            'proveedor',
            'persona_recibe',
        ]
        widgets = {
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'almacen_destino': Select(
                attrs={'class': 'form-control input-sm'}
            ),
            'fecha': TextInput(
                attrs={
                    'class': 'form-control input-sm',
                    'data-date-format': 'yyyy-mm-dd'
                }
            ),
            'proveedor': TextInput(attrs={'class': 'form-control input-sm'}),
            'persona_recibe': Select(
                attrs={'class': 'form-control input-sm'}
            ),
        }
class EntradaAjusteFiltersForm(Form):
    """Filters for the adjustment ('ajuste') entry movement list."""

    descripcion = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    almacen_destino = ChoiceField(
        widget=Select(
            attrs={'class': 'form-control input-sm'}
        )
    )
    fecha_inicio = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    fecha_fin = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    estado = ChoiceField(
        widget=Select(
            attrs={'class': 'form-control input-sm'}
        )
    )

    def __init__(self, *args, **kwargs):
        super(EntradaAjusteFiltersForm, self).__init__(*args, **kwargs)
        # Choice lists are refreshed from the DB on every instantiation.
        self.fields['almacen_destino'].choices = self.get_Almacenes()
        self.fields['estado'].choices = self.get_Estados(MOVIMIENTO_ESTADO)

    def get_Almacenes(self):
        # (id, descripcion) choices for every Almacen plus an empty default.
        almacen_destino = [('', '-------')]
        registros = Almacen.objects.all()
        for registro in registros:
            almacen_destino.append(
                (
                    registro.id,
                    "%s" % (registro.descripcion)
                )
            )
        return almacen_destino

    def get_Estados(self, _opciones):
        # Prefix the supplied choice tuples with an empty default option.
        opciones = [('', '-------')]
        for registro in _opciones:
            opciones.append(registro)
        return opciones
class EntradaAjusteForm(ModelForm):
    """Create form for an adjustment ('ajuste') entry movement."""

    def __init__(self, *args, **kwargs):
        super(EntradaAjusteForm, self).__init__(*args, **kwargs)
        # The model field is optional; this movement type requires it.
        self.fields['almacen_destino'].required = True

    class Meta:
        model = MovimientoCabecera
        fields = [
            'descripcion',
            'almacen_destino',
            'fecha',
        ]
        widgets = {
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'almacen_destino': Select(
                attrs={
                    'class': 'form-control input-sm'
                }
            ),
            'fecha': TextInput(attrs={'class': 'form-control input-sm',
                                      'data-date-format': 'yyyy-mm-dd'}),
        }
class EntradaTraspasoFiltersForm(Form):
    """Filters for the transfer ('traspaso') entry movement list.

    Covers both origin and destination warehouse plus the people handing
    over and receiving the goods.
    """

    estado = ChoiceField(
        widget=Select(
            attrs={'class': 'form-control input-sm'}
        )
    )
    descripcion = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    almacen_origen = ChoiceField(
        widget=Select(
            attrs={'class': 'form-control input-sm'}
        )
    )
    almacen_destino = ChoiceField(
        widget=Select(
            attrs={'class': 'form-control input-sm'}
        )
    )
    fecha_inicio = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    fecha_fin = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    persona_entrega = ChoiceField(
        widget=Select(
            attrs={'class': 'form-control input-sm'}
        )
    )
    persona_recibe = ChoiceField(
        widget=Select(
            attrs={'class': 'form-control input-sm'}
        )
    )

    def __init__(self, *args, **kwargs):
        super(EntradaTraspasoFiltersForm, self).__init__(*args, **kwargs)
        # Choice lists are refreshed from the DB on every instantiation.
        self.fields['estado'].choices = self.get_Estados(MOVIMIENTO_ESTADO)
        self.fields['almacen_origen'].choices = self.get_Almacenes()
        self.fields['almacen_destino'].choices = self.get_Almacenes()
        self.fields['persona_entrega'].choices = self.get_Profiles()
        self.fields['persona_recibe'].choices = self.get_Profiles()

    def get_Estados(self, _opciones):
        # Prefix the supplied choice tuples with an empty default option.
        opciones = [('', '-------')]
        for registro in _opciones:
            opciones.append(registro)
        return opciones

    def get_Almacenes(self):
        # (id, descripcion) choices for every Almacen plus an empty default.
        almacen = [('', '-------')]
        registros = Almacen.objects.all()
        for registro in registros:
            almacen.append(
                (
                    registro.id,
                    "%s" % (registro.descripcion)
                )
            )
        return almacen

    def get_Profiles(self):
        # (id, full name) choices for every user Profile plus an empty default.
        persona = [('', '-------')]
        registros = Profile.objects.all()
        for registro in registros:
            persona.append(
                (
                    registro.id,
                    registro.user.get_full_name()
                )
            )
        return persona
# ----------------- MOVIMIENTOS ----------------- #
class InventarioFiltersForm(Form):
    """Filter form for the global inventory-movements listing.

    Combines every filter used by the movement views: tipo, clasificacion,
    estado, almacenes, personas, orden de trabajo, articulo, proveedor and
    date range. Choice fields are populated per-instance in ``__init__``.
    """
    tipo = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    descripcion_encabezado = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    almacen_destino = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm select2'})
    )
    almacen_origen = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm select2'})
    )
    fecha_inicio = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    fecha_fin = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    estado = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    proveedor = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    persona_recibe = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm select2'})
    )
    persona_entrega = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm select2'})
    )
    articulo = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm select2'})
    )
    orden_trabajo = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm select2'})
    )
    clasificacion = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm select2'})
    )

    def __init__(self, *args, **kwargs):
        super(InventarioFiltersForm, self).__init__(*args, **kwargs)
        # Choices are loaded on every instantiation so newly created DB rows
        # appear without restarting the process.
        self.fields['tipo'].choices = self.get_Tipo(MOVIMIENTO_TIPO)
        self.fields['almacen_destino'].choices = self.get_Almacenes()
        self.fields['almacen_origen'].choices = self.get_Almacenes()
        self.fields['persona_entrega'].choices = self.get_Profiles()
        self.fields['persona_recibe'].choices = self.get_Profiles()
        self.fields['orden_trabajo'].choices = self.get_Ordenes()
        self.fields['estado'].choices = self.get_Estados(MOVIMIENTO_ESTADO)
        self.fields['clasificacion'].choices = self.get_Clasificacion(
            MOVIMIENTO_CLASIFICACION)
        self.fields['articulo'].choices = self.get_Articulos()

    def get_Tipo(self, _opciones):
        """Return the given (value, label) pairs prefixed with an empty choice."""
        return [('', '-------')] + list(_opciones)

    def get_Almacenes(self):
        """Return (id, descripcion) choices for every Almacen, plus an empty choice."""
        return [('', '-------')] + [
            (registro.id, "%s" % (registro.descripcion))
            for registro in Almacen.objects.all()
        ]

    def get_Estados(self, _opciones):
        """Return the given (value, label) pairs prefixed with an empty choice."""
        return [('', '-------')] + list(_opciones)

    def get_Profiles(self):
        """Return (id, full name) choices for every Profile, plus an empty choice."""
        return [('', '-------')] + [
            (registro.id, registro.user.get_full_name())
            for registro in Profile.objects.all()
        ]

    def get_Ordenes(self):
        """Return (id, "(equipo) descripcion") choices for every OrdenTrabajo."""
        return [('', '-------')] + [
            (registro.id, "(%s) %s" % (registro.equipo, registro.descripcion))
            for registro in OrdenTrabajo.objects.all()
        ]

    def get_Clasificacion(self, _opciones):
        """Return the given (value, label) pairs prefixed with an empty choice."""
        return [('', '-------')] + list(_opciones)

    def get_Articulos(self):
        """Return (id, "(clave) descripcion") choices for every Articulo.

        A missing clave is rendered as "-" in the label.
        """
        return [('', '-------')] + [
            (registro.id,
             "(%s) %s" % ("-" if registro.clave is None else registro.clave,
                          registro.descripcion))
            for registro in Articulo.objects.all()
        ]
class InventarioForm(ModelForm):
    """Create/edit form for a MovimientoCabecera (inventory movement header)."""

    class Meta:
        model = MovimientoCabecera
        fields = [
            'tipo',
            'clasificacion',
            'descripcion',
            'almacen_origen',
            'almacen_destino',
            'fecha',
            'persona_recibe',
            'persona_entrega',
            'proveedor'
        ]
        # All widgets share the compact Bootstrap styling used throughout this
        # module; 'fecha' additionally carries the datepicker format attribute.
        widgets = {
            'tipo': Select(attrs={'class': 'form-control input-sm'}),
            'clasificacion': Select(attrs={'class': 'form-control input-sm'}),
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'almacen_origen': Select(attrs={'class': 'form-control input-sm'}),
            'almacen_destino': Select(attrs={'class': 'form-control input-sm'}),
            'fecha': TextInput(attrs={'class': 'form-control input-sm',
                                      'data-date-format': 'yyyy-mm-dd'}),
            'persona_recibe': Select(attrs={'class': 'form-control input-sm'}),
            'persona_entrega': Select(attrs={'class': 'form-control input-sm'}),
            'proveedor': TextInput(attrs={'class': 'form-control input-sm'}),
        }
        # Accented display labels for the fields whose verbose names need them.
        labels = {
            'clasificacion': 'Clasificación',
            'descripcion': 'Descripción',
        }
# ------------------------ SALIDAS -------------------------- #
class SalidaPersonalFiltersForm(Form):
    """Filter form for listing 'salida a personal' (personnel-withdrawal) movements.

    Choice fields are populated per-instance in ``__init__`` from the
    database (Almacen, Profile) and from the MOVIMIENTO_ESTADO constant.
    """
    descripcion = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    almacen_origen = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    fecha_inicio = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    fecha_fin = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    persona_entrega = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    persona_recibe = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    estado = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )

    def __init__(self, *args, **kwargs):
        super(SalidaPersonalFiltersForm, self).__init__(*args, **kwargs)
        # Choices are loaded on every instantiation so newly created DB rows
        # appear without restarting the process.
        self.fields['almacen_origen'].choices = self.get_Almacenes()
        self.fields['persona_recibe'].choices = self.get_Profiles()
        self.fields['persona_entrega'].choices = self.get_Profiles()
        self.fields['estado'].choices = self.get_Estados(MOVIMIENTO_ESTADO)

    def get_Almacenes(self):
        """Return (id, descripcion) choices for every Almacen, plus an empty choice."""
        return [('', '-------')] + [
            (registro.id, "%s" % (registro.descripcion))
            for registro in Almacen.objects.all()
        ]

    def get_Profiles(self):
        """Return (id, full name) choices for every Profile, plus an empty choice."""
        return [('', '-------')] + [
            (registro.id, registro.user.get_full_name())
            for registro in Profile.objects.all()
        ]

    def get_Estados(self, _opciones):
        """Return the given (value, label) pairs prefixed with an empty choice."""
        return [('', '-------')] + list(_opciones)
class SalidaPersonalForm(ModelForm):
    """Create/edit form for a personnel-withdrawal movement header."""

    def __init__(self, *args, **kwargs):
        super(SalidaPersonalForm, self).__init__(*args, **kwargs)
        # These fields are mandatory for this movement type.
        for nombre in ('almacen_origen', 'persona_entrega', 'persona_recibe'):
            self.fields[nombre].required = True

    class Meta:
        model = MovimientoCabecera
        fields = [
            'descripcion',
            'fecha',
            'almacen_origen',
            'persona_entrega',
            'persona_recibe',
        ]
        widgets = {
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'almacen_origen': Select(attrs={'class': 'form-control input-sm'}),
            'fecha': TextInput(attrs={'class': 'form-control input-sm',
                                      'data-date-format': 'yyyy-mm-dd'}),
            'persona_entrega': Select(attrs={'class': 'form-control input-sm'}),
            'persona_recibe': Select(attrs={'class': 'form-control input-sm'}),
        }
class SalidaOrdenTrabajoFiltersForm(Form):
    """Filter form for listing work-order withdrawal movements.

    Choice fields are populated per-instance in ``__init__``.  Note that
    ``persona_entrega`` is a free-text filter here, unlike the select-based
    person filters on the other movement forms.
    """
    descripcion = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    almacen_origen = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    fecha_inicio = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    fecha_fin = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    persona_entrega = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    persona_recibe = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    estado = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    orden_trabajo = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )

    def __init__(self, *args, **kwargs):
        super(SalidaOrdenTrabajoFiltersForm, self).__init__(*args, **kwargs)
        # Choices are loaded on every instantiation so newly created DB rows
        # appear without restarting the process.
        self.fields['almacen_origen'].choices = self.get_Almacenes()
        self.fields['persona_recibe'].choices = self.get_Profiles()
        self.fields['estado'].choices = self.get_Estados(MOVIMIENTO_ESTADO)
        self.fields['orden_trabajo'].choices = self.get_Ordenes()

    def get_Almacenes(self):
        """Return (id, descripcion) choices for every Almacen, plus an empty choice."""
        return [('', '-------')] + [
            (registro.id, "%s" % (registro.descripcion))
            for registro in Almacen.objects.all()
        ]

    def get_Profiles(self):
        """Return (id, full name) choices for every Profile, plus an empty choice."""
        return [('', '-------')] + [
            (registro.id, registro.user.get_full_name())
            for registro in Profile.objects.all()
        ]

    def get_Estados(self, _opciones):
        """Return the given (value, label) pairs prefixed with an empty choice."""
        return [('', '-------')] + list(_opciones)

    def get_Ordenes(self):
        """Return (id, "(id) descripcion") choices for every OrdenTrabajo.

        NOTE(review): the label embeds the order's id here, while
        InventarioFiltersForm.get_Ordenes embeds ``equipo`` -- confirm
        whether this divergence is intentional.
        """
        return [('', '-------')] + [
            (registro.id, "(%s) %s" % (registro.id, registro.descripcion))
            for registro in OrdenTrabajo.objects.all()
        ]
class SalidaOrdenTrabajoForm(ModelForm):
    """Create/edit form for a work-order withdrawal movement header."""

    def __init__(self, *args, **kwargs):
        super(SalidaOrdenTrabajoForm, self).__init__(*args, **kwargs)
        # These fields are mandatory for this movement type.
        for nombre in ('orden_trabajo', 'almacen_origen',
                       'persona_entrega', 'persona_recibe'):
            self.fields[nombre].required = True

    class Meta:
        model = MovimientoCabecera
        fields = [
            'descripcion',
            'fecha',
            'almacen_origen',
            'persona_entrega',
            'persona_recibe',
            'orden_trabajo',
        ]
        widgets = {
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'almacen_origen': Select(attrs={'class': 'form-control input-sm'}),
            'fecha': TextInput(attrs={'class': 'form-control input-sm',
                                      'data-date-format': 'yyyy-mm-dd'}),
            'persona_entrega': Select(attrs={'class': 'form-control input-sm'}),
            'persona_recibe': Select(attrs={'class': 'form-control input-sm'}),
            'orden_trabajo': Select(attrs={'class': 'form-control input-sm'}),
        }
class SalidaAjusteFiltersForm(Form):
    """Filter form for listing stock-adjustment withdrawal movements.

    Choice fields are populated per-instance in ``__init__`` from the
    Almacen table and the MOVIMIENTO_ESTADO constant.
    """
    descripcion = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    almacen_origen = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    fecha_inicio = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    fecha_fin = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    estado = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )

    def __init__(self, *args, **kwargs):
        super(SalidaAjusteFiltersForm, self).__init__(*args, **kwargs)
        # Choices are loaded on every instantiation so newly created DB rows
        # appear without restarting the process.
        self.fields['almacen_origen'].choices = self.get_Almacenes()
        self.fields['estado'].choices = self.get_Estados(MOVIMIENTO_ESTADO)

    def get_Almacenes(self):
        """Return (id, descripcion) choices for every Almacen, plus an empty choice."""
        return [('', '-------')] + [
            (registro.id, "%s" % (registro.descripcion))
            for registro in Almacen.objects.all()
        ]

    def get_Estados(self, _opciones):
        """Return the given (value, label) pairs prefixed with an empty choice."""
        return [('', '-------')] + list(_opciones)
class SalidaAjusteForm(ModelForm):
    """Create/edit form for a stock-adjustment withdrawal movement header."""

    def __init__(self, *args, **kwargs):
        super(SalidaAjusteForm, self).__init__(*args, **kwargs)
        # The source warehouse is mandatory for an adjustment.
        self.fields['almacen_origen'].required = True

    class Meta:
        model = MovimientoCabecera
        fields = [
            'descripcion',
            'almacen_origen',
            'fecha',
        ]
        widgets = {
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'almacen_origen': Select(attrs={'class': 'form-control input-sm'}),
            'fecha': TextInput(attrs={'class': 'form-control input-sm',
                                      'data-date-format': 'yyyy-mm-dd'}),
        }
class SalidaTraspasoFiltersForm(Form):
    """Filter form for listing 'salida por traspaso' (transfer-out) movements.

    Choice fields are populated per-instance in ``__init__`` from the
    database (Almacen, Profile) and from the MOVIMIENTO_ESTADO constant.
    """
    descripcion = CharField(
        widget=TextInput(attrs={'class': 'form-control input-sm'})
    )
    almacen_origen = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    almacen_destino = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    fecha_inicio = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    fecha_fin = CharField(
        widget=TextInput(attrs={'class': 'form-control pull-right input-sm',
                                'data-date-format': 'yyyy-mm-dd'})
    )
    persona_entrega = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    persona_recibe = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )
    estado = ChoiceField(
        widget=Select(attrs={'class': 'form-control input-sm'})
    )

    def __init__(self, *args, **kwargs):
        super(SalidaTraspasoFiltersForm, self).__init__(*args, **kwargs)
        # Choices are loaded on every instantiation so newly created DB rows
        # appear without restarting the process.
        self.fields['almacen_origen'].choices = self.get_Almacenes()
        self.fields['almacen_destino'].choices = self.get_Almacenes()
        self.fields['persona_entrega'].choices = self.get_Profiles()
        self.fields['persona_recibe'].choices = self.get_Profiles()
        self.fields['estado'].choices = self.get_Estados(MOVIMIENTO_ESTADO)

    def get_Almacenes(self):
        """Return (id, descripcion) choices for every Almacen, plus an empty choice."""
        return [('', '-------')] + [
            (registro.id, "%s" % (registro.descripcion))
            for registro in Almacen.objects.all()
        ]

    def get_Estados(self, _opciones):
        """Return the given (value, label) pairs prefixed with an empty choice."""
        return [('', '-------')] + list(_opciones)

    def get_Profiles(self):
        """Return (id, full name) choices for every Profile, plus an empty choice."""
        return [('', '-------')] + [
            (registro.id, registro.user.get_full_name())
            for registro in Profile.objects.all()
        ]
class SalidaTraspasoForm(ModelForm):
    """Create/edit form for a warehouse-transfer movement header."""

    def __init__(self, *args, **kwargs):
        super(SalidaTraspasoForm, self).__init__(*args, **kwargs)
        # Both warehouses are mandatory for a transfer.
        for nombre in ('almacen_origen', 'almacen_destino'):
            self.fields[nombre].required = True

    class Meta:
        model = MovimientoCabecera
        fields = [
            'descripcion',
            'almacen_origen',
            'almacen_destino',
            'persona_entrega',
            'persona_recibe',
            'fecha',
        ]
        widgets = {
            'descripcion': TextInput(attrs={'class': 'form-control input-sm'}),
            'almacen_origen': Select(attrs={'class': 'form-control input-sm'}),
            'almacen_destino': Select(attrs={'class': 'form-control input-sm'}),
            'persona_entrega': Select(attrs={'class': 'form-control input-sm'}),
            'persona_recibe': Select(attrs={'class': 'form-control input-sm'}),
            'fecha': TextInput(attrs={'class': 'form-control input-sm',
                                      'data-date-format': 'yyyy-mm-dd'}),
        }
| 29.432653
| 110
| 0.517903
| 3,137
| 36,055
| 5.813835
| 0.051323
| 0.072925
| 0.102095
| 0.153142
| 0.853438
| 0.840937
| 0.827558
| 0.806613
| 0.78978
| 0.768944
| 0
| 0.000379
| 0.342088
| 36,055
| 1,224
| 111
| 29.456699
| 0.768475
| 0.01359
| 0
| 0.675649
| 0
| 0
| 0.194019
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047904
| false
| 0
| 0.016966
| 0
| 0.199601
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
99c65bc071f98eff2d077387ed896127f560920f
| 319
|
py
|
Python
|
src/final_exam/q_employee/hourly_employee.py
|
acc-cosc-1336/cosc-1336-spring-2018-MJBrady13
|
f8d1ca9073058574a599d9047274b2b7ac8c7bb6
|
[
"MIT"
] | null | null | null |
src/final_exam/q_employee/hourly_employee.py
|
acc-cosc-1336/cosc-1336-spring-2018-MJBrady13
|
f8d1ca9073058574a599d9047274b2b7ac8c7bb6
|
[
"MIT"
] | 2
|
2018-03-01T20:35:59.000Z
|
2018-03-10T01:48:40.000Z
|
src/final_exam/q_employee/hourly_employee.py
|
acc-cosc-1336/cosc-1336-spring-2018-MJBrady13
|
f8d1ca9073058574a599d9047274b2b7ac8c7bb6
|
[
"MIT"
] | null | null | null |
from employee import Employee
class HourlyEmployee(Employee):
    """An Employee paid by the hour."""

    def __init__(self, hourly_rate, worked_hours, employee_id, name):
        # NOTE(review): employee_id and name are accepted but never stored nor
        # forwarded to Employee.__init__ -- confirm whether the base class
        # should be initialized with them here.
        self.hourly_rate = hourly_rate
        self.worked_hours = worked_hours

    @staticmethod
    def calculate(hourly_rate, worked_hours):
        """Return gross pay for the given rate and hours.

        Declared @staticmethod to fix a bug: the original definition had no
        ``self`` parameter, so calling it on an instance bound the instance
        to ``hourly_rate``. Existing HourlyEmployee.calculate(rate, hours)
        call sites keep working unchanged.
        """
        return hourly_rate * worked_hours
| 24.538462
| 70
| 0.702194
| 38
| 319
| 5.5
| 0.421053
| 0.239234
| 0.229665
| 0.301435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.231975
| 319
| 12
| 71
| 26.583333
| 0.853061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.142857
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
99e18480d04212b32b654d8c5294df9e8a818505
| 177
|
py
|
Python
|
cobra_utils/__init__.py
|
earmingol/cobra_utils
|
c04d2cd0aa13da247843ad51bd1fe2d638d7af50
|
[
"MIT"
] | 1
|
2021-02-12T17:43:53.000Z
|
2021-02-12T17:43:53.000Z
|
cobra_utils/__init__.py
|
earmingol/cobra_utils
|
c04d2cd0aa13da247843ad51bd1fe2d638d7af50
|
[
"MIT"
] | 1
|
2019-08-15T23:28:39.000Z
|
2019-08-15T23:28:39.000Z
|
cobra_utils/__init__.py
|
earmingol/cobra_utils
|
c04d2cd0aa13da247843ad51bd1fe2d638d7af50
|
[
"MIT"
] | 1
|
2019-08-15T22:28:35.000Z
|
2019-08-15T22:28:35.000Z
|
# -*- coding: utf-8 -*-
"""Top-level package for cobra_utils.

Re-exports the io, query and topology submodules so callers can access
them as attributes of the package after ``import cobra_utils``.
"""
from __future__ import absolute_import

from cobra_utils import io
from cobra_utils import query
from cobra_utils import topology

# Package version string.
__version__ = "0.3.1"
| 19.666667
| 38
| 0.774011
| 27
| 177
| 4.62963
| 0.592593
| 0.216
| 0.336
| 0.48
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02649
| 0.146893
| 177
| 9
| 39
| 19.666667
| 0.801325
| 0.118644
| 0
| 0
| 0
| 0
| 0.032258
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
99e647e5a02615fe84574667d3137a3fc8528e67
| 170
|
py
|
Python
|
utils/hashing.py
|
omgthatsjackie/keeper
|
3426e1ac54b08cd513b9d46dbbb8502abbd8e394
|
[
"MIT"
] | 1
|
2021-01-02T16:08:37.000Z
|
2021-01-02T16:08:37.000Z
|
utils/hashing.py
|
omgthatsjackie/keeper
|
3426e1ac54b08cd513b9d46dbbb8502abbd8e394
|
[
"MIT"
] | null | null | null |
utils/hashing.py
|
omgthatsjackie/keeper
|
3426e1ac54b08cd513b9d46dbbb8502abbd8e394
|
[
"MIT"
] | null | null | null |
from hashlib import pbkdf2_hmac
# SECURITY(review): a static, hard-coded salt largely defeats the purpose of
# salting -- identical passwords hash identically, and the salt is visible in
# source control. A per-user random salt (secrets.token_bytes) stored next to
# each hash would be the fix; the value is left unchanged here because any
# previously stored hashes depend on it.
salt = b'bruhbruhbruh'


def hash_password(password, iterations=100000):
    """Return a 50-hex-char PBKDF2-HMAC-SHA256 digest of *password*.

    Parameters
    ----------
    password : str
        Plain-text password; encoded as UTF-8 before hashing.
    iterations : int, optional
        PBKDF2 iteration count. Defaults to 100000, matching the original
        hard-coded value, so existing hashes remain verifiable.
    """
    # [0:50] truncates the 64-hex-char digest to 50 characters, presumably to
    # fit a fixed-width storage column -- TODO confirm against the schema.
    return pbkdf2_hmac(
        'sha256', password.encode('utf-8'), salt, iterations
    ).hex()[0:50]
| 28.333333
| 84
| 0.741176
| 25
| 170
| 4.92
| 0.8
| 0.162602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099338
| 0.111765
| 170
| 6
| 84
| 28.333333
| 0.715232
| 0
| 0
| 0
| 0
| 0
| 0.134503
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.5
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.