hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
aae28c7339af164d7c6311254a641d03c35faa40
| 2,835
|
py
|
Python
|
bilstm-crf-model/utils_ner.py
|
dedeguo/knowledge_graph_construction
|
5b6ef76b56d04811ed2bc9321719a433ba042358
|
[
"Apache-2.0"
] | null | null | null |
bilstm-crf-model/utils_ner.py
|
dedeguo/knowledge_graph_construction
|
5b6ef76b56d04811ed2bc9321719a433ba042358
|
[
"Apache-2.0"
] | null | null | null |
bilstm-crf-model/utils_ner.py
|
dedeguo/knowledge_graph_construction
|
5b6ef76b56d04811ed2bc9321719a433ba042358
|
[
"Apache-2.0"
] | null | null | null |
def get_entity_bios(seq, id2label):
    """Gets entities from a BIOS-tagged sequence.

    Args:
        seq (list): sequence of labels; elements may be label strings or
            label ids (ids are mapped through ``id2label``).
        id2label (dict): mapping from label id to label string; only
            consulted for elements of ``seq`` that are not strings.

    Returns:
        list: list of ``[chunk_type, chunk_start, chunk_end]`` with
        inclusive indices.

    Example:
        >>> get_entity_bios(['B-PER', 'I-PER', 'O', 'S-LOC'], {})
        [['PER', 0, 1], ['LOC', 3, 3]]
    """
    chunks = []
    chunk = [-1, -1, -1]
    for indx, tag in enumerate(seq):
        if not isinstance(tag, str):
            tag = id2label[tag]
        if tag.startswith("S-"):
            # Single-token entity: flush any open chunk, then emit this one.
            if chunk[2] != -1:
                chunks.append(chunk)
            # maxsplit=1 so entity types that contain '-' are kept intact
            # (the original split('-')[1] truncated e.g. 'S-GPE-LOC' to 'GPE').
            chunk = [tag.split('-', 1)[1], indx, indx]
            chunks.append(chunk)
            # Reset with a list; the original reset to a tuple here, which
            # would raise TypeError if it were ever mutated before being
            # replaced.
            chunk = [-1, -1, -1]
        if tag.startswith("B-"):
            # Start of a (multi-token) entity: flush any open chunk first.
            if chunk[2] != -1:
                chunks.append(chunk)
            chunk = [-1, -1, -1]
            chunk[1] = indx
            chunk[0] = tag.split('-', 1)[1]
        elif tag.startswith('I-') and chunk[1] != -1:
            # Continuation: only extend if the type matches the open chunk.
            _type = tag.split('-', 1)[1]
            if _type == chunk[0]:
                chunk[2] = indx
            if indx == len(seq) - 1:
                chunks.append(chunk)
        else:
            # 'O' (or dangling 'I-'): close and reset any finished chunk.
            if chunk[2] != -1:
                chunks.append(chunk)
            chunk = [-1, -1, -1]
    return chunks
def get_entity_bio(seq, id2label):
    """Gets entities from a BIO-tagged sequence.

    Args:
        seq (list): sequence of labels; elements may be label strings or
            label ids (ids are mapped through ``id2label``).
        id2label (dict): mapping from label id to label string; only
            consulted for elements of ``seq`` that are not strings.

    Returns:
        list: list of ``[chunk_type, chunk_start, chunk_end]`` with
        inclusive indices.

    Example:
        >>> get_entity_bio(['B-PER', 'I-PER', 'O', 'B-LOC'], {})
        [['PER', 0, 1], ['LOC', 3, 3]]
    """
    chunks = []
    chunk = [-1, -1, -1]
    for indx, tag in enumerate(seq):
        if not isinstance(tag, str):
            tag = id2label[tag]
        if tag.startswith("B-"):
            # Start of an entity: flush any open chunk first.
            if chunk[2] != -1:
                chunks.append(chunk)
            # maxsplit=1 so entity types that contain '-' are kept intact
            # (the original split('-')[1] truncated e.g. 'B-GPE-LOC' to 'GPE').
            chunk = [tag.split('-', 1)[1], indx, indx]
            if indx == len(seq) - 1:
                chunks.append(chunk)
        elif tag.startswith('I-') and chunk[1] != -1:
            # Continuation: only extend if the type matches the open chunk.
            _type = tag.split('-', 1)[1]
            if _type == chunk[0]:
                chunk[2] = indx
            if indx == len(seq) - 1:
                chunks.append(chunk)
        else:
            # 'O' (or dangling 'I-'): close and reset any finished chunk.
            if chunk[2] != -1:
                chunks.append(chunk)
            chunk = [-1, -1, -1]
    return chunks
def get_entities(seq, id2label, markup='bios'):
    """Dispatch entity extraction based on the tagging scheme.

    :param seq: sequence of labels (strings or ids).
    :param id2label: mapping from label id to label string.
    :param markup: tagging scheme, either ``'bio'`` or ``'bios'``.
    :return: list of ``[chunk_type, chunk_start, chunk_end]``.
    :raises ValueError: if ``markup`` is not ``'bio'`` or ``'bios'``.
    """
    # Raise instead of assert: asserts are stripped under `python -O`,
    # which would let an invalid markup silently fall through.
    if markup not in ('bio', 'bios'):
        raise ValueError("markup must be 'bio' or 'bios', got %r" % (markup,))
    if markup == 'bio':
        return get_entity_bio(seq, id2label)
    return get_entity_bios(seq, id2label)
| 28.928571
| 59
| 0.45291
| 340
| 2,835
| 3.708824
| 0.158824
| 0.028549
| 0.055512
| 0.128469
| 0.8636
| 0.808882
| 0.794607
| 0.732752
| 0.712133
| 0.712133
| 0
| 0.042334
| 0.383422
| 2,835
| 97
| 60
| 29.226804
| 0.679062
| 0.208113
| 0
| 0.859375
| 0
| 0
| 0.013679
| 0
| 0
| 0
| 0
| 0
| 0.015625
| 1
| 0.046875
| false
| 0
| 0
| 0
| 0.109375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2ac744eb42dd4bb7bed1f5f6c5badc17bde609c5
| 1,516
|
py
|
Python
|
DBQueue/benchmark.py
|
EggPool/bis-temp
|
e5c9fae5df70f2ce930d45467e30eea733dd0697
|
[
"MIT"
] | null | null | null |
DBQueue/benchmark.py
|
EggPool/bis-temp
|
e5c9fae5df70f2ce930d45467e30eea733dd0697
|
[
"MIT"
] | null | null | null |
DBQueue/benchmark.py
|
EggPool/bis-temp
|
e5c9fae5df70f2ce930d45467e30eea733dd0697
|
[
"MIT"
] | null | null | null |
from socket import socket
import connections
from time import time
import threading
NUM = 20  # number of client threads spawned per benchmark run
def do_A(index: int):
    """Send a "storeA" request carrying *index* to the local server and print the reply.

    Connects to 127.0.0.1:5555 with a 10 s timeout. The socket is closed
    deterministically via the context manager (the original leaked it
    until garbage collection).
    """
    with socket() as s:
        s.settimeout(10)
        s.connect(("127.0.0.1", 5555))
        connections.send(s, "storeA")
        connections.send(s, index)
        res = connections.receive(s)
        print(res)
def do_B(index: int):
    """Send a "storeB" request carrying *index* to the local server and print the reply.

    Connects to 127.0.0.1:5555 with a 10 s timeout. The socket is closed
    deterministically via the context manager (the original leaked it
    until garbage collection).
    """
    with socket() as s:
        s.settimeout(10)
        s.connect(("127.0.0.1", 5555))
        connections.send(s, "storeB")
        connections.send(s, index)
        res = connections.receive(s)
        print(res)
if __name__ == "__main__":

    def run_benchmark(worker, label):
        """Spawn NUM threads running *worker*, wait for all, print timings.

        The original repeated this sequence verbatim for A and B; the
        printed output format is preserved exactly.
        """
        start = time()
        print(f"{start} -Start")
        threads = [
            threading.Thread(target=worker, args=(i,)) for i in range(NUM)
        ]
        now = time()
        print(f"{now} - {NUM} threads created")
        for t in threads:
            t.start()
        now = time()
        print(f"{now} - All threads started")
        for t in threads:
            t.join()
        now = time()
        print(f"{now} - All threads ended")
        print(f"{now - start} - Total Time {label} ")

    run_benchmark(do_A, "A")
    print("")
    run_benchmark(do_B, "B")
| 22.969697
| 57
| 0.573879
| 212
| 1,516
| 4.04717
| 0.235849
| 0.06993
| 0.09324
| 0.090909
| 0.86014
| 0.86014
| 0.86014
| 0.86014
| 0.86014
| 0.86014
| 0
| 0.023466
| 0.269129
| 1,516
| 65
| 58
| 23.323077
| 0.750903
| 0
| 0
| 0.736842
| 0
| 0
| 0.188654
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035088
| false
| 0
| 0.070175
| 0
| 0.105263
| 0.22807
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2d476437737bdbe0ca9d6c6d996509e854521bfe
| 5,721
|
py
|
Python
|
pkgs/sdk-pkg/src/genie/libs/sdk/apis/tests/iosxe/sisf/configure/clear_device_tracking_database/test_api_clear_device_tracking_database.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | 1
|
2022-01-16T10:00:24.000Z
|
2022-01-16T10:00:24.000Z
|
pkgs/sdk-pkg/src/genie/libs/sdk/apis/tests/iosxe/sisf/configure/clear_device_tracking_database/test_api_clear_device_tracking_database.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | null | null | null |
pkgs/sdk-pkg/src/genie/libs/sdk/apis/tests/iosxe/sisf/configure/clear_device_tracking_database/test_api_clear_device_tracking_database.py
|
patrickboertje/genielibs
|
61c37aacf3dd0f499944555e4ff940f92f53dacb
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from pyats.topology import loader
from genie.libs.sdk.apis.iosxe.sisf.configure import clear_device_tracking_database
class TestClearDeviceTrackingDatabase(unittest.TestCase):
    """Tests for the iosxe sisf ``clear_device_tracking_database`` API.

    Each test invokes the API against a mocked device with a different
    ``options`` payload and verifies the call completes and returns None.
    """

    @classmethod
    def setUpClass(cls):
        # The original named this parameter ``self``; for a classmethod the
        # conventional (and less misleading) name is ``cls`` — it is bound
        # to the class either way.
        testbed = """
        devices:
          sisf-c9500-21-8-26-2:
            connections:
              defaults:
                class: unicon.Unicon
              a:
                command: mock_device_cli --os iosxe --mock_data_dir mock_data --state connect
                protocol: unknown
            os: iosxe
            platform: ios
            type: ios
        """
        cls.testbed = loader.load(testbed)
        cls.device = cls.testbed.devices['sisf-c9500-21-8-26-2']
        # Suppress init commands so only the mocked dialog is exercised.
        cls.device.connect(
            learn_hostname=True,
            init_config_commands=[],
            init_exec_commands=[]
        )

    def _assert_clear(self, options):
        """Invoke the API with *options* and check that it returns None."""
        result = clear_device_tracking_database(device=self.device, options=options)
        self.assertEqual(result, None)

    def test_clear_device_tracking_database(self):
        self._assert_clear(None)

    def test_clear_device_tracking_database_1(self):
        self._assert_clear([{'force': True}])

    def test_clear_device_tracking_database_2(self):
        self._assert_clear([{'policy': 'test'}])

    def test_clear_device_tracking_database_3(self):
        self._assert_clear([{'vlanid': 10}])

    def test_clear_device_tracking_database_4(self):
        self._assert_clear([{'interface': {'force': True, 'interface': 'te1/0/1'}}])

    def test_clear_device_tracking_database_5(self):
        self._assert_clear([{'interface': {'interface': 'te1/0/1', 'vlanid': 10}}])

    def test_clear_device_tracking_database_6(self):
        self._assert_clear([{'mac': {'address': 'dead.beef.0001', 'target': {'force': True}}}])

    def test_clear_device_tracking_database_7(self):
        self._assert_clear([{'mac': {'address': 'dead.beef.0001', 'target': {'interface': 'te1/0/1'}}}])

    def test_clear_device_tracking_database_8(self):
        self._assert_clear([{'mac': {'address': 'dead.beef.0001', 'target': {'policy': 'test'}}}])

    def test_clear_device_tracking_database_9(self):
        self._assert_clear([{'mac': {'address': 'dead.beef.0001', 'target': {'vlanid': 10}}}])

    def test_clear_device_tracking_database_10(self):
        self._assert_clear([{'address': {'address': '20.20.20.20', 'target': {'force': True}}}])

    def test_clear_device_tracking_database_11(self):
        self._assert_clear([{'address': {'address': '20.20.20.20', 'target': {'interface': 'te1/0/1'}}}])

    def test_clear_device_tracking_database_12(self):
        self._assert_clear([{'address': {'address': '20.20.20.20', 'target': {'policy': 'test'}}}])

    def test_clear_device_tracking_database_13(self):
        self._assert_clear([{'address': {'address': '20.20.20.20', 'target': {'vlanid': 10}}}])

    def test_clear_device_tracking_database_14(self):
        self._assert_clear([{'prefix': {'address': '3001::1/48', 'target': {'force': True}}}])

    def test_clear_device_tracking_database_15(self):
        self._assert_clear([{'prefix': {'address': '3001::1/48', 'target': {'interface': 'te1/0/1'}}}])

    def test_clear_device_tracking_database_16(self):
        self._assert_clear([{'prefix': {'address': '3001::1/48', 'target': {'policy': 'test'}}}])

    def test_clear_device_tracking_database_17(self):
        self._assert_clear([{'prefix': {'address': '3001::1/48', 'target': {'vlanid': 10}}}])
| 47.675
| 154
| 0.682573
| 667
| 5,721
| 5.566717
| 0.14093
| 0.109615
| 0.189335
| 0.269055
| 0.869917
| 0.869917
| 0.860759
| 0.860759
| 0.860759
| 0.842984
| 0
| 0.031466
| 0.188953
| 5,721
| 119
| 155
| 48.07563
| 0.76875
| 0
| 0
| 0.367347
| 0
| 0
| 0.162909
| 0.003671
| 0
| 0
| 0
| 0
| 0.183673
| 1
| 0.193878
| false
| 0
| 0.030612
| 0
| 0.234694
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2d65d4466e142e05248d0e588eb9d04a06bdac4a
| 226,020
|
py
|
Python
|
spec/contracts/tariff_spec.py
|
gisce/enerdata
|
05a4afe61f20dcbcff070c71e96f6fd10955c216
|
[
"MIT"
] | 7
|
2015-10-09T11:51:18.000Z
|
2022-01-12T13:40:26.000Z
|
spec/contracts/tariff_spec.py
|
gisce/enerdata
|
05a4afe61f20dcbcff070c71e96f6fd10955c216
|
[
"MIT"
] | 93
|
2015-12-12T14:34:21.000Z
|
2021-08-12T08:29:25.000Z
|
spec/contracts/tariff_spec.py
|
gisce/enerdata
|
05a4afe61f20dcbcff070c71e96f6fd10955c216
|
[
"MIT"
] | 5
|
2015-12-12T22:19:55.000Z
|
2022-01-12T13:40:30.000Z
|
from expects.testing import failure
from expects import *
from enerdata.contracts.tariff import *
from datetime import datetime, timedelta
from enerdata.datetime.timezone import TIMEZONE
from mamba import before, context, description, it
# mamba BDD spec: `description`/`it` blocks are rewritten by mamba's AST
# loader into test cases; `failure` asserts the body raises.
with description('Create a period'):
    with it('accepts "te"'):
        # 'te' = energy term; constructing must not raise.
        TariffPeriod('P1', 'te')
    with it('accepts "tp"'):
        # 'tp' = power term; constructing must not raise.
        TariffPeriod('P1', 'tp')
    with it('fails when is not "te" nor "tp"'):
        with failure:
            TariffPeriod('P1', 'foo')
    with it('should raise an exception if range of hours is invalid'):
        # Overlapping ranges (0-12 and 11-23) must be rejected for both
        # winter and summer hour sets.
        expect(lambda: TariffPeriod('P1', 'te', winter_hours=[
            (0, 12), (11, 23)
        ])).to(raise_error(ValueError, 'Invalid winter hours'))
        expect(lambda: TariffPeriod('P1', 'te', summer_hours=[
            (0, 12), (11, 23)
        ])).to(raise_error(ValueError, 'Invalid summer hours'))
# mamba BDD spec for TariffPeriod defaults, hour-range validation and
# total-hour accounting.
with description('A period'):
    with it('should have a range of hours 24 for winter by default'):
        p1 = TariffPeriod('P1', 'te')
        assert p1.winter_hours == [(0, 24)]
    with it('should be possible to set the range of winter hours in creation'):
        p1 = TariffPeriod('P1', 'te', winter_hours=[(0, 12)])
        assert p1.winter_hours == [(0, 12)]
    with it('should have a range of hours 24 for summer by default'):
        p1 = TariffPeriod('P1', 'te')
        assert p1.summer_hours == [(0, 24)]
    with it('should be possible to set the range of sumer hours in creation'):
        p1 = TariffPeriod('P1', 'te', summer_hours=[(0, 12)])
        assert p1.summer_hours == [(0, 12)]
    with it('has to validate range hours is correct'):
        # Valid: in-bounds, ascending, non-overlapping half-open ranges.
        assert check_range_hours([(0, 12)]) is True
        assert check_range_hours([(0, 12), (12, 24)]) is True
        # Invalid: negative start, end past 24, empty range, descending
        # range, overlapping ranges.
        assert check_range_hours([(-1, 0)]) is False
        assert check_range_hours([(0, 25)]) is False
        assert check_range_hours([(0, 0)]) is False
        assert check_range_hours([(4, 1)]) is False
        assert check_range_hours([(0, 1), (0, 2)]) is False
        assert check_range_hours([(0, 12), (12, 24)]) is True
    with it('should know the total of hours in summer'):
        # (0-12) + (22-24) = 12 + 2 = 14 hours.
        p1 = TariffPeriod('P1', 'te', summer_hours=[(0, 12), (22, 24)])
        assert p1.total_summer_hours == 14
    with it('should know the total of hours in winter'):
        # (12-22) = 10 hours.
        p1 = TariffPeriod('P1', 'te', winter_hours=[(12, 22)])
        assert p1.total_winter_hours == 10
with context('A tariff'):
with before.all:
self.tariff = TariffPreTD('T1')
with it('periods should be a tuple type'):
assert isinstance(self.tariff.periods, tuple)
with it('should return the number of periods of te'):
self.tariff.periods = (
TariffPeriod('1', 'te', winter_hours=[(12, 22)], summer_hours=[(13, 23)]),
TariffPeriod('2', 'te', winter_hours=[(0, 12), (22, 24)], summer_hours=[(0, 13), (23, 24)])
)
assert self.tariff.get_number_of_periods() == 2
with it('should return the periods of energy'):
assert len(self.tariff.energy_periods) == 2
assert self.tariff.energy_periods.keys() == ['1', '2']
with it('should return the periods of power'):
assert len(self.tariff.power_periods) == 0
self.tariff.periods += (TariffPeriod('1', 'tp'),)
assert len(self.tariff.power_periods) == 1
assert self.tariff.power_periods.keys() == ['1']
with it('should have 24h of range ours in its energy periods'):
def set_periods():
self.tariff.periods = (
TariffPeriod('1', 'te', summer_hours=[(12, 22)]),
TariffPeriod('2', 'te', summer_hours=[(0, 12), (22, 23)])
)
expect(set_periods).to(raise_error(ValueError))
with it('should check range of hours'):
def set_periods():
self.tariff.periods = (
TariffPeriod('1', 'te', summer_hours=[(13, 23)]),
TariffPeriod('2', 'te', summer_hours=[(0, 12), (22, 24)])
)
expect(set_periods).to(raise_error(ValueError))
with it('should check range and hours if a holiday period is defined'):
def set_periods():
self.tariff.periods = (
TariffPeriod('P1', 'te', winter_hours=[(18, 22)], summer_hours=[(11, 15)]),
TariffPeriod('P2', 'te', winter_hours=[(8, 18), (22, 24)], summer_hours=[(8, 11), (15, 24)]),
TariffPeriod('P3', 'te', winter_hours=[(0, 8)], summer_hours=[(0, 8)]),
TariffPeriod('P4', 'te', holiday=True, winter_hours=[(18, 22)], summer_hours=[(11, 15)]),
TariffPeriod('P5', 'te', holiday=True, winter_hours=[(8, 18), (22, 24)], summer_hours=[(8, 11), (15, 24)]),
TariffPeriod('P6', 'te', holiday=True, winter_hours=[(0, 8)], summer_hours=[(1, 8)])
)
expect(set_periods).to(raise_error(ValueError, 'The sum of hours in summer (in holidays) must be 24h: [(1, 8), (8, 11), (11, 15), (15, 24)]'))
with it('should find the period by datetime'):
self.tariff.periods = (
TariffPeriod('P1', 'te', winter_hours=[(18, 22)], summer_hours=[(11, 15)]),
TariffPeriod('P2', 'te', winter_hours=[(8, 18), (22, 24)], summer_hours=[(8, 11), (15, 24)]),
TariffPeriod('P3', 'te', winter_hours=[(0, 8)], summer_hours=[(0, 8)]),
TariffPeriod('P4', 'te', holiday=True, winter_hours=[(18, 22)], summer_hours=[(11, 15)]),
TariffPeriod('P5', 'te', holiday=True, winter_hours=[(8, 18), (22, 24)], summer_hours=[(8, 11), (15, 24)]),
TariffPeriod('P6', 'te', holiday=True, winter_hours=[(0, 8)], summer_hours=[(0, 8)])
)
dt = TIMEZONE.localize(datetime(2015, 12, 24, 19, 0, 0))
period = self.tariff.get_period_by_date(dt)
assert period.code == 'P1'
dt = TIMEZONE.localize(datetime(2015, 12, 25, 19, 0, 0))
period = self.tariff.get_period_by_date(dt)
assert period.code == 'P4'
dt = TIMEZONE.localize(datetime(2015, 12, 27, 19, 0, 0))
period = self.tariff.get_period_by_date(dt)
assert period.code == 'P4'
dt = TIMEZONE.localize(datetime(2015, 12, 27, 19, 0, 0))
period = self.tariff.get_period_by_date(dt)
assert period.code == 'P4'
dt = TIMEZONE.localize(datetime(2015, 12, 27, 17, 0, 0))
period = self.tariff.get_period_by_date(dt)
assert period.code == 'P5'
dt = TIMEZONE.localize(datetime(2015, 12, 27, 1, 0, 0))
period = self.tariff.get_period_by_date(dt)
assert period.code == 'P6'
with it('should allow to check if a set of powers is correct'):
tari_T20A = T20A()
expect(lambda: tari_T20A.evaluate_powers([-10])).to(
raise_error(NotPositivePower))
expect(lambda: tari_T20A.evaluate_powers([0])).to(
raise_error(NotPositivePower))
expect(lambda: tari_T20A.evaluate_powers([5.55])).to(
raise_error(NotNormalizedPower))
assert tari_T20A.evaluate_powers([5.5])
expect(lambda: tari_T20A.evaluate_powers([5, 7])).to(
raise_error(IncorrectPowerNumber, 'Expected 1 power(s) and got 2'))
expect(lambda: tari_T20A.evaluate_powers([100])).to(
raise_error(IncorrectMaxPower))
tari_T30A = T30A()
expect(lambda: tari_T30A.evaluate_powers([-10, -5, 0])).to(
raise_error(NotPositivePower))
expect(lambda: tari_T30A.evaluate_powers([15, 15, 15])).to(
raise_error(IncorrectMaxPower))
expect(lambda: tari_T30A.evaluate_powers([16, 17.1, 16])).to(
raise_error(NotNormalizedPower))
expect(lambda: tari_T30A.evaluate_powers([14, 15.242, 15.242])).to(
raise_error(IncorrectMinPower))
assert tari_T30A.evaluate_powers([15.242, 15.242, 16.454])
expect(lambda: tari_T30A.evaluate_powers([16, 17])).to(
raise_error(IncorrectPowerNumber, 'Expected 3 power(s) and got 2'))
tari_T31A = T31A()
expect(lambda: tari_T31A.evaluate_powers([-10, -5, 0])).to(
raise_error(NotPositivePower))
assert tari_T31A.evaluate_powers([10, 13, 16])
expect(lambda: tari_T31A.evaluate_powers([16, 17])).to(
raise_error(IncorrectPowerNumber, 'Expected 3 power(s) and got 2'))
expect(lambda: tari_T31A.evaluate_powers([16, 20, 16])).to(
raise_error(NotAscendingPowers))
tari_T61A = T61A()
expect(lambda: tari_T61A.evaluate_powers([-10, -5, 0, 10, 20])).to(
raise_error(NotPositivePower))
assert tari_T61A.evaluate_powers([400, 410, 420, 430, 440, 451])
assert tari_T61A.evaluate_powers([500, 600, 700, 800, 900, 1000])
expect(lambda: tari_T61A.evaluate_powers([16, 17])).to(
raise_error(IncorrectPowerNumber, 'Expected 6 power(s) and got 2'))
expect(
lambda: tari_T61A.evaluate_powers([500, 600, 700, 700, 600, 500])
).to(
raise_error(NotAscendingPowers))
tari_T61B = T61B()
expect(lambda: tari_T61B.evaluate_powers([-10, -5, 0, 10, 20])).to(
raise_error(NotPositivePower))
assert tari_T61B.evaluate_powers([400, 410, 420, 430, 440, 451])
assert tari_T61B.evaluate_powers([500, 600, 700, 800, 900, 1000])
expect(lambda: tari_T61B.evaluate_powers([16, 17])).to(
raise_error(IncorrectPowerNumber, 'Expected 6 power(s) and got 2'))
expect(
lambda: tari_T61B.evaluate_powers([500, 600, 700, 700, 600, 500])
).to(
raise_error(NotAscendingPowers))
tari_T62 = T62()
expect(lambda: tari_T62.evaluate_powers([-10, -5, 0, 10, 20])).to(
raise_error(NotPositivePower))
assert tari_T62.evaluate_powers([400, 410, 420, 430, 440, 451])
assert tari_T62.evaluate_powers([500, 600, 700, 800, 900, 1000])
expect(lambda: tari_T62.evaluate_powers([16, 17])).to(
raise_error(IncorrectPowerNumber, 'Expected 6 power(s) and got 2'))
expect(
lambda: tari_T62.evaluate_powers([500, 600, 700, 700, 600, 500])
).to(
raise_error(NotAscendingPowers))
tari_T63 = T63()
expect(lambda: tari_T63.evaluate_powers([-10, -5, 0, 10, 20])).to(
raise_error(NotPositivePower))
assert tari_T63.evaluate_powers([400, 410, 420, 430, 440, 451])
assert tari_T63.evaluate_powers([500, 600, 700, 800, 900, 1000])
expect(lambda: tari_T63.evaluate_powers([16, 17])).to(
raise_error(IncorrectPowerNumber, 'Expected 6 power(s) and got 2'))
expect(
lambda: tari_T63.evaluate_powers([500, 600, 700, 700, 600, 500])
).to(
raise_error(NotAscendingPowers))
tari_T64 = T64()
expect(lambda: tari_T64.evaluate_powers([-10, -5, 0, 10, 20])).to(
raise_error(NotPositivePower))
assert tari_T64.evaluate_powers([400, 410, 420, 430, 440, 451])
assert tari_T64.evaluate_powers([500, 600, 700, 800, 900, 1000])
expect(lambda: tari_T64.evaluate_powers([16, 17])).to(
raise_error(IncorrectPowerNumber, 'Expected 6 power(s) and got 2'))
expect(
lambda: tari_T64.evaluate_powers([500, 600, 700, 700, 600, 500])
).to(
raise_error(NotAscendingPowers))
with it('should allow to check if a set of powers is correct'):
tari_T20A = T20A()
assert len(tari_T20A.evaluate_powers_all_checks([-10]))
assert len(tari_T20A.evaluate_powers_all_checks([0]))
assert len(tari_T20A.evaluate_powers_all_checks([3.5])) == 0
assert len(tari_T20A.evaluate_powers_all_checks([5, 7]))
assert len(tari_T20A.evaluate_powers_all_checks([100]))
tari_T30A = T30A()
assert len(tari_T30A.evaluate_powers_all_checks([-10, -5, 0]))
assert len(tari_T30A.evaluate_powers_all_checks([15, 15, 15]))
assert len(tari_T30A.evaluate_powers_all_checks([16, 17.1, 16]))
assert len(tari_T30A.evaluate_powers_all_checks([14, 15.242, 15.242]))
assert len(tari_T30A.evaluate_powers_all_checks([15.242, 15.242, 16.454])) == 0
assert len(tari_T30A.evaluate_powers_all_checks([16, 17]))
tari_T31A = T31A()
assert len(tari_T31A.evaluate_powers_all_checks([-10, -5, 0]))
assert len(tari_T31A.evaluate_powers_all_checks([10, 13, 16])) == 0
assert len(tari_T31A.evaluate_powers_all_checks([16, 17]))
assert len(tari_T31A.evaluate_powers_all_checks([16, 20, 16]))
tari_T61A = T61A()
assert len(tari_T61A.evaluate_powers_all_checks([-10, -5, 0, 10, 20]))
assert len(tari_T61A.evaluate_powers_all_checks([400, 410, 420, 430, 440, 451])) == 0
assert len(tari_T61A.evaluate_powers_all_checks([500, 600, 700, 800, 900, 1000])) == 0
assert len(tari_T61A.evaluate_powers_all_checks([16, 17]))
assert len(tari_T61A.evaluate_powers_all_checks([500, 600, 700, 700, 600, 500]))
with it('shouldn\'t fail due to bad rounding'):
tari_T20A = T20A()
assert tari_T20A.evaluate_powers([8050.0/1000])
with it('should allow to check if a maximum power is correct'):
tari_T20A = T20A()
assert not tari_T20A.is_maximum_power_correct(-10)
assert not tari_T20A.is_maximum_power_correct(0)
assert tari_T20A.is_maximum_power_correct(7)
assert tari_T20A.is_maximum_power_correct(10)
assert not tari_T20A.is_maximum_power_correct(1000)
with context('without correct_power implemented'):
with it('should raise NotImplemented on correct_power call'):
# All 2.X have it so we don't check anything on them here
# All others don't have it implemented so far
expect(lambda: T30A().correct_powers([1, 2, 3])).to(
raise_error(NotImplementedError))
expect(lambda: T31A().correct_powers([1, 2, 3])).to(
raise_error(NotImplementedError))
expect(lambda: T61A().correct_powers([1, 2, 3, 4, 5, 6])).to(
raise_error(NotImplementedError))
expect(lambda: T61B().correct_powers([1, 2, 3, 4, 5, 6])).to(
raise_error(NotImplementedError))
expect(lambda: T62().correct_powers([1, 2, 3, 4, 5, 6])).to(
raise_error(NotImplementedError))
expect(lambda: T63().correct_powers([1, 2, 3, 4, 5, 6])).to(
raise_error(NotImplementedError))
expect(lambda: T64().correct_powers([1, 2, 3, 4, 5, 6])).to(
raise_error(NotImplementedError))
with context('with correct_power implemented'):
with it('should return a correct power if a wrong one is sent'):
t_20A = T20A()
corr_powers_20A = t_20A.correct_powers([0])
assert t_20A.are_powers_normalized(corr_powers_20A)
t_20DHA = T20DHA()
corr_powers_20DHA = t_20DHA.correct_powers([0])
assert t_20DHA.are_powers_normalized(corr_powers_20DHA)
t_20DHS = T20DHS()
corr_powers_20DHS = t_20DHS.correct_powers([0])
assert t_20DHS.are_powers_normalized(corr_powers_20DHS)
t_21A = T21A()
corr_powers_21A = t_21A.correct_powers([10])
assert t_21A.are_powers_normalized(corr_powers_21A)
t_21DHA = T21DHA()
corr_powers_21DHA = t_21DHA.correct_powers([10])
assert t_21DHA.are_powers_normalized(corr_powers_21DHA)
t_21DHS = T21DHS()
corr_powers_21DHS = t_21DHS.correct_powers([10])
assert t_21DHS.are_powers_normalized(corr_powers_21DHS)
with it('should return the same power if it\'s correct'):
assert T20A().correct_powers([0.330]) == [0.330]
assert T20DHA().correct_powers([0.345]) == [0.345]
assert T20DHS().correct_powers([0.660]) == [0.660]
assert T21A().correct_powers([10.350]) == [10.350]
assert T21DHA().correct_powers([10.392]) == [10.392]
assert T21DHS().correct_powers([11.000]) == [11.000]
with it('should return it\'s min and max powers'):
assert T20A().get_min_power() == 0
assert T20DHA().get_min_power() == 0
assert T20DHS().get_min_power() == 0
assert T21A().get_min_power() == 10
assert T21DHA().get_min_power() == 10
assert T21DHS().get_min_power() == 10
assert T30A().get_min_power() == 15
assert T31A().get_min_power() == 1
assert T61A().get_min_power() == 450
assert T61B().get_min_power() == 450
assert T62().get_min_power() == 450
assert T63().get_min_power() == 450
assert T64().get_min_power() == 450
assert T20A().get_max_power() == 10
assert T20DHA().get_max_power() == 10
assert T20DHS().get_max_power() == 10
assert T21A().get_max_power() == 15
assert T21DHA().get_max_power() == 15
assert T21DHS().get_max_power() == 15
# T30A doesn't have a max power
assert T31A().get_max_power() == 450
# T6X don't have a max power
with context('3.0A tariff'):
with before.all:
self.tariff = T30A()
self.periods = self.tariff.energy_periods
with it("should return 6 energy periods"):
assert len(self.periods) == 6
with it("should contain P1 to P6 periods"):
assert 'P1' in self.periods.keys()
assert 'P2' in self.periods.keys()
assert 'P3' in self.periods.keys()
assert 'P4' in self.periods.keys()
assert 'P5' in self.periods.keys()
assert 'P6' in self.periods.keys()
    with context('A 3.1A LB Tariff'):
        with it('must indicate the kva with an integer'):
            # A string kva must be rejected. The asserted message
            # 'kva must be an enter value' pins the library's exact text —
            # 'enter' looks like a typo for 'integer' in the production
            # code, so it must not be "fixed" here without changing it
            # there too.
            def createT31A_LB():
                T31A(kva='1')
            expect(createT31A_LB).to(raise_error(ValueError, 'kva must be an enter value'))
        with it('must activate LB flag'):
            # An integer kva sets the low_voltage_measure flag on the tariff.
            the_tariff = T31A(kva=1)
            assert the_tariff.low_voltage_measure
with description('Getting a tariff by descripion'):
    with it('must return the appropiate tariff and initialize the expected cof'):
        # One row per tariff code: (code, expected class, expected cof).
        cases = [
            ('2.0A', T20A, 'A'),
            ('2.0DHA', T20DHA, 'B'),
            ('2.0DHS', T20DHS, 'D'),
            ('2.1A', T21A, 'A'),
            ('2.1DHA', T21DHA, 'B'),
            ('2.1DHS', T21DHS, 'D'),
            ('3.0A', T30A, 'C'),
            ('3.1A', T31A, 'C'),
            ('3.1A LB', T31A, 'C'),
        ]
        for code, expected_cls, expected_cof in cases:
            tariff = get_tariff_by_code(code)()
            assert isinstance(tariff, expected_cls)
            assert tariff.cof, "Object doesn't have cof attribute"
            assert tariff.cof == expected_cof, "Object cof not match with expected one"
    with it('must return None if the code is not in available'):
        # Unknown codes resolve to None instead of raising.
        tariff = get_tariff_by_code('NO_EXISTS')
        expect(tariff).to(be_none)
with description('Correct period for tariff an hour'):
    with before.all:
        # Fixed 2014 reference days, localized to the module's TIMEZONE,
        # one per season / day-type combination used by the period tests.
        # NOTE(review): Nov 1 and Aug 15 are presumably national holidays
        # in the tariff's holiday calendar — confirm against that calendar.
        self.winter_holiday_day = TIMEZONE.localize(datetime(2014, 11, 1))
        self.summer_holiday_day = TIMEZONE.localize(datetime(2014, 8, 15))
        self.winter_weekend_day = TIMEZONE.localize(datetime(2014, 2, 15))
        self.summer_weekend_day = TIMEZONE.localize(datetime(2014, 6, 21))
        self.winter_laboral_day = TIMEZONE.localize(datetime(2014, 11, 12))
        self.summer_laboral_day = TIMEZONE.localize(datetime(2014, 7, 16))
with context('2.0DHA'):
with before.all:
self.tarifa = T20DHA()
with it('should have code 2.0DHA'):
assert self.tarifa.code == '2.0DHA'
with it('should have correct period on holiday winter data'):
dia = self.winter_holiday_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59)).code == 'P2'
with it('should have correct period on holiday summer data'):
dia = self.summer_holiday_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on laboral winter data'):
dia = self.winter_laboral_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on laboral summer data'):
dia = self.summer_laboral_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on weekend winter data'):
dia = self.winter_weekend_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on weekend summer data'):
dia = self.summer_weekend_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with context('2.0DHS'):
with before.all:
self.tarifa = T20DHS()
with it('should have code 2.0DHS'):
assert self.tarifa.code == '2.0DHS'
with it('should have correct period on holiday winter data'):
dia = self.winter_holiday_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59)).code == 'P1'
with it('should have correct period on holiday summer data'):
dia = self.summer_holiday_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on laboral winter data'):
dia = self.winter_laboral_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on laboral summer data'):
dia = self.summer_laboral_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on weekend winter data'):
dia = self.winter_weekend_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on weekend summer data'):
dia = self.summer_weekend_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with context('2.1DHA'):
with before.all:
self.tarifa = T21DHA()
with it('should have code 2.1DHA'):
assert self.tarifa.code == '2.1DHA'
with it('should have correct period on holiday winter data'):
dia = self.winter_holiday_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59)).code == 'P2'
with it('should have correct period on holiday summer data'):
dia = self.summer_holiday_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on laboral winter data'):
dia = self.winter_laboral_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on laboral summer data'):
dia = self.summer_laboral_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on weekend winter data'):
dia = self.winter_weekend_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with it('should have correct period on weekend summer data'):
dia = self.summer_weekend_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=24)).code == 'P2'
with context('2.1DHS'):
        with before.all:
            # Shared tariff instance for all 2.1DHS period checks below.
            self.tarifa = T21DHS()
        with it('should have code 2.1DHS'):
            assert self.tarifa.code == '2.1DHS'
with it('should have correct period on holiday winter data'):
dia = self.winter_holiday_day
assert self.tarifa.get_period_by_date(dia).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7)).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13)).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23)).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59)).code == 'P1'
with it('should have correct period on holiday summer data'):
    # Same period layout as the laboral and weekend tests below: this
    # tariff draws no distinction between day types.
    dia = self.summer_holiday_day
    for hour, expected in [
            (0, 'P2'), (1, 'P2'), (2, 'P3'), (7, 'P3'),
            (8, 'P2'), (9, 'P2'), (10, 'P2'), (11, 'P2'), (12, 'P2'), (13, 'P2'),
            (14, 'P1'), (15, 'P1'), (16, 'P1'), (17, 'P1'), (18, 'P1'), (19, 'P1'),
            (22, 'P1'), (23, 'P1'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on laboral winter data'):
    dia = self.winter_laboral_day
    for hour, expected in [
            (0, 'P2'), (1, 'P2'), (2, 'P3'), (7, 'P3'),
            (8, 'P2'), (9, 'P2'), (10, 'P2'), (11, 'P2'), (12, 'P2'), (13, 'P2'),
            (14, 'P1'), (15, 'P1'), (16, 'P1'), (17, 'P1'), (18, 'P1'), (19, 'P1'),
            (22, 'P1'), (23, 'P1'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on laboral summer data'):
    dia = self.summer_laboral_day
    for hour, expected in [
            (0, 'P2'), (1, 'P2'), (2, 'P3'), (7, 'P3'),
            (8, 'P2'), (9, 'P2'), (10, 'P2'), (11, 'P2'), (12, 'P2'), (13, 'P2'),
            (14, 'P1'), (15, 'P1'), (16, 'P1'), (17, 'P1'), (18, 'P1'), (19, 'P1'),
            (22, 'P1'), (23, 'P1'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on weekend winter data'):
    dia = self.winter_weekend_day
    for hour, expected in [
            (0, 'P2'), (1, 'P2'), (2, 'P3'), (7, 'P3'),
            (8, 'P2'), (9, 'P2'), (10, 'P2'), (11, 'P2'), (12, 'P2'), (13, 'P2'),
            (14, 'P1'), (15, 'P1'), (16, 'P1'), (17, 'P1'), (18, 'P1'), (19, 'P1'),
            (22, 'P1'), (23, 'P1'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on weekend summer data'):
    dia = self.summer_weekend_day
    for hour, expected in [
            (0, 'P2'), (1, 'P2'), (2, 'P3'), (7, 'P3'),
            (8, 'P2'), (9, 'P2'), (10, 'P2'), (11, 'P2'), (12, 'P2'), (13, 'P2'),
            (14, 'P1'), (15, 'P1'), (16, 'P1'), (17, 'P1'), (18, 'P1'), (19, 'P1'),
            (22, 'P1'), (23, 'P1'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with context('3.0A'):
    with before.all:
        # Tariff under test for the whole 3.0A context; periods P1-P6
        # are asserted against it in the tests below.
        self.tarifa = T30A()
    with it('should have code 3.0A'):
        assert self.tarifa.code == '3.0A'
with it('should have correct period on holiday winter data'):
    dia = self.winter_holiday_day  # hour 0 points to friday 31/10/17
    for hour, expected in [
            (0, 'P2'), (1, 'P6'), (2, 'P6'), (8, 'P6'),
            (9, 'P5'), (17, 'P5'), (18, 'P5'),
            (19, 'P4'), (22, 'P4'), (23, 'P5'), (24, 'P5')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on holiday summer data'):
    dia = self.summer_holiday_day  # hour 0 points to thursday 14/08/17
    for hour, expected in [
            (0, 'P2'), (1, 'P6'), (2, 'P6'), (8, 'P6'),
            (9, 'P5'), (10, 'P5'), (11, 'P5'),
            (12, 'P4'), (15, 'P4'),
            (16, 'P5'), (17, 'P5'), (23, 'P5'), (24, 'P5')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on laboral winter data'):
    dia = self.winter_laboral_day
    for hour, expected in [
            (0, 'P2'), (1, 'P3'), (2, 'P3'), (8, 'P3'),
            (9, 'P2'), (17, 'P2'), (18, 'P2'),
            (19, 'P1'), (22, 'P1'), (23, 'P2'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on laboral summer data'):
    dia = self.summer_laboral_day
    for hour, expected in [
            (0, 'P2'), (1, 'P3'), (2, 'P3'), (8, 'P3'),
            (9, 'P2'), (10, 'P2'), (11, 'P2'),
            (12, 'P1'), (15, 'P1'),
            (16, 'P2'), (17, 'P2'), (23, 'P2'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on weekend winter data'):
    dia = self.winter_weekend_day  # hour 0 points to latest hour of friday
    for hour, expected in [
            (0, 'P2'), (1, 'P6'), (2, 'P6'), (8, 'P6'),
            (9, 'P5'), (17, 'P5'), (18, 'P5'),
            (19, 'P4'), (22, 'P4'), (23, 'P5'), (24, 'P5')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on weekend summer data'):
    dia = self.summer_weekend_day  # hour 0 points to latest hour of friday
    for hour, expected in [
            (0, 'P2'), (1, 'P6'), (2, 'P6'), (8, 'P6'),
            (9, 'P5'), (10, 'P5'), (11, 'P5'),
            (12, 'P4'), (15, 'P4'),
            (16, 'P5'), (17, 'P5'), (23, 'P5'), (24, 'P5')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with context("If doesn't have holidays"):
    with before.all:
        # 3.0A variant without holiday periods: holidays are expected to
        # be classified like ordinary days in the tests below.
        self.tarifa = T30ANoFestivos()
    with it('should have code 3.0A'):
        assert self.tarifa.code == '3.0A'
with it('should have correct period on holiday winter data'):
    # Without holiday periods a winter holiday is classified exactly
    # like the laboral winter day below.
    dia = self.winter_holiday_day  # hour 0 points to friday 31/10/17
    for hour, expected in [
            (0, 'P2'), (1, 'P3'), (2, 'P3'), (8, 'P3'),
            (9, 'P2'), (17, 'P2'), (18, 'P2'),
            (19, 'P1'), (22, 'P1'), (23, 'P2'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on holiday summer data'):
    dia = self.summer_holiday_day  # hour 0 points to thursday 14/08/17
    for hour, expected in [
            (0, 'P2'), (1, 'P3'), (2, 'P3'), (8, 'P3'),
            (9, 'P2'), (10, 'P2'), (11, 'P2'),
            (12, 'P1'), (15, 'P1'),
            (16, 'P2'), (17, 'P2'), (23, 'P2'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on laboral winter data'):
    dia = self.winter_laboral_day
    for hour, expected in [
            (0, 'P2'), (1, 'P3'), (2, 'P3'), (8, 'P3'),
            (9, 'P2'), (17, 'P2'), (18, 'P2'),
            (19, 'P1'), (22, 'P1'), (23, 'P2'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on laboral summer data'):
    dia = self.summer_laboral_day
    for hour, expected in [
            (0, 'P2'), (1, 'P3'), (2, 'P3'), (8, 'P3'),
            (9, 'P2'), (10, 'P2'), (11, 'P2'),
            (12, 'P1'), (15, 'P1'),
            (16, 'P2'), (17, 'P2'), (23, 'P2'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on weekend winter data'):
    dia = self.winter_weekend_day  # hour 0 points to latest hour of friday
    for hour, expected in [
            (0, 'P2'), (1, 'P3'), (2, 'P3'), (8, 'P3'),
            (9, 'P2'), (17, 'P2'), (18, 'P2'),
            (19, 'P1'), (22, 'P1'), (23, 'P2'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on weekend summer data'):
    dia = self.summer_weekend_day  # hour 0 points to latest hour of friday
    for hour, expected in [
            (0, 'P2'), (1, 'P3'), (2, 'P3'), (8, 'P3'),
            (9, 'P2'), (10, 'P2'), (11, 'P2'),
            (12, 'P1'), (15, 'P1'),
            (16, 'P2'), (17, 'P2'), (23, 'P2'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with context('3.0A with just one period'):
    with before.all:
        # Degenerate 3.0A configuration: a single period, so every hour
        # of every day type must resolve to P1.
        self.tarifa = T30A_one_period()
    with it('should have code 3.0A'):
        assert self.tarifa.code == '3.0A'
    with it('should have correct period on holiday winter data'):
        dia = self.winter_holiday_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
    with it('should have correct period on holiday summer data'):
        dia = self.summer_holiday_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
    with it('should have correct period on laboral winter data'):
        dia = self.winter_laboral_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
    with it('should have correct period on laboral summer data'):
        dia = self.summer_laboral_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
    with it('should have correct period on weekend winter data'):
        dia = self.winter_weekend_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
    with it('should have correct period on weekend summer data'):
        dia = self.summer_weekend_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
with context('3.1A'):
    with before.all:
        # Tariff under test for the whole 3.1A context (periods P1-P6).
        self.tarifa = T31A()
    with it('should have code 3.1A'):
        assert self.tarifa.code == '3.1A'
with it('should have correct period on holiday winter data'):
    dia = self.winter_holiday_day  # hour 0 points to friday 31/10/17
    for hour, expected in [
            (0, 'P2'), (1, 'P6'), (2, 'P6'), (8, 'P6'), (9, 'P6'), (18, 'P6'),
            (19, 'P5'), (22, 'P5'), (23, 'P5'), (24, 'P5'),
            (25, 'P6')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on holiday summer data'):
    dia = self.summer_holiday_day  # hour 0 points to thursday 14/08/17
    for hour, expected in [
            (0, 'P2'), (1, 'P6'), (2, 'P6'), (8, 'P6'),
            (15, 'P6'), (16, 'P6'), (17, 'P6'), (18, 'P6'),
            (19, 'P5'), (20, 'P5'), (23, 'P5'), (24, 'P5')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on laboral winter data'):
    dia = self.winter_laboral_day
    for hour, expected in [
            (0, 'P2'), (1, 'P3'), (2, 'P3'), (8, 'P3'),
            (9, 'P2'), (11, 'P2'), (15, 'P2'), (17, 'P2'),
            (18, 'P1'), (19, 'P1'), (22, 'P1'), (23, 'P1'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on laboral summer data'):
    dia = self.summer_laboral_day
    for hour, expected in [
            (0, 'P2'), (1, 'P3'), (2, 'P3'), (8, 'P3'),
            (9, 'P2'), (10, 'P2'),
            (11, 'P1'), (12, 'P1'), (15, 'P1'), (16, 'P1'),
            (17, 'P2'), (20, 'P2'), (23, 'P2'), (24, 'P2')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on weekend winter data'):
    dia = self.winter_weekend_day  # hour 0 points to latest hour of friday
    for hour, expected in [
            (0, 'P2'), (1, 'P6'), (2, 'P6'), (8, 'P6'), (9, 'P6'), (18, 'P6'),
            (19, 'P5'), (22, 'P5'), (23, 'P5'), (24, 'P5'),
            (25, 'P6')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with it('should have correct period on weekend summer data'):
    dia = self.summer_weekend_day  # hour 0 points to latest hour of friday
    for hour, expected in [
            (0, 'P2'), (1, 'P6'), (2, 'P6'), (8, 'P6'),
            (15, 'P6'), (16, 'P6'), (17, 'P6'), (18, 'P6'),
            (19, 'P5'), (20, 'P5'), (23, 'P5'), (24, 'P5')]:
        assert self.tarifa.get_period_by_date(dia + timedelta(hours=hour)).code == expected
with context('3.1A with just one period'):
    with before.all:
        # Degenerate 3.1A configuration: a single period, so every hour
        # of every day type must resolve to P1.
        self.tarifa = T31A_one_period()
    with it('should have code 3.1A'):
        assert self.tarifa.code == '3.1A'
    with it('should have correct period on holiday winter data'):
        dia = self.winter_holiday_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
    with it('should have correct period on holiday summer data'):
        dia = self.summer_holiday_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
    with it('should have correct period on laboral winter data'):
        dia = self.winter_laboral_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
    with it('should have correct period on laboral summer data'):
        dia = self.summer_laboral_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
    with it('should have correct period on weekend winter data'):
        dia = self.winter_weekend_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
    with it('should have correct period on weekend summer data'):
        dia = self.summer_weekend_day
        for hour in range(1, 25):
            moment = dia + timedelta(hours=hour)
            assert self.tarifa.get_period_by_date(moment).code == 'P1'
# New TD tariffs
with description("TD tariffs"):
    with before.all:
        # Holiday calendar handed to get_period_by_date() by the TD tests.
        self.holidays = [
            datetime(year, month, day).date()
            for year, month, day in [
                # 2021
                (2021, 1, 1), (2021, 1, 6), (2021, 4, 18),
                (2021, 5, 1), (2021, 8, 15), (2021, 11, 1),
                (2021, 12, 6), (2021, 12, 8), (2021, 12, 25),
                # 2022
                (2022, 1, 1), (2022, 1, 6), (2022, 4, 18),
                (2022, 5, 1), (2022, 8, 15), (2022, 11, 1),
                (2022, 12, 6), (2022, 12, 8), (2022, 12, 25),
            ]
        ]
        # Representative day of each (season, day-type) combination.
        self.winter_holiday_day = datetime(2022, 1, 6)
        self.summer_holiday_day = datetime(2021, 8, 15)
        self.winter_weekend_day = datetime(2021, 12, 18)
        self.summer_weekend_day = datetime(2021, 6, 20)
        self.winter_laboral_day = datetime(2021, 11, 12)
        self.summer_laboral_day = datetime(2021, 7, 16)
        # different season days to test 6 periods
        self.january_day = datetime(2022, 1, 19)  # "A" in zones 1, 4 and 5. "M" in zones 2 and 3.
        self.march_day = datetime(2022, 3, 15)  # "M" in zones 1, 3 and 4. "B" in zones 2 and 5.
        self.april_day = datetime(2022, 4, 13)  # "B" in all zones.
        self.may_day = datetime(2022, 5, 4)  # "MA" in zone 2. "B" in zones 1 and 3, 4 and 5.
        self.june_day = datetime(2021, 6, 4)  # "A" in zone 2. "M" in zones 1 and 5. "B" in zones 3 and 4.
        self.august_day = datetime(2021, 8, 16)  # "A" in zones 2, 3, 4 and 5. "M" in zone 1.
        self.december_day = datetime(2021, 12, 13)  # "A" in zone 1, "MA" in zones 3 and 5, "M" in zone 2 and 4.
with context("2.0TD"):
    with before.all:
        # Tariff under test for the whole 2.0TD context.
        self.tarifa = T20TD()
    with it('should have code 2.0TD'):
        assert self.tarifa.code == '2.0TD'
    with it('should have correct power margins and type'):
        # 'BT' presumably stands for "baja tensión" (low voltage) —
        # confirm against the tariff module; power range is 0-15.
        assert self.tarifa.type == 'BT'
        assert self.tarifa.min_power == 0
        assert self.tarifa.max_power == 15
    with it('should have correct geom zone'):
        assert self.tarifa.geom_zone == '1'
    with it('should have correct energy and power periods'):
        # Three energy periods but only two power periods; unlike the
        # older tariffs, holidays are passed explicitly to
        # get_period_by_date() instead of being a built-in period table.
        assert len(self.tarifa.energy_periods) == 3
        assert len(self.tarifa.power_periods) == 2
        assert self.tarifa.get_number_of_periods() == 3
        assert not self.tarifa.has_holidays_periods
with it('should have correct energy period on holiday winter data'):
    # Every hour of the holiday (including the final minute of the day)
    # is asserted to be P3.
    dia = self.winter_holiday_day
    for hour in range(24):
        moment = dia + timedelta(hours=hour)
        assert self.tarifa.get_period_by_date(moment, self.holidays).code == 'P3'
    last_minute = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(last_minute, self.holidays).code == 'P3'
with it('should have correct energy period on holiday summer data'):
dia = self.summer_holiday_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P3'
with it('should have correct energy period on laboral winter data'):
dia = self.winter_laboral_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P2'
with it('should have correct energy period on laboral summer data'):
dia = self.summer_laboral_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P2'
with it('should have correct energy period on weekend winter data'):
dia = self.winter_weekend_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P3'
with it('should have correct energy period on weekend summer data'):
dia = self.summer_weekend_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P3'
with it('must have the correct energy period on time change days'):
assert self.tarifa.get_period_by_timestamp('2021-03-28 01') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 02') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 03') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 04') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 05') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 06') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 07') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 08') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 09') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 10') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 11') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 12') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 13') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 14') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 15') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 16') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 17') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 18') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 19') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 20') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 21') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 22') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-03-28 23') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 01') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 02') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 03') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 04') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 05') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 06') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 07') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 08') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 09') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 10') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 11') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 12') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 13') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 14') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 15') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 16') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 17') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 18') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 19') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 20') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 21') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 22') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 23') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 24') == 'P3'
assert self.tarifa.get_period_by_timestamp('2021-10-24 25') == 'P3'
with it('should have correct power period'):
dia = self.summer_weekend_day
assert self.tarifa.get_period_by_date(dia, self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays, magn='tp').code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays,
magn='tp').code == 'P2'
    with context("3.0TD"):
        with before.all:
            # Tariff under test for this context: 3.0TD.
            self.tarifa = T30TD()
with it('should have code 3.0TD'):
assert self.tarifa.code == '3.0TD'
with it('should have correct power margins and type'):
assert self.tarifa.type == 'BT'
assert self.tarifa.min_power == 15
assert self.tarifa.max_power == 100000
with it('should have correct energy and power periods'):
assert len(self.tarifa.energy_periods) == 6
assert len(self.tarifa.power_periods) == 6
assert self.tarifa.get_number_of_periods() == 6
assert not self.tarifa.has_holidays_periods
with it('should have correct energy period on holiday winter data'):
dia = self.winter_holiday_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P6'
with it('should have correct energy period on holiday summer data'):
dia = self.summer_holiday_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P6'
with it('should have correct energy period on laboral winter data'):
dia = self.winter_laboral_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P3'
with it('should have correct energy period on laboral summer data'):
dia = self.summer_laboral_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P2'
with it('should have correct energy period on weekend winter data'):
dia = self.winter_weekend_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P6'
with it('should have correct energy period on weekend summer data'):
dia = self.summer_weekend_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P6'
with it('must have the correct energy period on time change days'):
assert self.tarifa.get_period_by_timestamp('2021-03-28 01') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 02') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 03') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 04') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 05') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 06') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 07') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 08') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 09') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 10') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 11') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 12') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 13') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 14') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 15') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 16') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 17') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 18') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 19') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 20') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 21') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 22') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 23') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 01') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 02') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 03') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 04') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 05') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 06') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 07') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 08') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 09') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 10') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 11') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 12') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 13') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 14') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 15') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 16') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 17') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 18') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 19') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 20') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 21') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 22') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 23') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 24') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 25') == 'P6'
with it('should have correct power period'):
dia = self.summer_weekend_day
assert self.tarifa.get_period_by_date(dia, self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays,
magn='tp').code == 'P6'
    with context("6.1TD"):
        with before.all:
            # Shared fixture: 6.1TD tariff instance used by the specs below.
            self.tarifa = T61TD()
        with it('should have code 6.1TD'):
            assert self.tarifa.code == '6.1TD'
        with it('should have correct power margins and type'):
            # 'AT' presumably stands for "alta tensión" (high voltage);
            # power limits appear to be in kW — TODO confirm units.
            assert self.tarifa.type == 'AT'
            assert self.tarifa.min_power == 0
            assert self.tarifa.max_power == 100000
        with it('should have correct energy and power periods'):
            # Six energy and six power periods, with no holiday-specific
            # period table.
            assert len(self.tarifa.energy_periods) == 6
            assert len(self.tarifa.power_periods) == 6
            assert self.tarifa.get_number_of_periods() == 6
            assert not self.tarifa.has_holidays_periods
        with it('with losses have a trafo kva and losses_coeff'):
            # Building with a transformer rating (kva) sets the losses
            # coefficient and flags low-voltage-side measurement.
            # NOTE(review): this rebinds self.tarifa for any later spec in
            # this context (before.all runs once) — confirm that is intended.
            kwargs = {'kva': 50}
            self.tarifa = T61TD(**kwargs)
            assert self.tarifa.losses == 0.04
            assert self.tarifa.kva == 50
            assert self.tarifa.low_voltage_measure
    # Tariff 6.2TD
    with context("6.2TD"):
        with before.all:
            # Shared fixture: 6.2TD tariff instance used by the specs below.
            self.tarifa = T62TD()
        with it('should have code 6.2TD'):
            assert self.tarifa.code == '6.2TD'
        with it('should have correct power margins and type'):
            # 'AT' presumably stands for "alta tensión" (high voltage);
            # power limits appear to be in kW — TODO confirm units.
            assert self.tarifa.type == 'AT'
            assert self.tarifa.min_power == 0
            assert self.tarifa.max_power == 100000
        with it('should have correct energy and power periods'):
            # Six energy and six power periods, no holiday-specific table.
            assert len(self.tarifa.energy_periods) == 6
            assert len(self.tarifa.power_periods) == 6
            assert self.tarifa.get_number_of_periods() == 6
            assert not self.tarifa.has_holidays_periods
    # Tariff 6.3TD
    with context("6.3TD"):
        with before.all:
            # Shared fixture: 6.3TD tariff instance used by the specs below.
            self.tarifa = T63TD()
        with it('should have code 6.3TD'):
            assert self.tarifa.code == '6.3TD'
        with it('should have correct power margins and type'):
            # 'AT' presumably stands for "alta tensión" (high voltage);
            # power limits appear to be in kW — TODO confirm units.
            assert self.tarifa.type == 'AT'
            assert self.tarifa.min_power == 0
            assert self.tarifa.max_power == 100000
        with it('should have correct energy and power periods'):
            # Six energy and six power periods, no holiday-specific table.
            assert len(self.tarifa.energy_periods) == 6
            assert len(self.tarifa.power_periods) == 6
            assert self.tarifa.get_number_of_periods() == 6
            assert not self.tarifa.has_holidays_periods
    # Tariff 6.4TD
    with context("6.4TD"):
        with before.all:
            # Shared fixture: 6.4TD tariff instance used by the specs below.
            self.tarifa = T64TD()
        with it('should have code 6.4TD'):
            assert self.tarifa.code == '6.4TD'
        with it('should have correct power margins and type'):
            # 'AT' presumably stands for "alta tensión" (high voltage);
            # power limits appear to be in kW — TODO confirm units.
            assert self.tarifa.type == 'AT'
            assert self.tarifa.min_power == 0
            assert self.tarifa.max_power == 100000
        with it('should have correct energy and power periods'):
            # Six energy and six power periods, no holiday-specific table.
            assert len(self.tarifa.energy_periods) == 6
            assert len(self.tarifa.power_periods) == 6
            assert self.tarifa.get_number_of_periods() == 6
            assert not self.tarifa.has_holidays_periods
with context("3.0TDVE"):
with before.all:
self.tarifa = T30TDVE()
with it('should have code 3.0TDVE'):
assert self.tarifa.code == '3.0TDVE'
with it('should have correct power margins and type'):
assert self.tarifa.type == 'BT'
assert self.tarifa.min_power == 15
assert self.tarifa.max_power == 100000
with it('should have correct energy and power periods'):
assert len(self.tarifa.energy_periods) == 6
assert len(self.tarifa.power_periods) == 6
assert self.tarifa.get_number_of_periods() == 6
assert not self.tarifa.has_holidays_periods
with it('should have correct energy period on holiday winter data'):
dia = self.winter_holiday_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P6'
with it('should have correct energy period on holiday summer data'):
dia = self.summer_holiday_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P6'
with it('should have correct energy period on laboral winter data'):
dia = self.winter_laboral_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P3'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P3'
with it('should have correct energy period on laboral summer data'):
dia = self.summer_laboral_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P1'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P2'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P2'
with it('should have correct energy period on weekend winter data'):
dia = self.winter_weekend_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P6'
with it('should have correct energy period on weekend summer data'):
dia = self.summer_weekend_day
assert self.tarifa.get_period_by_date(dia, self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays).code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays).code == 'P6'
with it('must have the correct energy period on time change days'):
assert self.tarifa.get_period_by_timestamp('2021-03-28 01') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 02') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 03') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 04') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 05') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 06') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 07') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 08') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 09') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 10') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 11') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 12') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 13') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 14') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 15') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 16') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 17') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 18') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 19') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 20') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 21') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 22') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-03-28 23') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 01') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 02') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 03') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 04') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 05') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 06') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 07') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 08') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 09') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 10') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 11') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 12') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 13') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 14') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 15') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 16') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 17') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 18') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 19') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 20') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 21') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 22') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 23') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 24') == 'P6'
assert self.tarifa.get_period_by_timestamp('2021-10-24 25') == 'P6'
with it('should have correct power period'):
dia = self.summer_weekend_day
assert self.tarifa.get_period_by_date(dia, self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=1), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=2), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=3), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=4), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=5), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=6), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=7), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=8), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=9), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=10), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=11), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=12), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=13), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=14), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=15), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=16), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=17), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=18), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=19), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=20), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=21), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=22), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23), self.holidays, magn='tp').code == 'P6'
assert self.tarifa.get_period_by_date(dia + timedelta(hours=23, minutes=59), self.holidays,
magn='tp').code == 'P6'
    with context("6.1TDVE"):
        with before.all:
            # Shared fixture: 6.1TDVE tariff instance used by the specs below.
            self.tarifa = T61TDVE()
        with it('should have code 6.1TDVE'):
            assert self.tarifa.code == '6.1TDVE'
        with it('should have correct power margins and type'):
            # 'AT' presumably stands for "alta tensión" (high voltage);
            # power limits appear to be in kW — TODO confirm units.
            assert self.tarifa.type == 'AT'
            assert self.tarifa.min_power == 0
            assert self.tarifa.max_power == 100000
        with it('should have correct energy and power periods'):
            # Six energy and six power periods, no holiday-specific table.
            assert len(self.tarifa.energy_periods) == 6
            assert len(self.tarifa.power_periods) == 6
            assert self.tarifa.get_number_of_periods() == 6
            assert not self.tarifa.has_holidays_periods
with context("Energy and Power periods depend on day type on geom zone 2"):
    with before.all:
        # 3.0TD tariff configured for geographical zone 2.
        self.tarifa = T30TD(geom_zone='2')
    with it('should have correct geom zone'):
        assert self.tarifa.geom_zone == '2'  # Balearic Islands
    # demo days used
    # (2021-06-04) # "A" in zone 2.
    # (2022-05-04) # "MA" in zone 2.
    # (2022-01-19) # "M" in zones 2.
    # (2022-03-15) # "B" in zones 2.
    with it('should have correct energy period on "A" day type'):
        dia = self.june_day
        # Expected energy period for each whole hour (list index == hour).
        per_hour = (['P6'] * 8 + ['P2'] * 2 + ['P1'] * 5 +
                    ['P2'] * 3 + ['P1'] * 4 + ['P2'] * 2)
        for hora, esperado in enumerate(per_hour):
            moment = dia + timedelta(hours=hora)
            assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
        # The last minute of the day still falls in the final hour's period.
        tail = dia + timedelta(hours=23, minutes=59)
        assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
    with it('should have correct energy period on "B" day type'):
        dia = self.may_day
        # Same shape as an "A" day but shifted down one period (P2/P3).
        per_hour = (['P6'] * 8 + ['P3'] * 2 + ['P2'] * 5 +
                    ['P3'] * 3 + ['P2'] * 4 + ['P3'] * 2)
        for hora, esperado in enumerate(per_hour):
            moment = dia + timedelta(hours=hora)
            assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
        tail = dia + timedelta(hours=23, minutes=59)
        assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
    with it('should have correct energy period on "B1" day type'):
        dia = self.january_day
        per_hour = (['P6'] * 8 + ['P4'] * 2 + ['P3'] * 5 +
                    ['P4'] * 3 + ['P3'] * 4 + ['P4'] * 2)
        for hora, esperado in enumerate(per_hour):
            moment = dia + timedelta(hours=hora)
            assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
        tail = dia + timedelta(hours=23, minutes=59)
        assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
    with it('should have correct energy period on "C" day type'):
        dia = self.march_day
        per_hour = (['P6'] * 8 + ['P5'] * 2 + ['P4'] * 5 +
                    ['P5'] * 3 + ['P4'] * 4 + ['P5'] * 2)
        for hora, esperado in enumerate(per_hour):
            moment = dia + timedelta(hours=hora)
            assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
        tail = dia + timedelta(hours=23, minutes=59)
        assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
    with it('should have correct energy period on "D" day type'):
        dia = self.winter_weekend_day
        # Weekend/holiday day: every hour resolves to the valley period P6.
        per_hour = ['P6'] * 24
        for hora, esperado in enumerate(per_hour):
            moment = dia + timedelta(hours=hora)
            assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
        tail = dia + timedelta(hours=23, minutes=59)
        assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
with context("Energy and Power periods depend on day type on geom zone 3 (Canary Islands)"):
    with before.all:
        # 3.0TD tariff configured for geographical zone 3.
        self.tarifa = T30TD(geom_zone='3')
    with it('should have correct geom zone'):
        assert self.tarifa.geom_zone == '3'  # Canary Islands
    # demo days used
    # (2021-08-16) # "A" in zone 3.
    # (2021-11-12) # "MA" in zone 3.
    # (2022-01-19) # "M" in zones 3.
    # (2022-04-13) # "B" in zones 3.
    with it('should have correct energy period on "A" day type'):
        dia = self.august_day
        # Expected energy period for each whole hour (list index == hour).
        per_hour = (['P6'] * 8 + ['P3'] * 2 + ['P1'] * 5 +
                    ['P3'] * 3 + ['P1'] * 4 + ['P3'] * 2)
        for hora, esperado in enumerate(per_hour):
            moment = dia + timedelta(hours=hora)
            assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
        # The last minute of the day still falls in the final hour's period.
        tail = dia + timedelta(hours=23, minutes=59)
        assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
    with it('should have correct energy period on "B" day type'):
        dia = self.winter_laboral_day
        per_hour = (['P6'] * 8 + ['P3'] * 2 + ['P2'] * 5 +
                    ['P3'] * 3 + ['P2'] * 4 + ['P3'] * 2)
        for hora, esperado in enumerate(per_hour):
            moment = dia + timedelta(hours=hora)
            assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
        tail = dia + timedelta(hours=23, minutes=59)
        assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
    with it('should have correct energy period on "B1" day type'):
        dia = self.january_day
        per_hour = (['P6'] * 8 + ['P4'] * 2 + ['P2'] * 5 +
                    ['P4'] * 3 + ['P2'] * 4 + ['P4'] * 2)
        for hora, esperado in enumerate(per_hour):
            moment = dia + timedelta(hours=hora)
            assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
        tail = dia + timedelta(hours=23, minutes=59)
        assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
    with it('should have correct energy period on "C" day type'):
        dia = self.april_day
        per_hour = (['P6'] * 8 + ['P5'] * 2 + ['P4'] * 5 +
                    ['P5'] * 3 + ['P4'] * 4 + ['P5'] * 2)
        for hora, esperado in enumerate(per_hour):
            moment = dia + timedelta(hours=hora)
            assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
        tail = dia + timedelta(hours=23, minutes=59)
        assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
    with it('should have correct energy period on "D" day type'):
        dia = self.winter_weekend_day
        # Weekend/holiday day: every hour resolves to the valley period P6.
        per_hour = ['P6'] * 24
        for hora, esperado in enumerate(per_hour):
            moment = dia + timedelta(hours=hora)
            assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
        tail = dia + timedelta(hours=23, minutes=59)
        assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
with context("Energy and Power periods depend on day type on geom zone 4 (Ceuta)"):
with before.all:
    # 3.0TD tariff configured for geographical zone 4.
    self.tarifa = T30TD(geom_zone='4')
with it('should have correct geom zone'):
    zone = self.tarifa.geom_zone
    assert zone == '4'  # Ceuta
# demo days used
# (2021-08-16) # "A" in zone 4.
# (2021-07-16) # "MA" in zone 4.
# (2022-03-15) # "M" in zones 4.
# (2022-04-13) # "B" in zones 4.
with it('should have correct energy period on "A" day type'):
    dia = self.august_day
    # Expected energy period for each whole hour (list index == hour).
    # Zone 4 keeps the shoulder period (P4) through hour 18 and again at 23.
    per_hour = (['P6'] * 8 + ['P4'] * 2 + ['P1'] * 5 +
                ['P4'] * 4 + ['P1'] * 4 + ['P4'])
    for hora, esperado in enumerate(per_hour):
        moment = dia + timedelta(hours=hora)
        assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
    # The last minute of the day still falls in the final hour's period.
    tail = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
with it('should have correct energy period on "B" day type'):
    dia = self.summer_laboral_day
    # Expected energy period for each whole hour (list index == hour).
    per_hour = (['P6'] * 8 + ['P3'] * 2 + ['P2'] * 5 +
                ['P3'] * 4 + ['P2'] * 4 + ['P3'])
    for hora, esperado in enumerate(per_hour):
        moment = dia + timedelta(hours=hora)
        assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
    # The last minute of the day still falls in the final hour's period.
    tail = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
with it('should have correct energy period on "B1" day type'):
    dia = self.march_day
    # Expected energy period for each whole hour (list index == hour).
    per_hour = (['P6'] * 8 + ['P4'] * 2 + ['P2'] * 5 +
                ['P4'] * 4 + ['P2'] * 4 + ['P4'])
    for hora, esperado in enumerate(per_hour):
        moment = dia + timedelta(hours=hora)
        assert self.tarifa.get_period_by_date(moment, self.holidays).code == esperado
    # The last minute of the day still falls in the final hour's period.
    tail = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(tail, self.holidays).code == per_hour[-1]
with it('should have correct energy period on "C" day type'):
    dia = self.april_day
    # Expected period code for each hour 0..23, as asserted one by one
    # in the original spec: P6 x8, P5 x2, P3 x5, P5 x4, P3 x4, P5.
    hourly = ['P6'] * 8 + ['P5'] * 2 + ['P3'] * 5 + ['P5'] * 4 + ['P3'] * 4 + ['P5']
    for hour, expected in enumerate(hourly):
        when = dia + timedelta(hours=hour)
        assert self.tarifa.get_period_by_date(when, self.holidays).code == expected
    # The last minute of the day still belongs to the hour-23 period.
    end_of_day = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(end_of_day, self.holidays).code == hourly[-1]
with it('should have correct energy period on "D" day type'):
    dia = self.winter_weekend_day
    # Every hour of a "D" day maps to P6, including the last minute.
    for hour in range(24):
        when = dia + timedelta(hours=hour)
        assert self.tarifa.get_period_by_date(when, self.holidays).code == 'P6'
    end_of_day = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(end_of_day, self.holidays).code == 'P6'
with context("Energy and Power periods depend on day type on geom zone 5 (Melilla)"):
with before.all:
# One tariff instance for geographic zone 5 (Melilla), shared by every
# example in this context.
self.tarifa = T30TD(geom_zone='5')
with it('should have correct geom zone'):
assert self.tarifa.geom_zone == '5' # Melilla
# Demo days exercised by the examples below (day-type labels for zone 5):
# (2021-08-16) # "A" in zone 5.
# (2021-12-13) # "MA" in zone 5.
# (2021-06-04) # "M" in zone 5.
# (2022-04-13) # "B" in zone 5.
# NOTE(review): the it() descriptions below use A/B/B1/C/D day-type
# labels while this list says A/MA/M/B -- confirm the intended mapping.
with it('should have correct energy period on "A" day type'):
    dia = self.august_day
    # Expected period code per hour 0..23, exactly as the original
    # one-assert-per-hour spec: P6 x8, P2 x2, P1 x5, P2 x4, P1 x4, P2.
    hourly = ['P6'] * 8 + ['P2'] * 2 + ['P1'] * 5 + ['P2'] * 4 + ['P1'] * 4 + ['P2']
    for hour, expected in enumerate(hourly):
        when = dia + timedelta(hours=hour)
        assert self.tarifa.get_period_by_date(when, self.holidays).code == expected
    # The last minute of the day still belongs to the hour-23 period.
    end_of_day = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(end_of_day, self.holidays).code == hourly[-1]
with it('should have correct energy period on "B" day type'):
    dia = self.december_day
    # Expected period code per hour 0..23: P6 x8, P3 x2, P2 x5,
    # P3 x4, P2 x4, P3 -- same values the original asserted per hour.
    hourly = ['P6'] * 8 + ['P3'] * 2 + ['P2'] * 5 + ['P3'] * 4 + ['P2'] * 4 + ['P3']
    for hour, expected in enumerate(hourly):
        when = dia + timedelta(hours=hour)
        assert self.tarifa.get_period_by_date(when, self.holidays).code == expected
    # The last minute of the day still belongs to the hour-23 period.
    end_of_day = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(end_of_day, self.holidays).code == hourly[-1]
with it('should have correct energy period on "B1" day type'):
    dia = self.june_day
    # Expected period code per hour 0..23: P6 x8, P4 x2, P3 x5,
    # P4 x4, P3 x4, P4 -- same values the original asserted per hour.
    hourly = ['P6'] * 8 + ['P4'] * 2 + ['P3'] * 5 + ['P4'] * 4 + ['P3'] * 4 + ['P4']
    for hour, expected in enumerate(hourly):
        when = dia + timedelta(hours=hour)
        assert self.tarifa.get_period_by_date(when, self.holidays).code == expected
    # The last minute of the day still belongs to the hour-23 period.
    end_of_day = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(end_of_day, self.holidays).code == hourly[-1]
with it('should have correct energy period on "C" day type'):
    dia = self.april_day
    # Expected period code per hour 0..23: P6 x8, P5 x2, P4 x5,
    # P5 x4, P4 x4, P5 -- same values the original asserted per hour.
    hourly = ['P6'] * 8 + ['P5'] * 2 + ['P4'] * 5 + ['P5'] * 4 + ['P4'] * 4 + ['P5']
    for hour, expected in enumerate(hourly):
        when = dia + timedelta(hours=hour)
        assert self.tarifa.get_period_by_date(when, self.holidays).code == expected
    # The last minute of the day still belongs to the hour-23 period.
    end_of_day = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(end_of_day, self.holidays).code == hourly[-1]
with it('should have correct energy period on "D" day type'):
    dia = self.winter_weekend_day
    # Every hour of a "D" day maps to P6, including the last minute.
    for hour in range(24):
        when = dia + timedelta(hours=hour)
        assert self.tarifa.get_period_by_date(when, self.holidays).code == 'P6'
    end_of_day = dia + timedelta(hours=23, minutes=59)
    assert self.tarifa.get_period_by_date(end_of_day, self.holidays).code == 'P6'
| 76.565041
| 150
| 0.642917
| 31,828
| 226,020
| 4.366753
| 0.010682
| 0.140807
| 0.219995
| 0.254545
| 0.965284
| 0.952232
| 0.941339
| 0.937425
| 0.931417
| 0.926755
| 0
| 0.047312
| 0.219711
| 226,020
| 2,951
| 151
| 76.590986
| 0.740759
| 0.006756
| 0
| 0.828246
| 0
| 0.365576
| 0.065243
| 0
| 0
| 0
| 0
| 0
| 0.751747
| 1
| 0.001471
| false
| 0
| 0.002207
| 0
| 0.003678
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
2d6d35b1067f01ac350b5857b3907194f0a7e9ea
| 215
|
py
|
Python
|
iwfs/app_config.py
|
usc-isi-i2/isi-wikidata-query-service
|
b2e708cbd9ffe55895fecd2f345aa9a312dbd6e9
|
[
"MIT"
] | null | null | null |
iwfs/app_config.py
|
usc-isi-i2/isi-wikidata-query-service
|
b2e708cbd9ffe55895fecd2f345aa9a312dbd6e9
|
[
"MIT"
] | 1
|
2021-11-16T20:03:14.000Z
|
2021-11-16T20:03:14.000Z
|
iwfs/app_config.py
|
usc-isi-i2/kgtk-search
|
ec9a07b4e1114b0f024963a23efaf38ff79f7efb
|
[
"MIT"
] | 1
|
2020-11-12T00:09:13.000Z
|
2020-11-12T00:09:13.000Z
|
# Locations of pre-computed data files read by the service.
# NOTE(review): both are absolute, machine-specific paths under a user's
# home directory -- presumably meant to be overridden per deployment;
# confirm they are configurable before shipping.
TEXT_EMBEDDING_LARGE_ALL = '/Users/amandeep/Github/isi-wikidata-query-service/iwfs/test_data/text-embeddings'
WIKI_LABELS = '/Users/amandeep/Github/isi-wikidata-query-service/iwfs/test_data/labels/labels.en.tsv.gz'
| 71.666667
| 109
| 0.827907
| 33
| 215
| 5.212121
| 0.606061
| 0.151163
| 0.22093
| 0.255814
| 0.627907
| 0.627907
| 0.627907
| 0.627907
| 0.627907
| 0.627907
| 0
| 0
| 0.027907
| 215
| 2
| 110
| 107.5
| 0.822967
| 0
| 0
| 0
| 0
| 1
| 0.781395
| 0.781395
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2dcf20eaa01f22c0ded0b3beb982f629af2fc06c
| 82,701
|
py
|
Python
|
tests/test_event.py
|
Jajcus/python-alsa-midi
|
1747fc85482b747f96e2ae3e9c8fe4f2c39a9391
|
[
"MIT"
] | 4
|
2022-01-26T22:33:11.000Z
|
2022-03-21T18:05:26.000Z
|
tests/test_event.py
|
Jajcus/python-alsa-midi
|
1747fc85482b747f96e2ae3e9c8fe4f2c39a9391
|
[
"MIT"
] | 1
|
2022-01-27T21:01:18.000Z
|
2022-01-29T17:21:17.000Z
|
tests/test_event.py
|
Jajcus/python-alsa-midi
|
1747fc85482b747f96e2ae3e9c8fe4f2c39a9391
|
[
"MIT"
] | 1
|
2021-12-26T16:59:13.000Z
|
2021-12-26T16:59:13.000Z
|
import pytest
from alsa_midi import (ActiveSensingEvent, Address, BounceEvent, ChannelPressureEvent,
ClientChangeEvent, ClientExitEvent, ClientStartEvent, ClockEvent,
ContinueEvent, Control14BitChangeEvent, ControlChangeEvent, EchoEvent,
Event, EventFlags, EventType, KeyPressureEvent, KeySignatureEvent,
MidiBytesEvent, NonRegisteredParameterChangeEvent, NoteEvent, NoteOffEvent,
NoteOnEvent, OSSEvent, PitchBendEvent, PortChangeEvent, PortExitEvent,
PortStartEvent, PortSubscribedEvent, PortUnsubscribedEvent,
ProgramChangeEvent, QueueSkewEvent, RealTime,
RegisteredParameterChangeEvent, ResetEvent, ResultEvent,
SetQueuePositionTickEvent, SetQueuePositionTimeEvent, SetQueueTempoEvent,
SongPositionPointerEvent, SongSelectEvent, StartEvent, StopEvent,
SyncPositionChangedEvent, SysExEvent, SystemEvent, TickEvent,
TimeSignatureEvent, TuneRequestEvent, UserVar0Event, UserVar1Event,
UserVar2Event, UserVar3Event, alsa, ffi)
from alsa_midi.event import ExternalDataEventBase
def test_event():
"""Generic Event: default attribute values, _to_alsa() serialization with
and without queue/port/dest overrides, full keyword construction, and
rejection of conflicting time/tick arguments."""
# A bare Event: no type, all optional attributes unset.
event = Event()
assert event.type is None
assert event.flags == 0
assert event.tag == 0
assert event.queue_id is None
assert event.time is None
assert event.tick is None
assert event.source is None
assert event.dest is None
assert event.relative is None
assert event.raw_data is None
assert repr(event) == "<Event unknown>"
# Serializing the empty event fills the struct with defaults:
# direct queue, subscriber destination, zeroed timestamp and payload.
alsa_event = ffi.new("snd_seq_event_t *")
result = event._to_alsa(alsa_event)
assert result is alsa_event
assert alsa_event.type == 0
assert alsa_event.flags == 0
assert alsa_event.tag == 0
assert alsa_event.queue == alsa.SND_SEQ_QUEUE_DIRECT
assert alsa_event.time.tick == 0
assert alsa_event.time.time.tv_sec == 0
assert alsa_event.time.time.tv_nsec == 0
assert alsa_event.source.client == 0
assert alsa_event.source.port == 0
assert alsa_event.dest.client == alsa.SND_SEQ_ADDRESS_SUBSCRIBERS
assert alsa_event.dest.port == 0
assert bytes(ffi.buffer(alsa_event.data.raw8.d)) == b"\x00" * ffi.sizeof(alsa_event.data)
assert repr(event) == "<Event unknown>"
# queue/port/dest keyword overrides are applied when the corresponding
# event attributes are unset.
alsa_event = ffi.new("snd_seq_event_t *")
result = event._to_alsa(alsa_event,
queue=11,
port=12,
dest=Address(13, 14))
assert result is alsa_event
assert alsa_event.type == 0
assert alsa_event.flags == 0
assert alsa_event.tag == 0
assert alsa_event.queue == 11
assert alsa_event.time.tick == 0
assert alsa_event.time.time.tv_sec == 0
assert alsa_event.time.time.tv_nsec == 0
assert alsa_event.source.client == 0
assert alsa_event.source.port == 12
assert alsa_event.dest.client == 13
assert alsa_event.dest.port == 14
assert repr(event) == "<Event unknown>"
# A fully populated event built from keywords; tuples are accepted for
# source/dest and normalized to Address.
event = Event(type=EventType.NOTEON,
flags=EventFlags.PRIORITY_HIGH,
tag=1,
queue_id=2,
time=RealTime(3, 4),
source=(5, 6),
dest=(7, 8),
relative=True,
raw_data=b"abcde"
)
assert event.type == EventType.NOTEON
assert event.flags == EventFlags.PRIORITY_HIGH
assert event.tag == 1
assert event.queue_id == 2
assert event.time == RealTime(3, 4)
assert event.tick is None
assert event.source == Address(5, 6)
assert event.dest == Address(7, 8)
assert event.relative is True
assert event.raw_data == b"abcde"
assert repr(event) == "<Event NOTEON>"
# Serialization adds REL/REAL timestamp flags (relative=True with a
# RealTime time) on top of the explicit PRIORITY_HIGH flag.
alsa_event = ffi.new("snd_seq_event_t *")
result = event._to_alsa(alsa_event)
assert result is alsa_event
assert alsa_event.type == alsa.SND_SEQ_EVENT_NOTEON
assert EventFlags(alsa_event.flags) == (EventFlags.PRIORITY_HIGH | EventFlags.TIME_MODE_REL
| EventFlags.TIME_STAMP_REAL)
assert alsa_event.tag == 1
assert alsa_event.queue == 2
assert alsa_event.time.time.tv_sec == 3
assert alsa_event.time.time.tv_nsec == 4
assert alsa_event.source.client == 5
assert alsa_event.source.port == 6
assert alsa_event.dest.client == 7
assert alsa_event.dest.port == 8
# With overrides: queue and dest come from the keywords; the source
# port is overridden while the source client stays 0.
alsa_event = ffi.new("snd_seq_event_t *")
result = event._to_alsa(alsa_event,
queue=11,
port=12,
dest=Address(13, 14))
assert result is alsa_event
assert alsa_event.type == alsa.SND_SEQ_EVENT_NOTEON
assert EventFlags(alsa_event.flags) == (EventFlags.PRIORITY_HIGH | EventFlags.TIME_MODE_REL
| EventFlags.TIME_STAMP_REAL)
assert alsa_event.tag == 1
assert alsa_event.queue == 11
assert alsa_event.time.time.tv_sec == 3
assert alsa_event.time.time.tv_nsec == 4
assert alsa_event.source.client == 0
assert alsa_event.source.port == 12
assert alsa_event.dest.client == 13
assert alsa_event.dest.port == 14
# Giving both time= and tick= is rejected.
with pytest.raises(ValueError):
event = Event(type=EventType.NOTEOFF,
time=0.1,
tick=17,
)
def test_event_from_alsa():
    """Event._from_alsa(): map snd_seq_event_t structs back to Event objects.

    Covers a zero-initialized struct (default field mapping, tick time)
    and a fully populated NOTEOFF struct with relative real-time stamps.
    """
    # A zeroed struct maps to the SYSTEM event with tick == 0.
    raw = ffi.new("snd_seq_event_t *")
    event = Event._from_alsa(raw)
    assert event.type == 0
    assert event.flags == 0
    assert event.tag == 0
    assert event.queue_id == 0
    assert event.time is None
    assert event.tick == 0
    assert event.source == Address(0, 0)
    assert event.dest == Address(0, 0)
    assert event.relative is False
    assert event.raw_data == b"\x00" * ffi.sizeof(raw.data)
    assert repr(event) == "<Event SYSTEM>"

    # A hand-filled NOTEOFF struct with REAL + REL time flags.
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_NOTEOFF
    raw.flags = alsa.SND_SEQ_TIME_STAMP_REAL | alsa.SND_SEQ_TIME_MODE_REL
    raw.tag = 5
    raw.queue = 6
    raw.time.time.tv_sec = 7
    raw.time.time.tv_nsec = 8
    raw.source.client = 9
    raw.source.port = 10
    raw.dest.client = 11
    raw.dest.port = 12
    ffi.buffer(raw.data.raw8.d)[:] = b"x" * ffi.sizeof(raw.data)
    event = Event._from_alsa(raw)
    assert event.type == EventType.NOTEOFF
    assert event.flags == alsa.SND_SEQ_TIME_STAMP_REAL | alsa.SND_SEQ_TIME_MODE_REL
    assert event.tag == 5
    assert event.queue_id == 6
    assert event.time == RealTime(7, 8)
    assert event.tick is None
    assert event.source == Address(9, 10)
    assert event.dest == Address(11, 12)
    assert event.relative is True
    assert event.raw_data == b"x" * ffi.sizeof(raw.data)
    assert repr(event) == "<Event NOTEOFF>"
def test_midi_bytest_event():
    """MidiBytesEvent: construction from bytes or int sequences, _to_alsa()
    serialization, and _from_alsa() with an explicit midi_bytes payload."""
    # Built from a bytes object; repr shows the payload in hex.
    event = MidiBytesEvent(b"abcde", tag=5)
    assert isinstance(event, Event)
    assert event.tag == 5
    assert event.midi_bytes == b"abcde"
    assert repr(event) == "<MidiBytesEvent 61 62 63 64 65>"

    # Built from a sequence of integers; normalized to bytes.
    event = MidiBytesEvent([0x00, 0x01], tag=6)
    assert isinstance(event, Event)
    assert event.tag == 6
    assert event.midi_bytes == b"\x00\x01"
    assert repr(event) == "<MidiBytesEvent 00 01>"

    # Serialization leaves type 0 and a zeroed payload.
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == 0
    assert raw.tag == 6
    # that is to be filled separately
    assert bytes(ffi.buffer(raw.data.raw8.d)) == b"\x00" * ffi.sizeof(raw.data)

    # _from_alsa keeps the struct's header fields and takes the MIDI
    # payload from the midi_bytes keyword, not from the struct data.
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_NOTEOFF
    raw.flags = alsa.SND_SEQ_TIME_STAMP_REAL | alsa.SND_SEQ_TIME_MODE_REL
    raw.tag = 5
    raw.queue = 6
    raw.time.time.tv_sec = 7
    raw.time.time.tv_nsec = 8
    raw.source.client = 9
    raw.source.port = 10
    raw.dest.client = 11
    raw.dest.port = 12
    ffi.buffer(raw.data.raw8.d)[:] = b"x" * ffi.sizeof(raw.data)
    event = MidiBytesEvent._from_alsa(raw, midi_bytes=b"abcd")
    assert isinstance(event, MidiBytesEvent)
    assert event.type == EventType.NOTEOFF
    assert event.flags == alsa.SND_SEQ_TIME_STAMP_REAL | alsa.SND_SEQ_TIME_MODE_REL
    assert event.tag == 5
    assert event.queue_id == 6
    assert event.time == RealTime(7, 8)
    assert event.tick is None
    assert event.source == Address(9, 10)
    assert event.dest == Address(11, 12)
    assert event.relative is True
    assert event.raw_data == b"x" * ffi.sizeof(raw.data)
    assert repr(event) == "<MidiBytesEvent 61 62 63 64>"
def test_system_event():
    """SystemEvent: constructor, _to_alsa() serialization, _from_alsa() parsing."""
    event = SystemEvent(event=1, result=2, tag=3)
    assert isinstance(event, SystemEvent)
    assert isinstance(event, Event)
    assert event.type == EventType.SYSTEM
    assert event.event == 1
    assert event.result == 2
    assert event.tag == 3
    assert repr(event) == "<SystemEvent event=1 result=2>"

    # Serialize into a fresh ALSA struct; the same struct is returned.
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_SYSTEM
    assert raw.data.result.event == 1
    assert raw.data.result.result == 2
    assert raw.tag == 3

    # Parse a hand-filled struct back into a SystemEvent.
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_SYSTEM
    raw.tag = 5
    raw.data.result.event = 6
    raw.data.result.result = 7
    event = SystemEvent._from_alsa(raw)
    assert isinstance(event, SystemEvent)
    assert event.type == EventType.SYSTEM
    assert event.tag == 5
    assert event.event == 6
    assert event.result == 7
    assert repr(event) == "<SystemEvent event=6 result=7>"
def test_result_event():
    """ResultEvent: constructor, _to_alsa() serialization, _from_alsa() parsing.

    Mirrors test_system_event with the RESULT event type.
    """
    event = ResultEvent(event=1, result=2, tag=3)
    assert isinstance(event, ResultEvent)
    assert isinstance(event, Event)
    assert event.type == EventType.RESULT
    assert event.event == 1
    assert event.result == 2
    assert event.tag == 3
    assert repr(event) == "<ResultEvent event=1 result=2>"

    # Serialize into a fresh ALSA struct; the same struct is returned.
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_RESULT
    assert raw.data.result.event == 1
    assert raw.data.result.result == 2
    assert raw.tag == 3

    # Parse a hand-filled struct back into a ResultEvent.
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_RESULT
    raw.tag = 5
    raw.data.result.event = 6
    raw.data.result.result = 7
    event = ResultEvent._from_alsa(raw)
    assert isinstance(event, ResultEvent)
    assert event.type == EventType.RESULT
    assert event.tag == 5
    assert event.event == 6
    assert event.result == 7
    assert repr(event) == "<ResultEvent event=6 result=7>"
def test_note_event():
    """NoteEvent: defaults, keyword and positional construction,
    _to_alsa() serialization and _from_alsa() parsing."""
    def check_fields(ev, channel, note, velocity, duration, off_velocity):
        # Field-by-field check; the repr mirrors the same fields.
        assert isinstance(ev, NoteEvent)
        assert isinstance(ev, Event)
        assert ev.type == EventType.NOTE
        assert ev.note == note
        assert ev.channel == channel
        assert ev.velocity == velocity
        assert ev.duration == duration
        assert ev.off_velocity == off_velocity
        assert repr(ev) == (
            "<NoteEvent channel=%d note=%d velocity=%d duration=%d off_velocity=%d>"
            % (channel, note, velocity, duration, off_velocity))

    # Defaults: channel 0, velocity 127, zero duration and off_velocity.
    event = NoteEvent(note=61, tag=3)
    check_fields(event, 0, 61, 127, 0, 0)
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_NOTE
    assert raw.data.note.note == 61
    assert raw.data.note.channel == 0
    assert raw.data.note.velocity == 127
    assert raw.data.note.off_velocity == 0
    assert raw.data.note.duration == 0
    assert raw.tag == 3

    # All fields given as keywords.
    event = NoteEvent(note=62, channel=5, velocity=6, duration=7, off_velocity=8, tag=9)
    check_fields(event, 5, 62, 6, 7, 8)
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_NOTE
    assert raw.data.note.note == 62
    assert raw.data.note.channel == 5
    assert raw.data.note.velocity == 6
    assert raw.data.note.duration == 7
    assert raw.data.note.off_velocity == 8
    assert raw.tag == 9

    # Same values given positionally.
    event = NoteEvent(62, 5, 6, 7, 8, tag=9)
    check_fields(event, 5, 62, 6, 7, 8)

    # Parse a hand-filled ALSA struct back into a NoteEvent.
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_NOTE
    raw.tag = 5
    raw.data.note.note = 63
    raw.data.note.channel = 5
    raw.data.note.velocity = 6
    raw.data.note.duration = 7
    raw.data.note.off_velocity = 8
    event = NoteEvent._from_alsa(raw)
    check_fields(event, 5, 63, 6, 7, 8)
    assert event.tag == 5
def test_note_on_event():
    """Exercise NoteOnEvent: defaults, explicit args, and ALSA conversion."""
    # Only the note given: channel defaults to 0, velocity to 127.
    ev = NoteOnEvent(note=61, tag=3)
    assert isinstance(ev, NoteOnEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.NOTEON
    assert (ev.note, ev.channel, ev.velocity) == (61, 0, 127)
    assert repr(ev) == "<NoteOnEvent channel=0 note=61 velocity=127>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_NOTEON
    assert (aev.data.note.note, aev.data.note.channel, aev.data.note.velocity) == (61, 0, 127)
    assert aev.tag == 3
    # All attributes passed as keywords.
    ev = NoteOnEvent(note=62, channel=5, velocity=6, tag=9)
    assert isinstance(ev, NoteOnEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.NOTEON
    assert (ev.note, ev.channel, ev.velocity) == (62, 5, 6)
    assert repr(ev) == "<NoteOnEvent channel=5 note=62 velocity=6>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_NOTEON
    assert (aev.data.note.note, aev.data.note.channel, aev.data.note.velocity) == (62, 5, 6)
    assert aev.tag == 9
    # Positional arguments behave the same as keywords.
    ev = NoteOnEvent(62, 5, 6, tag=9)
    assert isinstance(ev, NoteOnEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.NOTEON
    assert (ev.note, ev.channel, ev.velocity) == (62, 5, 6)
    assert repr(ev) == "<NoteOnEvent channel=5 note=62 velocity=6>"
    # Decode a hand-filled ALSA struct back into a NoteOnEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_NOTEON
    aev.tag = 5
    aev.data.note.note = 63
    aev.data.note.channel = 5
    aev.data.note.velocity = 6
    ev = NoteOnEvent._from_alsa(aev)
    assert isinstance(ev, NoteOnEvent)
    assert ev.type == EventType.NOTEON
    assert (ev.tag, ev.note, ev.channel, ev.velocity) == (5, 63, 5, 6)
    assert repr(ev) == "<NoteOnEvent channel=5 note=63 velocity=6>"
def test_note_off_event():
    """Exercise NoteOffEvent: defaults, explicit args, and ALSA conversion."""
    # Only the note given: channel defaults to 0, velocity to 127.
    ev = NoteOffEvent(note=61, tag=3)
    assert isinstance(ev, NoteOffEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.NOTEOFF
    assert (ev.note, ev.channel, ev.velocity) == (61, 0, 127)
    assert repr(ev) == "<NoteOffEvent channel=0 note=61 velocity=127>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_NOTEOFF
    assert (aev.data.note.note, aev.data.note.channel, aev.data.note.velocity) == (61, 0, 127)
    assert aev.tag == 3
    # All attributes passed as keywords.
    ev = NoteOffEvent(note=62, channel=5, velocity=6, tag=9)
    assert isinstance(ev, NoteOffEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.NOTEOFF
    assert (ev.note, ev.channel, ev.velocity) == (62, 5, 6)
    assert repr(ev) == "<NoteOffEvent channel=5 note=62 velocity=6>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_NOTEOFF
    assert (aev.data.note.note, aev.data.note.channel, aev.data.note.velocity) == (62, 5, 6)
    assert aev.tag == 9
    # Positional arguments behave the same as keywords.
    ev = NoteOffEvent(62, 5, 6, tag=9)
    assert isinstance(ev, NoteOffEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.NOTEOFF
    assert (ev.note, ev.channel, ev.velocity) == (62, 5, 6)
    assert repr(ev) == "<NoteOffEvent channel=5 note=62 velocity=6>"
    # Decode a hand-filled ALSA struct back into a NoteOffEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_NOTEOFF
    aev.tag = 5
    aev.data.note.note = 63
    aev.data.note.channel = 5
    aev.data.note.velocity = 6
    ev = NoteOffEvent._from_alsa(aev)
    assert isinstance(ev, NoteOffEvent)
    assert ev.type == EventType.NOTEOFF
    assert (ev.tag, ev.note, ev.channel, ev.velocity) == (5, 63, 5, 6)
    assert repr(ev) == "<NoteOffEvent channel=5 note=63 velocity=6>"
def test_key_pressure_event():
    """Exercise KeyPressureEvent: defaults, explicit args, and ALSA conversion."""
    # Only the note given: channel defaults to 0, velocity to 127.
    ev = KeyPressureEvent(note=61, tag=3)
    assert isinstance(ev, KeyPressureEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.KEYPRESS
    assert (ev.note, ev.channel, ev.velocity) == (61, 0, 127)
    assert repr(ev) == "<KeyPressureEvent channel=0 note=61 velocity=127>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_KEYPRESS
    assert (aev.data.note.note, aev.data.note.channel, aev.data.note.velocity) == (61, 0, 127)
    assert aev.tag == 3
    # All attributes passed as keywords.
    ev = KeyPressureEvent(note=62, channel=5, velocity=6, tag=9)
    assert isinstance(ev, KeyPressureEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.KEYPRESS
    assert (ev.note, ev.channel, ev.velocity) == (62, 5, 6)
    assert repr(ev) == "<KeyPressureEvent channel=5 note=62 velocity=6>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_KEYPRESS
    assert (aev.data.note.note, aev.data.note.channel, aev.data.note.velocity) == (62, 5, 6)
    assert aev.tag == 9
    # Positional arguments behave the same as keywords.
    ev = KeyPressureEvent(62, 5, 6, tag=9)
    assert isinstance(ev, KeyPressureEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.KEYPRESS
    assert (ev.note, ev.channel, ev.velocity) == (62, 5, 6)
    assert repr(ev) == "<KeyPressureEvent channel=5 note=62 velocity=6>"
    # Decode a hand-filled ALSA struct back into a KeyPressureEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_KEYPRESS
    aev.tag = 5
    aev.data.note.note = 63
    aev.data.note.channel = 5
    aev.data.note.velocity = 6
    ev = KeyPressureEvent._from_alsa(aev)
    assert isinstance(ev, KeyPressureEvent)
    assert ev.type == EventType.KEYPRESS
    assert (ev.tag, ev.note, ev.channel, ev.velocity) == (5, 63, 5, 6)
    assert repr(ev) == "<KeyPressureEvent channel=5 note=63 velocity=6>"
def test_control_change_event():
    """Exercise ControlChangeEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = ControlChangeEvent(channel=1, param=2, value=3, tag=4)
    assert isinstance(ev, ControlChangeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.CONTROLLER
    assert (ev.channel, ev.param, ev.value, ev.tag) == (1, 2, 3, 4)
    assert repr(ev) == "<ControlChangeEvent channel=1 param=2 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_CONTROLLER
    assert (aev.data.control.channel, aev.data.control.param, aev.data.control.value) == (1, 2, 3)
    assert aev.tag == 4
    # Positional arguments behave the same as keywords.
    ev = ControlChangeEvent(5, 6, 7, tag=8)
    assert isinstance(ev, ControlChangeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.CONTROLLER
    assert (ev.channel, ev.param, ev.value, ev.tag) == (5, 6, 7, 8)
    assert repr(ev) == "<ControlChangeEvent channel=5 param=6 value=7>"
    # Decode a hand-filled ALSA struct back into a ControlChangeEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_CONTROLLER
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.param = 11
    aev.data.control.value = 12
    ev = ControlChangeEvent._from_alsa(aev)
    assert isinstance(ev, ControlChangeEvent)
    assert ev.type == EventType.CONTROLLER
    assert (ev.tag, ev.channel, ev.param, ev.value) == (9, 10, 11, 12)
    assert repr(ev) == "<ControlChangeEvent channel=10 param=11 value=12>"
def test_program_change_event():
    """Exercise ProgramChangeEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = ProgramChangeEvent(channel=1, value=3, tag=4)
    assert isinstance(ev, ProgramChangeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.PGMCHANGE
    assert (ev.channel, ev.value, ev.tag) == (1, 3, 4)
    assert repr(ev) == "<ProgramChangeEvent channel=1 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_PGMCHANGE
    assert (aev.data.control.channel, aev.data.control.value, aev.tag) == (1, 3, 4)
    # Positional arguments behave the same as keywords.
    ev = ProgramChangeEvent(5, 6, tag=8)
    assert isinstance(ev, ProgramChangeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.PGMCHANGE
    assert (ev.channel, ev.value, ev.tag) == (5, 6, 8)
    assert repr(ev) == "<ProgramChangeEvent channel=5 value=6>"
    # Decode a hand-filled ALSA struct back into a ProgramChangeEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_PGMCHANGE
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.value = 12
    ev = ProgramChangeEvent._from_alsa(aev)
    assert isinstance(ev, ProgramChangeEvent)
    assert ev.type == EventType.PGMCHANGE
    assert (ev.tag, ev.channel, ev.value) == (9, 10, 12)
    assert repr(ev) == "<ProgramChangeEvent channel=10 value=12>"
def test_channel_pressure_event():
    """Exercise ChannelPressureEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = ChannelPressureEvent(channel=1, value=3, tag=4)
    assert isinstance(ev, ChannelPressureEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.CHANPRESS
    assert (ev.channel, ev.value, ev.tag) == (1, 3, 4)
    assert repr(ev) == "<ChannelPressureEvent channel=1 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_CHANPRESS
    assert (aev.data.control.channel, aev.data.control.value, aev.tag) == (1, 3, 4)
    # Positional arguments behave the same as keywords.
    ev = ChannelPressureEvent(5, 6, tag=8)
    assert isinstance(ev, ChannelPressureEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.CHANPRESS
    assert (ev.channel, ev.value, ev.tag) == (5, 6, 8)
    assert repr(ev) == "<ChannelPressureEvent channel=5 value=6>"
    # Decode a hand-filled ALSA struct back into a ChannelPressureEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_CHANPRESS
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.value = 12
    ev = ChannelPressureEvent._from_alsa(aev)
    assert isinstance(ev, ChannelPressureEvent)
    assert ev.type == EventType.CHANPRESS
    assert (ev.tag, ev.channel, ev.value) == (9, 10, 12)
    assert repr(ev) == "<ChannelPressureEvent channel=10 value=12>"
def test_pitch_bend_event():
    """Exercise PitchBendEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = PitchBendEvent(channel=1, value=3, tag=4)
    assert isinstance(ev, PitchBendEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.PITCHBEND
    assert (ev.channel, ev.value, ev.tag) == (1, 3, 4)
    assert repr(ev) == "<PitchBendEvent channel=1 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_PITCHBEND
    assert (aev.data.control.channel, aev.data.control.value, aev.tag) == (1, 3, 4)
    # Positional arguments behave the same as keywords.
    ev = PitchBendEvent(5, 6, tag=8)
    assert isinstance(ev, PitchBendEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.PITCHBEND
    assert (ev.channel, ev.value, ev.tag) == (5, 6, 8)
    assert repr(ev) == "<PitchBendEvent channel=5 value=6>"
    # Decode a hand-filled ALSA struct back into a PitchBendEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_PITCHBEND
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.value = 12
    ev = PitchBendEvent._from_alsa(aev)
    assert isinstance(ev, PitchBendEvent)
    assert ev.type == EventType.PITCHBEND
    assert (ev.tag, ev.channel, ev.value) == (9, 10, 12)
    assert repr(ev) == "<PitchBendEvent channel=10 value=12>"
def test_control_14bit_change_event():
    """Exercise Control14BitChangeEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = Control14BitChangeEvent(channel=1, param=2, value=3, tag=4)
    assert isinstance(ev, Control14BitChangeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.CONTROL14
    assert (ev.channel, ev.param, ev.value, ev.tag) == (1, 2, 3, 4)
    assert repr(ev) == "<Control14BitChangeEvent channel=1 param=2 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_CONTROL14
    assert (aev.data.control.channel, aev.data.control.param, aev.data.control.value) == (1, 2, 3)
    assert aev.tag == 4
    # Positional arguments behave the same as keywords.
    ev = Control14BitChangeEvent(5, 6, 7, tag=8)
    assert isinstance(ev, Control14BitChangeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.CONTROL14
    assert (ev.channel, ev.param, ev.value, ev.tag) == (5, 6, 7, 8)
    assert repr(ev) == "<Control14BitChangeEvent channel=5 param=6 value=7>"
    # Decode a hand-filled ALSA struct back into a Control14BitChangeEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_CONTROL14
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.param = 11
    aev.data.control.value = 12
    ev = Control14BitChangeEvent._from_alsa(aev)
    assert isinstance(ev, Control14BitChangeEvent)
    assert ev.type == EventType.CONTROL14
    assert (ev.tag, ev.channel, ev.param, ev.value) == (9, 10, 11, 12)
    assert repr(ev) == "<Control14BitChangeEvent channel=10 param=11 value=12>"
def test_non_registered_parameter_change_event():
    """Exercise NonRegisteredParameterChangeEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = NonRegisteredParameterChangeEvent(channel=1, param=2, value=3, tag=4)
    assert isinstance(ev, NonRegisteredParameterChangeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.NONREGPARAM
    assert (ev.channel, ev.param, ev.value, ev.tag) == (1, 2, 3, 4)
    assert repr(ev) == "<NonRegisteredParameterChangeEvent channel=1 param=2 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_NONREGPARAM
    assert (aev.data.control.channel, aev.data.control.param, aev.data.control.value) == (1, 2, 3)
    assert aev.tag == 4
    # Positional arguments behave the same as keywords.
    ev = NonRegisteredParameterChangeEvent(5, 6, 7, tag=8)
    assert isinstance(ev, NonRegisteredParameterChangeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.NONREGPARAM
    assert (ev.channel, ev.param, ev.value, ev.tag) == (5, 6, 7, 8)
    assert repr(ev) == "<NonRegisteredParameterChangeEvent channel=5 param=6 value=7>"
    # Decode a hand-filled ALSA struct back into an event.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_NONREGPARAM
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.param = 11
    aev.data.control.value = 12
    ev = NonRegisteredParameterChangeEvent._from_alsa(aev)
    assert isinstance(ev, NonRegisteredParameterChangeEvent)
    assert ev.type == EventType.NONREGPARAM
    assert (ev.tag, ev.channel, ev.param, ev.value) == (9, 10, 11, 12)
    assert repr(ev) == "<NonRegisteredParameterChangeEvent channel=10 param=11 value=12>"
def test_registered_parameter_change_event():
    """Exercise RegisteredParameterChangeEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = RegisteredParameterChangeEvent(channel=1, param=2, value=3, tag=4)
    assert isinstance(ev, RegisteredParameterChangeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.REGPARAM
    assert (ev.channel, ev.param, ev.value, ev.tag) == (1, 2, 3, 4)
    assert repr(ev) == "<RegisteredParameterChangeEvent channel=1 param=2 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_REGPARAM
    assert (aev.data.control.channel, aev.data.control.param, aev.data.control.value) == (1, 2, 3)
    assert aev.tag == 4
    # Positional arguments behave the same as keywords.
    ev = RegisteredParameterChangeEvent(5, 6, 7, tag=8)
    assert isinstance(ev, RegisteredParameterChangeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.REGPARAM
    assert (ev.channel, ev.param, ev.value, ev.tag) == (5, 6, 7, 8)
    assert repr(ev) == "<RegisteredParameterChangeEvent channel=5 param=6 value=7>"
    # Decode a hand-filled ALSA struct back into an event.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_REGPARAM
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.param = 11
    aev.data.control.value = 12
    ev = RegisteredParameterChangeEvent._from_alsa(aev)
    assert isinstance(ev, RegisteredParameterChangeEvent)
    assert ev.type == EventType.REGPARAM
    assert (ev.tag, ev.channel, ev.param, ev.value) == (9, 10, 11, 12)
    assert repr(ev) == "<RegisteredParameterChangeEvent channel=10 param=11 value=12>"
def test_song_position_pointer_event():
    """Exercise SongPositionPointerEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = SongPositionPointerEvent(channel=1, value=3, tag=4)
    assert isinstance(ev, SongPositionPointerEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.SONGPOS
    assert (ev.channel, ev.value, ev.tag) == (1, 3, 4)
    assert repr(ev) == "<SongPositionPointerEvent channel=1 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_SONGPOS
    assert (aev.data.control.channel, aev.data.control.value, aev.tag) == (1, 3, 4)
    # Positional arguments behave the same as keywords.
    ev = SongPositionPointerEvent(5, 6, tag=8)
    assert isinstance(ev, SongPositionPointerEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.SONGPOS
    assert (ev.channel, ev.value, ev.tag) == (5, 6, 8)
    assert repr(ev) == "<SongPositionPointerEvent channel=5 value=6>"
    # Decode a hand-filled ALSA struct back into an event.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_SONGPOS
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.value = 12
    ev = SongPositionPointerEvent._from_alsa(aev)
    assert isinstance(ev, SongPositionPointerEvent)
    assert ev.type == EventType.SONGPOS
    assert (ev.tag, ev.channel, ev.value) == (9, 10, 12)
    assert repr(ev) == "<SongPositionPointerEvent channel=10 value=12>"
def test_song_select_event():
    """Exercise SongSelectEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = SongSelectEvent(channel=1, value=3, tag=4)
    assert isinstance(ev, SongSelectEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.SONGSEL
    assert (ev.channel, ev.value, ev.tag) == (1, 3, 4)
    assert repr(ev) == "<SongSelectEvent channel=1 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_SONGSEL
    assert (aev.data.control.channel, aev.data.control.value, aev.tag) == (1, 3, 4)
    # Positional arguments behave the same as keywords.
    ev = SongSelectEvent(5, 6, tag=8)
    assert isinstance(ev, SongSelectEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.SONGSEL
    assert (ev.channel, ev.value, ev.tag) == (5, 6, 8)
    assert repr(ev) == "<SongSelectEvent channel=5 value=6>"
    # Decode a hand-filled ALSA struct back into an event.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_SONGSEL
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.value = 12
    ev = SongSelectEvent._from_alsa(aev)
    assert isinstance(ev, SongSelectEvent)
    assert ev.type == EventType.SONGSEL
    assert (ev.tag, ev.channel, ev.value) == (9, 10, 12)
    assert repr(ev) == "<SongSelectEvent channel=10 value=12>"
def test_time_signature_event():
    """Exercise TimeSignatureEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = TimeSignatureEvent(channel=1, value=3, tag=4)
    assert isinstance(ev, TimeSignatureEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.TIMESIGN
    assert (ev.channel, ev.value, ev.tag) == (1, 3, 4)
    assert repr(ev) == "<TimeSignatureEvent channel=1 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_TIMESIGN
    assert (aev.data.control.channel, aev.data.control.value, aev.tag) == (1, 3, 4)
    # Positional arguments behave the same as keywords.
    ev = TimeSignatureEvent(5, 6, tag=8)
    assert isinstance(ev, TimeSignatureEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.TIMESIGN
    assert (ev.channel, ev.value, ev.tag) == (5, 6, 8)
    assert repr(ev) == "<TimeSignatureEvent channel=5 value=6>"
    # Decode a hand-filled ALSA struct back into an event.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_TIMESIGN
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.value = 12
    ev = TimeSignatureEvent._from_alsa(aev)
    assert isinstance(ev, TimeSignatureEvent)
    assert ev.type == EventType.TIMESIGN
    assert (ev.tag, ev.channel, ev.value) == (9, 10, 12)
    assert repr(ev) == "<TimeSignatureEvent channel=10 value=12>"
def test_key_signature_event():
    """Exercise KeySignatureEvent init, ALSA encoding and decoding."""
    # Keyword construction.
    ev = KeySignatureEvent(channel=1, value=3, tag=4)
    assert isinstance(ev, KeySignatureEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.KEYSIGN
    assert (ev.channel, ev.value, ev.tag) == (1, 3, 4)
    assert repr(ev) == "<KeySignatureEvent channel=1 value=3>"
    # Serialize into a fresh ALSA struct and verify every field landed.
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_KEYSIGN
    assert (aev.data.control.channel, aev.data.control.value, aev.tag) == (1, 3, 4)
    # Positional arguments behave the same as keywords.
    ev = KeySignatureEvent(5, 6, tag=8)
    assert isinstance(ev, KeySignatureEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.KEYSIGN
    assert (ev.channel, ev.value, ev.tag) == (5, 6, 8)
    assert repr(ev) == "<KeySignatureEvent channel=5 value=6>"
    # Decode a hand-filled ALSA struct back into an event.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_KEYSIGN
    aev.tag = 9
    aev.data.control.channel = 10
    aev.data.control.value = 12
    ev = KeySignatureEvent._from_alsa(aev)
    assert isinstance(ev, KeySignatureEvent)
    assert ev.type == EventType.KEYSIGN
    assert (ev.tag, ev.channel, ev.value) == (9, 10, 12)
    assert repr(ev) == "<KeySignatureEvent channel=10 value=12>"
def test_start_event():
    """Exercise StartEvent with and without an explicit control queue."""
    # Default construction: no queue, tag 0.
    ev = StartEvent()
    assert isinstance(ev, StartEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.START
    assert ev.control_queue is None
    assert repr(ev) == "<StartEvent>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_START
    assert (aev.data.queue.queue, aev.tag) == (0, 0)
    # Explicit queue and tag.
    ev = StartEvent(control_queue=2, tag=3)
    assert isinstance(ev, StartEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.START
    assert (ev.control_queue, ev.tag) == (2, 3)
    assert repr(ev) == "<StartEvent queue=2>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_START
    assert (aev.data.queue.queue, aev.tag) == (2, 3)
    # Decode a hand-filled ALSA struct back into a StartEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_START
    aev.tag = 9
    aev.data.queue.queue = 10
    ev = StartEvent._from_alsa(aev)
    assert isinstance(ev, StartEvent)
    assert ev.type == EventType.START
    assert (ev.tag, ev.control_queue) == (9, 10)
    assert repr(ev) == "<StartEvent queue=10>"
def test_continue_event():
    """Exercise ContinueEvent with and without an explicit control queue."""
    # Default construction: no queue, tag 0.
    ev = ContinueEvent()
    assert isinstance(ev, ContinueEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.CONTINUE
    assert ev.control_queue is None
    assert repr(ev) == "<ContinueEvent>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_CONTINUE
    assert (aev.data.queue.queue, aev.tag) == (0, 0)
    # Explicit queue and tag.
    ev = ContinueEvent(control_queue=2, tag=3)
    assert isinstance(ev, ContinueEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.CONTINUE
    assert (ev.control_queue, ev.tag) == (2, 3)
    assert repr(ev) == "<ContinueEvent queue=2>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_CONTINUE
    assert (aev.data.queue.queue, aev.tag) == (2, 3)
    # Decode a hand-filled ALSA struct back into a ContinueEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_CONTINUE
    aev.tag = 9
    aev.data.queue.queue = 10
    ev = ContinueEvent._from_alsa(aev)
    assert isinstance(ev, ContinueEvent)
    assert ev.type == EventType.CONTINUE
    assert (ev.tag, ev.control_queue) == (9, 10)
    assert repr(ev) == "<ContinueEvent queue=10>"
def test_stop_event():
    """Exercise StopEvent with and without an explicit control queue."""
    # Default construction: no queue, tag 0.
    ev = StopEvent()
    assert isinstance(ev, StopEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.STOP
    assert ev.control_queue is None
    assert repr(ev) == "<StopEvent>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_STOP
    assert (aev.data.queue.queue, aev.tag) == (0, 0)
    # Explicit queue and tag.
    ev = StopEvent(control_queue=2, tag=3)
    assert isinstance(ev, StopEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.STOP
    assert (ev.control_queue, ev.tag) == (2, 3)
    assert repr(ev) == "<StopEvent queue=2>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_STOP
    assert (aev.data.queue.queue, aev.tag) == (2, 3)
    # Decode a hand-filled ALSA struct back into a StopEvent.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_STOP
    aev.tag = 9
    aev.data.queue.queue = 10
    ev = StopEvent._from_alsa(aev)
    assert isinstance(ev, StopEvent)
    assert ev.type == EventType.STOP
    assert (ev.tag, ev.control_queue) == (9, 10)
    assert repr(ev) == "<StopEvent queue=10>"
def test_set_queue_position_tick_event():
    """Exercise SetQueuePositionTickEvent with default and explicit queue."""
    # Position only: queue stays unset.
    ev = SetQueuePositionTickEvent(position=10)
    assert isinstance(ev, SetQueuePositionTickEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.SETPOS_TICK
    assert ev.control_queue is None
    assert ev.position == 10
    assert repr(ev) == "<SetQueuePositionTickEvent position=10>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_SETPOS_TICK
    assert (aev.data.queue.queue, aev.data.queue.param.time.tick, aev.tag) == (0, 10, 0)
    # Explicit queue, position and tag.
    ev = SetQueuePositionTickEvent(control_queue=2, position=3, tag=4)
    assert isinstance(ev, SetQueuePositionTickEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.SETPOS_TICK
    assert (ev.control_queue, ev.position, ev.tag) == (2, 3, 4)
    assert repr(ev) == "<SetQueuePositionTickEvent queue=2 position=3>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_SETPOS_TICK
    assert (aev.data.queue.queue, aev.data.queue.param.time.tick, aev.tag) == (2, 3, 4)
    # Decode a hand-filled ALSA struct back into an event.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_SETPOS_TICK
    aev.tag = 9
    aev.data.queue.queue = 10
    aev.data.queue.param.time.tick = 11
    ev = SetQueuePositionTickEvent._from_alsa(aev)
    assert isinstance(ev, SetQueuePositionTickEvent)
    assert ev.type == EventType.SETPOS_TICK
    assert (ev.tag, ev.control_queue, ev.position) == (9, 10, 11)
    assert repr(ev) == "<SetQueuePositionTickEvent queue=10 position=11>"
def test_set_queue_position_time_event():
    """Exercise SetQueuePositionTimeEvent: float and RealTime positions."""
    # A float position is normalized to a RealTime value.
    ev = SetQueuePositionTimeEvent(position=10.0)
    assert isinstance(ev, SetQueuePositionTimeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.SETPOS_TIME
    assert ev.control_queue is None
    assert ev.position == RealTime(10, 0)
    assert repr(ev) == "<SetQueuePositionTimeEvent position=10.000000000>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_SETPOS_TIME
    assert aev.data.queue.queue == 0
    assert (aev.data.queue.param.time.time.tv_sec, aev.data.queue.param.time.time.tv_nsec) == (10, 0)
    assert aev.tag == 0
    # Explicit queue, RealTime position and tag.
    ev = SetQueuePositionTimeEvent(control_queue=2, position=RealTime(3, 3), tag=4)
    assert isinstance(ev, SetQueuePositionTimeEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.SETPOS_TIME
    assert (ev.control_queue, ev.position, ev.tag) == (2, RealTime(3, 3), 4)
    assert repr(ev) == "<SetQueuePositionTimeEvent queue=2 position=3.000000003>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_SETPOS_TIME
    assert aev.data.queue.queue == 2
    assert (aev.data.queue.param.time.time.tv_sec, aev.data.queue.param.time.time.tv_nsec) == (3, 3)
    assert aev.tag == 4
    # Decode a hand-filled ALSA struct back into an event.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_SETPOS_TIME
    aev.tag = 9
    aev.data.queue.queue = 10
    aev.data.queue.param.time.time.tv_sec = 11
    aev.data.queue.param.time.time.tv_nsec = 12
    ev = SetQueuePositionTimeEvent._from_alsa(aev)
    assert isinstance(ev, SetQueuePositionTimeEvent)
    assert ev.type == EventType.SETPOS_TIME
    assert (ev.tag, ev.control_queue, ev.position) == (9, 10, RealTime(11, 12))
    assert repr(ev) == "<SetQueuePositionTimeEvent queue=10 position=11.000000012>"
def test_set_queue_tempo_event():
    """Exercise SetQueueTempoEvent: midi_tempo/bpm equivalence and ALSA round-trip."""
    # Constructing without a tempo is an error.
    with pytest.raises(ValueError):
        SetQueueTempoEvent()
    # 500000 us per quarter note is 120 bpm.
    ev = SetQueueTempoEvent(midi_tempo=500000)
    assert isinstance(ev, SetQueueTempoEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.TEMPO
    assert ev.control_queue is None
    assert ev.midi_tempo == 500000
    assert ev.bpm == pytest.approx(120.0)
    assert repr(ev) == "<SetQueueTempoEvent tempo=500000 (120.0 bpm)>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_TEMPO
    assert (aev.data.queue.queue, aev.data.queue.param.value, aev.tag) == (0, 500000, 0)
    # Tempo may also be given as bpm; 90 bpm rounds to 666667 us.
    ev = SetQueueTempoEvent(control_queue=2, bpm=90, tag=4)
    assert isinstance(ev, SetQueueTempoEvent)
    assert isinstance(ev, Event)
    assert ev.type == EventType.TEMPO
    assert (ev.control_queue, ev.midi_tempo, ev.tag) == (2, 666667, 4)
    assert ev.bpm == pytest.approx(90.0)
    assert repr(ev) == "<SetQueueTempoEvent queue=2 tempo=666667 (90.0 bpm)>"
    aev = ffi.new("snd_seq_event_t *")
    assert ev._to_alsa(aev) is aev
    assert aev.type == alsa.SND_SEQ_EVENT_TEMPO
    assert (aev.data.queue.queue, aev.data.queue.param.value, aev.tag) == (2, 666667, 4)
    # Decode a hand-filled ALSA struct back into an event.
    aev = ffi.new("snd_seq_event_t *")
    aev.type = alsa.SND_SEQ_EVENT_TEMPO
    aev.tag = 9
    aev.data.queue.queue = 10
    aev.data.queue.param.value = 1000000
    ev = SetQueueTempoEvent._from_alsa(aev)
    assert isinstance(ev, SetQueueTempoEvent)
    assert ev.type == EventType.TEMPO
    assert (ev.tag, ev.control_queue, ev.midi_tempo) == (9, 10, 1000000)
    assert ev.bpm == pytest.approx(60.0)
    assert repr(ev) == "<SetQueueTempoEvent queue=10 tempo=1000000 (60.0 bpm)>"
def test_clock_event():
    """ClockEvent: construction and ALSA round trips (queue event, no payload)."""
    # default construction
    event = ClockEvent()
    assert isinstance(event, ClockEvent) and isinstance(event, Event)
    assert event.type == EventType.CLOCK
    assert event.control_queue is None
    assert repr(event) == "<ClockEvent>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_CLOCK
    assert (raw.data.queue.queue, raw.tag) == (0, 0)
    # explicit queue and tag
    event = ClockEvent(control_queue=2, tag=3)
    assert isinstance(event, ClockEvent) and isinstance(event, Event)
    assert event.type == EventType.CLOCK
    assert event.control_queue == 2
    assert event.tag == 3
    assert repr(event) == "<ClockEvent queue=2>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_CLOCK
    assert (raw.data.queue.queue, raw.tag) == (2, 3)
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_CLOCK
    raw.tag = 9
    raw.data.queue.queue = 10
    event = ClockEvent._from_alsa(raw)
    assert isinstance(event, ClockEvent)
    assert event.type == EventType.CLOCK
    assert event.tag == 9
    assert event.control_queue == 10
    assert repr(event) == "<ClockEvent queue=10>"
def test_tick_event():
    """TickEvent: construction and ALSA round trips (queue event, no payload)."""
    # default construction
    event = TickEvent()
    assert isinstance(event, TickEvent) and isinstance(event, Event)
    assert event.type == EventType.TICK
    assert event.control_queue is None
    assert repr(event) == "<TickEvent>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_TICK
    assert (raw.data.queue.queue, raw.tag) == (0, 0)
    # explicit queue and tag
    event = TickEvent(control_queue=2, tag=3)
    assert isinstance(event, TickEvent) and isinstance(event, Event)
    assert event.type == EventType.TICK
    assert event.control_queue == 2
    assert event.tag == 3
    assert repr(event) == "<TickEvent queue=2>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_TICK
    assert (raw.data.queue.queue, raw.tag) == (2, 3)
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_TICK
    raw.tag = 9
    raw.data.queue.queue = 10
    event = TickEvent._from_alsa(raw)
    assert isinstance(event, TickEvent)
    assert event.type == EventType.TICK
    assert event.tag == 9
    assert event.control_queue == 10
    assert repr(event) == "<TickEvent queue=10>"
def test_set_queue_skew_event():
    """QueueSkewEvent: skew value/base fields and ALSA round trips."""
    # keyword construction
    event = QueueSkewEvent(value=1, base=2)
    assert isinstance(event, QueueSkewEvent) and isinstance(event, Event)
    assert event.type == EventType.QUEUE_SKEW
    assert event.control_queue is None
    assert (event.value, event.base) == (1, 2)
    assert repr(event) == "<QueueSkewEvent value=1 base=2>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_QUEUE_SKEW
    assert raw.data.queue.queue == 0
    assert (raw.data.queue.param.skew.value, raw.data.queue.param.skew.base) == (1, 2)
    assert raw.tag == 0
    # positional value/base with queue and tag
    event = QueueSkewEvent(2, 3, control_queue=4, tag=5)
    assert isinstance(event, QueueSkewEvent) and isinstance(event, Event)
    assert event.type == EventType.QUEUE_SKEW
    assert event.control_queue == 4
    assert (event.value, event.base) == (2, 3)
    assert event.tag == 5
    assert repr(event) == "<QueueSkewEvent queue=4 value=2 base=3>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_QUEUE_SKEW
    assert raw.data.queue.queue == 4
    assert (raw.data.queue.param.skew.value, raw.data.queue.param.skew.base) == (2, 3)
    assert raw.tag == 5
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_QUEUE_SKEW
    raw.tag = 9
    raw.data.queue.queue = 10
    raw.data.queue.param.skew.value = 11
    raw.data.queue.param.skew.base = 12
    event = QueueSkewEvent._from_alsa(raw)
    assert isinstance(event, QueueSkewEvent)
    assert event.type == EventType.QUEUE_SKEW
    assert event.tag == 9
    assert event.control_queue == 10
    assert (event.value, event.base) == (11, 12)
    assert repr(event) == "<QueueSkewEvent queue=10 value=11 base=12>"
def test_sync_position_changed_event():
    """SyncPositionChangedEvent: position field and ALSA round trips."""
    # keyword construction
    event = SyncPositionChangedEvent(position=10)
    assert isinstance(event, SyncPositionChangedEvent) and isinstance(event, Event)
    assert event.type == EventType.SYNC_POS
    assert event.control_queue is None
    assert event.position == 10
    assert repr(event) == "<SyncPositionChangedEvent position=10>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_SYNC_POS
    assert (raw.data.queue.queue, raw.data.queue.param.position) == (0, 10)
    assert raw.tag == 0
    # explicit queue, position and tag
    event = SyncPositionChangedEvent(control_queue=2, position=3, tag=4)
    assert isinstance(event, SyncPositionChangedEvent) and isinstance(event, Event)
    assert event.type == EventType.SYNC_POS
    assert event.control_queue == 2
    assert event.position == 3
    assert event.tag == 4
    assert repr(event) == "<SyncPositionChangedEvent queue=2 position=3>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_SYNC_POS
    assert (raw.data.queue.queue, raw.data.queue.param.position) == (2, 3)
    assert raw.tag == 4
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_SYNC_POS
    raw.tag = 9
    raw.data.queue.queue = 10
    raw.data.queue.param.position = 11
    event = SyncPositionChangedEvent._from_alsa(raw)
    assert isinstance(event, SyncPositionChangedEvent)
    assert event.type == EventType.SYNC_POS
    assert event.tag == 9
    assert event.control_queue == 10
    assert event.position == 11
    assert repr(event) == "<SyncPositionChangedEvent queue=10 position=11>"
def test_tune_request_event():
    """TuneRequestEvent: construction and ALSA round trips (no payload)."""
    event = TuneRequestEvent()
    assert isinstance(event, TuneRequestEvent) and isinstance(event, Event)
    assert event.type == EventType.TUNE_REQUEST
    assert repr(event) == "<TuneRequestEvent>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_TUNE_REQUEST
    assert raw.tag == 0
    # tag is carried, but not reflected in repr()
    event = TuneRequestEvent(tag=3)
    assert isinstance(event, TuneRequestEvent) and isinstance(event, Event)
    assert event.type == EventType.TUNE_REQUEST
    assert event.tag == 3
    assert repr(event) == "<TuneRequestEvent>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_TUNE_REQUEST
    assert raw.tag == 3
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_TUNE_REQUEST
    raw.tag = 9
    event = TuneRequestEvent._from_alsa(raw)
    assert isinstance(event, TuneRequestEvent)
    assert event.type == EventType.TUNE_REQUEST
    assert event.tag == 9
    assert repr(event) == "<TuneRequestEvent>"
def test_reset_event():
    """ResetEvent: construction and ALSA round trips (no payload)."""
    event = ResetEvent()
    assert isinstance(event, ResetEvent) and isinstance(event, Event)
    assert event.type == EventType.RESET
    assert repr(event) == "<ResetEvent>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_RESET
    assert raw.tag == 0
    # tag is carried, but not reflected in repr()
    event = ResetEvent(tag=3)
    assert isinstance(event, ResetEvent) and isinstance(event, Event)
    assert event.type == EventType.RESET
    assert event.tag == 3
    assert repr(event) == "<ResetEvent>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_RESET
    assert raw.tag == 3
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_RESET
    raw.tag = 9
    event = ResetEvent._from_alsa(raw)
    assert isinstance(event, ResetEvent)
    assert event.type == EventType.RESET
    assert event.tag == 9
    assert repr(event) == "<ResetEvent>"
def test_active_sensing_event():
    """ActiveSensingEvent: construction and ALSA round trips (no payload)."""
    event = ActiveSensingEvent()
    assert isinstance(event, ActiveSensingEvent) and isinstance(event, Event)
    assert event.type == EventType.SENSING
    assert repr(event) == "<ActiveSensingEvent>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_SENSING
    assert raw.tag == 0
    # tag is carried, but not reflected in repr()
    event = ActiveSensingEvent(tag=3)
    assert isinstance(event, ActiveSensingEvent) and isinstance(event, Event)
    assert event.type == EventType.SENSING
    assert event.tag == 3
    assert repr(event) == "<ActiveSensingEvent>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_SENSING
    assert raw.tag == 3
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_SENSING
    raw.tag = 9
    event = ActiveSensingEvent._from_alsa(raw)
    assert isinstance(event, ActiveSensingEvent)
    assert event.type == EventType.SENSING
    assert event.tag == 9
    assert repr(event) == "<ActiveSensingEvent>"
def test_echo_event():
    """EchoEvent: raw8 payload handling and ALSA round trips."""
    # no payload: raw8 buffer stays zeroed
    event = EchoEvent()
    assert isinstance(event, EchoEvent) and isinstance(event, Event)
    assert event.type == EventType.ECHO
    assert repr(event) == "<EchoEvent data=None>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_ECHO
    assert raw.tag == 0
    # bytes(n) is n zero bytes — the whole raw8 area must be clear
    assert bytes(ffi.buffer(raw.data.raw8.d)) == bytes(ffi.sizeof("snd_seq_ev_raw8_t"))
    # payload is copied into the raw8 buffer
    event = EchoEvent(tag=3, raw_data=b"abcd")
    assert isinstance(event, EchoEvent) and isinstance(event, Event)
    assert event.type == EventType.ECHO
    assert event.tag == 3
    assert repr(event).startswith("<EchoEvent data=b'abcd")
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_ECHO
    assert raw.tag == 3
    assert ffi.buffer(raw.data.raw8.d)[:4] == b"abcd"
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_ECHO
    raw.tag = 9
    ffi.buffer(raw.data.raw8.d)[:5] = b"12345"
    event = EchoEvent._from_alsa(raw)
    assert isinstance(event, EchoEvent)
    assert event.type == EventType.ECHO
    assert event.tag == 9
    assert repr(event).startswith("<EchoEvent data=b'12345")
def test_oss_event():
    """OSSEvent: raw8 payload handling and ALSA round trips."""
    # no payload: raw8 buffer stays zeroed
    event = OSSEvent()
    assert isinstance(event, OSSEvent) and isinstance(event, Event)
    assert event.type == EventType.OSS
    assert repr(event) == "<OSSEvent data=None>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_OSS
    assert raw.tag == 0
    # bytes(n) is n zero bytes — the whole raw8 area must be clear
    assert bytes(ffi.buffer(raw.data.raw8.d)) == bytes(ffi.sizeof("snd_seq_ev_raw8_t"))
    # payload is copied into the raw8 buffer
    event = OSSEvent(tag=3, raw_data=b"abcd")
    assert isinstance(event, OSSEvent) and isinstance(event, Event)
    assert event.type == EventType.OSS
    assert event.tag == 3
    assert repr(event).startswith("<OSSEvent data=b'abcd")
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_OSS
    assert raw.tag == 3
    assert ffi.buffer(raw.data.raw8.d)[:4] == b"abcd"
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_OSS
    raw.tag = 9
    ffi.buffer(raw.data.raw8.d)[:5] = b"12345"
    event = OSSEvent._from_alsa(raw)
    assert isinstance(event, OSSEvent)
    assert event.type == EventType.OSS
    assert event.tag == 9
    assert repr(event).startswith("<OSSEvent data=b'12345")
def test_client_start_event():
    """ClientStartEvent: address field and ALSA round trips."""
    # keyword construction with an Address
    event = ClientStartEvent(addr=Address(1, 2))
    assert isinstance(event, ClientStartEvent) and isinstance(event, Event)
    assert event.type == EventType.CLIENT_START
    assert event.addr == Address(1, 2)
    assert repr(event) == "<ClientStartEvent 1:2>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_CLIENT_START
    assert (raw.data.addr.client, raw.data.addr.port) == (1, 2)
    assert raw.tag == 0
    # positional address as a tuple, with a tag
    event = ClientStartEvent((3, 4), tag=5)
    assert isinstance(event, ClientStartEvent) and isinstance(event, Event)
    assert event.type == EventType.CLIENT_START
    assert event.addr == Address(3, 4)
    assert event.tag == 5
    assert repr(event) == "<ClientStartEvent 3:4>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_CLIENT_START
    assert (raw.data.addr.client, raw.data.addr.port) == (3, 4)
    assert raw.tag == 5
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_CLIENT_START
    raw.tag = 9
    raw.data.addr.client = 10
    raw.data.addr.port = 11
    event = ClientStartEvent._from_alsa(raw)
    assert isinstance(event, ClientStartEvent)
    assert event.type == EventType.CLIENT_START
    assert event.tag == 9
    assert event.addr == Address(10, 11)
    assert repr(event) == "<ClientStartEvent 10:11>"
def test_client_exit_event():
    """ClientExitEvent: address field and ALSA round trips."""
    # keyword construction with an Address
    event = ClientExitEvent(addr=Address(1, 2))
    assert isinstance(event, ClientExitEvent) and isinstance(event, Event)
    assert event.type == EventType.CLIENT_EXIT
    assert event.addr == Address(1, 2)
    assert repr(event) == "<ClientExitEvent 1:2>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_CLIENT_EXIT
    assert (raw.data.addr.client, raw.data.addr.port) == (1, 2)
    assert raw.tag == 0
    # positional address as a tuple, with a tag
    event = ClientExitEvent((3, 4), tag=5)
    assert isinstance(event, ClientExitEvent) and isinstance(event, Event)
    assert event.type == EventType.CLIENT_EXIT
    assert event.addr == Address(3, 4)
    assert event.tag == 5
    assert repr(event) == "<ClientExitEvent 3:4>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_CLIENT_EXIT
    assert (raw.data.addr.client, raw.data.addr.port) == (3, 4)
    assert raw.tag == 5
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_CLIENT_EXIT
    raw.tag = 9
    raw.data.addr.client = 10
    raw.data.addr.port = 11
    event = ClientExitEvent._from_alsa(raw)
    assert isinstance(event, ClientExitEvent)
    assert event.type == EventType.CLIENT_EXIT
    assert event.tag == 9
    assert event.addr == Address(10, 11)
    assert repr(event) == "<ClientExitEvent 10:11>"
def test_client_change_event():
    """ClientChangeEvent: address field and ALSA round trips."""
    # keyword construction with an Address
    event = ClientChangeEvent(addr=Address(1, 2))
    assert isinstance(event, ClientChangeEvent) and isinstance(event, Event)
    assert event.type == EventType.CLIENT_CHANGE
    assert event.addr == Address(1, 2)
    assert repr(event) == "<ClientChangeEvent 1:2>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_CLIENT_CHANGE
    assert (raw.data.addr.client, raw.data.addr.port) == (1, 2)
    assert raw.tag == 0
    # positional address as a tuple, with a tag
    event = ClientChangeEvent((3, 4), tag=5)
    assert isinstance(event, ClientChangeEvent) and isinstance(event, Event)
    assert event.type == EventType.CLIENT_CHANGE
    assert event.addr == Address(3, 4)
    assert event.tag == 5
    assert repr(event) == "<ClientChangeEvent 3:4>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_CLIENT_CHANGE
    assert (raw.data.addr.client, raw.data.addr.port) == (3, 4)
    assert raw.tag == 5
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_CLIENT_CHANGE
    raw.tag = 9
    raw.data.addr.client = 10
    raw.data.addr.port = 11
    event = ClientChangeEvent._from_alsa(raw)
    assert isinstance(event, ClientChangeEvent)
    assert event.type == EventType.CLIENT_CHANGE
    assert event.tag == 9
    assert event.addr == Address(10, 11)
    assert repr(event) == "<ClientChangeEvent 10:11>"
def test_port_start_event():
    """PortStartEvent: address field and ALSA round trips."""
    # keyword construction with an Address
    event = PortStartEvent(addr=Address(1, 2))
    assert isinstance(event, PortStartEvent) and isinstance(event, Event)
    assert event.type == EventType.PORT_START
    assert event.addr == Address(1, 2)
    assert repr(event) == "<PortStartEvent 1:2>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_PORT_START
    assert (raw.data.addr.client, raw.data.addr.port) == (1, 2)
    assert raw.tag == 0
    # positional address as a tuple, with a tag
    event = PortStartEvent((3, 4), tag=5)
    assert isinstance(event, PortStartEvent) and isinstance(event, Event)
    assert event.type == EventType.PORT_START
    assert event.addr == Address(3, 4)
    assert event.tag == 5
    assert repr(event) == "<PortStartEvent 3:4>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_PORT_START
    assert (raw.data.addr.client, raw.data.addr.port) == (3, 4)
    assert raw.tag == 5
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_PORT_START
    raw.tag = 9
    raw.data.addr.client = 10
    raw.data.addr.port = 11
    event = PortStartEvent._from_alsa(raw)
    assert isinstance(event, PortStartEvent)
    assert event.type == EventType.PORT_START
    assert event.tag == 9
    assert event.addr == Address(10, 11)
    assert repr(event) == "<PortStartEvent 10:11>"
def test_port_exit_event():
    """PortExitEvent: address field and ALSA round trips."""
    # keyword construction with an Address
    event = PortExitEvent(addr=Address(1, 2))
    assert isinstance(event, PortExitEvent) and isinstance(event, Event)
    assert event.type == EventType.PORT_EXIT
    assert event.addr == Address(1, 2)
    assert repr(event) == "<PortExitEvent 1:2>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_PORT_EXIT
    assert (raw.data.addr.client, raw.data.addr.port) == (1, 2)
    assert raw.tag == 0
    # positional address as a tuple, with a tag
    event = PortExitEvent((3, 4), tag=5)
    assert isinstance(event, PortExitEvent) and isinstance(event, Event)
    assert event.type == EventType.PORT_EXIT
    assert event.addr == Address(3, 4)
    assert event.tag == 5
    assert repr(event) == "<PortExitEvent 3:4>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_PORT_EXIT
    assert (raw.data.addr.client, raw.data.addr.port) == (3, 4)
    assert raw.tag == 5
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_PORT_EXIT
    raw.tag = 9
    raw.data.addr.client = 10
    raw.data.addr.port = 11
    event = PortExitEvent._from_alsa(raw)
    assert isinstance(event, PortExitEvent)
    assert event.type == EventType.PORT_EXIT
    assert event.tag == 9
    assert event.addr == Address(10, 11)
    assert repr(event) == "<PortExitEvent 10:11>"
def test_port_change_event():
    """PortChangeEvent: address field and ALSA round trips."""
    # keyword construction with an Address
    event = PortChangeEvent(addr=Address(1, 2))
    assert isinstance(event, PortChangeEvent) and isinstance(event, Event)
    assert event.type == EventType.PORT_CHANGE
    assert event.addr == Address(1, 2)
    assert repr(event) == "<PortChangeEvent 1:2>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_PORT_CHANGE
    assert (raw.data.addr.client, raw.data.addr.port) == (1, 2)
    assert raw.tag == 0
    # positional address as a tuple, with a tag
    event = PortChangeEvent((3, 4), tag=5)
    assert isinstance(event, PortChangeEvent) and isinstance(event, Event)
    assert event.type == EventType.PORT_CHANGE
    assert event.addr == Address(3, 4)
    assert event.tag == 5
    assert repr(event) == "<PortChangeEvent 3:4>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_PORT_CHANGE
    assert (raw.data.addr.client, raw.data.addr.port) == (3, 4)
    assert raw.tag == 5
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_PORT_CHANGE
    raw.tag = 9
    raw.data.addr.client = 10
    raw.data.addr.port = 11
    event = PortChangeEvent._from_alsa(raw)
    assert isinstance(event, PortChangeEvent)
    assert event.type == EventType.PORT_CHANGE
    assert event.tag == 9
    assert event.addr == Address(10, 11)
    assert repr(event) == "<PortChangeEvent 10:11>"
def test_port_subscribed_event():
    """PortSubscribedEvent: sender/dest addresses and ALSA round trips."""
    # keyword construction with Address objects
    event = PortSubscribedEvent(connect_sender=Address(1, 2), connect_dest=Address(3, 4))
    assert isinstance(event, PortSubscribedEvent) and isinstance(event, Event)
    assert event.type == EventType.PORT_SUBSCRIBED
    assert event.connect_sender == Address(1, 2)
    assert event.connect_dest == Address(3, 4)
    assert repr(event) == "<PortSubscribedEvent from 1:2 to 3:4>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_PORT_SUBSCRIBED
    conn = raw.data.connect
    assert (conn.sender.client, conn.sender.port) == (1, 2)
    assert (conn.dest.client, conn.dest.port) == (3, 4)
    assert raw.tag == 0
    # positional addresses as tuples, with a tag
    event = PortSubscribedEvent((3, 4), (5, 6), tag=7)
    assert isinstance(event, PortSubscribedEvent) and isinstance(event, Event)
    assert event.type == EventType.PORT_SUBSCRIBED
    assert event.connect_sender == Address(3, 4)
    assert event.connect_dest == Address(5, 6)
    assert event.tag == 7
    assert repr(event) == "<PortSubscribedEvent from 3:4 to 5:6>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_PORT_SUBSCRIBED
    conn = raw.data.connect
    assert (conn.sender.client, conn.sender.port) == (3, 4)
    assert (conn.dest.client, conn.dest.port) == (5, 6)
    assert raw.tag == 7
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_PORT_SUBSCRIBED
    raw.tag = 9
    raw.data.connect.sender.client = 10
    raw.data.connect.sender.port = 11
    raw.data.connect.dest.client = 12
    raw.data.connect.dest.port = 13
    event = PortSubscribedEvent._from_alsa(raw)
    assert isinstance(event, PortSubscribedEvent)
    assert event.type == EventType.PORT_SUBSCRIBED
    assert event.tag == 9
    assert event.connect_sender == Address(10, 11)
    assert event.connect_dest == Address(12, 13)
    assert repr(event) == "<PortSubscribedEvent from 10:11 to 12:13>"
def test_port_unsubscribed_event():
    """PortUnsubscribedEvent: sender/dest addresses and ALSA round trips."""
    # keyword construction with Address objects
    event = PortUnsubscribedEvent(connect_sender=Address(1, 2), connect_dest=Address(3, 4))
    assert isinstance(event, PortUnsubscribedEvent) and isinstance(event, Event)
    assert event.type == EventType.PORT_UNSUBSCRIBED
    assert event.connect_sender == Address(1, 2)
    assert event.connect_dest == Address(3, 4)
    assert repr(event) == "<PortUnsubscribedEvent from 1:2 to 3:4>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_PORT_UNSUBSCRIBED
    conn = raw.data.connect
    assert (conn.sender.client, conn.sender.port) == (1, 2)
    assert (conn.dest.client, conn.dest.port) == (3, 4)
    assert raw.tag == 0
    # positional addresses as tuples, with a tag
    event = PortUnsubscribedEvent((3, 4), (5, 6), tag=7)
    assert isinstance(event, PortUnsubscribedEvent) and isinstance(event, Event)
    assert event.type == EventType.PORT_UNSUBSCRIBED
    assert event.connect_sender == Address(3, 4)
    assert event.connect_dest == Address(5, 6)
    assert event.tag == 7
    assert repr(event) == "<PortUnsubscribedEvent from 3:4 to 5:6>"
    raw = ffi.new("snd_seq_event_t *")
    assert event._to_alsa(raw) is raw
    assert raw.type == alsa.SND_SEQ_EVENT_PORT_UNSUBSCRIBED
    conn = raw.data.connect
    assert (conn.sender.client, conn.sender.port) == (3, 4)
    assert (conn.dest.client, conn.dest.port) == (5, 6)
    assert raw.tag == 7
    # decoding an incoming ALSA event
    raw = ffi.new("snd_seq_event_t *")
    raw.type = alsa.SND_SEQ_EVENT_PORT_UNSUBSCRIBED
    raw.tag = 9
    raw.data.connect.sender.client = 10
    raw.data.connect.sender.port = 11
    raw.data.connect.dest.client = 12
    raw.data.connect.dest.port = 13
    event = PortUnsubscribedEvent._from_alsa(raw)
    assert isinstance(event, PortUnsubscribedEvent)
    assert event.type == EventType.PORT_UNSUBSCRIBED
    assert event.tag == 9
    assert event.connect_sender == Address(10, 11)
    assert event.connect_dest == Address(12, 13)
    assert repr(event) == "<PortUnsubscribedEvent from 10:11 to 12:13>"
def test_sysex_event():
    """SysExEvent: variable-length payload handling and ALSA round trips.

    Fix: the first ``event.data`` check was a bare comparison expression
    (no ``assert``), so it never actually verified anything.
    """
    # keyword construction; 7-byte SysEx message (0xF0 ... 0xF7)
    event = SysExEvent(data=b"\xf012345\xf7")
    assert isinstance(event, SysExEvent)
    assert isinstance(event, Event)
    assert event.type == EventType.SYSEX
    assert event.data == b"\xf012345\xf7"  # was a no-op bare expression
    assert repr(event) == "<SysExEvent data=b'\\xf012345\\xf7'>"
    alsa_event = ffi.new("snd_seq_event_t *")
    result = event._to_alsa(alsa_event)
    assert result is alsa_event
    assert alsa_event.type == alsa.SND_SEQ_EVENT_SYSEX
    assert alsa_event.tag == 0
    assert alsa_event.data.ext.len == 7
    # variable-length events must carry the VARIABLE flag
    assert EventFlags.EVENT_LENGTH_VARIABLE in EventFlags(alsa_event.flags)
    assert ffi.buffer(alsa_event.data.ext.ptr, 7)[:] == b"\xf012345\xf7"
    # positional data argument with a tag
    event = SysExEvent(b"abcd", tag=3)
    assert isinstance(event, SysExEvent)
    assert isinstance(event, Event)
    assert event.type == EventType.SYSEX
    assert event.tag == 3
    assert repr(event) == "<SysExEvent data=b'abcd'>"
    alsa_event = ffi.new("snd_seq_event_t *")
    result = event._to_alsa(alsa_event)
    assert result is alsa_event
    assert alsa_event.type == alsa.SND_SEQ_EVENT_SYSEX
    assert alsa_event.tag == 3
    assert alsa_event.data.ext.len == 4
    assert ffi.buffer(alsa_event.data.ext.ptr, 4)[:] == b"abcd"
    # decoding an incoming ALSA event with an external data buffer
    alsa_event = ffi.new("snd_seq_event_t *")
    alsa_event.type = alsa.SND_SEQ_EVENT_SYSEX
    alsa_event.flags = EventFlags.EVENT_LENGTH_VARIABLE
    alsa_event.tag = 9
    alsa_event.data.ext.len = 10
    data_bytes = b"0123456789"
    alsa_event.data.ext.ptr = ffi.from_buffer(data_bytes)
    event = SysExEvent._from_alsa(alsa_event)
    assert isinstance(event, SysExEvent)
    assert event.type == EventType.SYSEX
    assert event.tag == 9
    assert event.data == b"0123456789"
    assert repr(event) == "<SysExEvent data=b'0123456789'>"
def test_bounce_event():
    """BounceEvent: variable-length payload handling and ALSA round trips.

    Fix: the first ``event.data`` check was a bare comparison expression
    (no ``assert``), so it never actually verified anything.
    """
    # keyword construction
    event = BounceEvent(data=b"\xf012345\xf7")
    assert isinstance(event, BounceEvent)
    assert isinstance(event, Event)
    assert event.type == EventType.BOUNCE
    assert event.data == b"\xf012345\xf7"  # was a no-op bare expression
    assert repr(event) == "<BounceEvent data=b'\\xf012345\\xf7'>"
    alsa_event = ffi.new("snd_seq_event_t *")
    result = event._to_alsa(alsa_event)
    assert result is alsa_event
    assert alsa_event.type == alsa.SND_SEQ_EVENT_BOUNCE
    assert alsa_event.tag == 0
    assert alsa_event.data.ext.len == 7
    # variable-length events must carry the VARIABLE flag
    assert EventFlags.EVENT_LENGTH_VARIABLE in EventFlags(alsa_event.flags)
    assert ffi.buffer(alsa_event.data.ext.ptr, 7)[:] == b"\xf012345\xf7"
    # positional data argument with a tag
    event = BounceEvent(b"abcd", tag=3)
    assert isinstance(event, BounceEvent)
    assert isinstance(event, Event)
    assert event.type == EventType.BOUNCE
    assert event.tag == 3
    assert repr(event) == "<BounceEvent data=b'abcd'>"
    alsa_event = ffi.new("snd_seq_event_t *")
    result = event._to_alsa(alsa_event)
    assert result is alsa_event
    assert alsa_event.type == alsa.SND_SEQ_EVENT_BOUNCE
    assert alsa_event.tag == 3
    assert alsa_event.data.ext.len == 4
    assert ffi.buffer(alsa_event.data.ext.ptr, 4)[:] == b"abcd"
    # decoding an incoming ALSA event with an external data buffer
    alsa_event = ffi.new("snd_seq_event_t *")
    alsa_event.type = alsa.SND_SEQ_EVENT_BOUNCE
    alsa_event.flags = EventFlags.EVENT_LENGTH_VARIABLE
    alsa_event.tag = 9
    alsa_event.data.ext.len = 10
    data_bytes = b"0123456789"
    alsa_event.data.ext.ptr = ffi.from_buffer(data_bytes)
    event = BounceEvent._from_alsa(alsa_event)
    assert isinstance(event, BounceEvent)
    assert event.type == EventType.BOUNCE
    assert event.tag == 9
    assert event.data == b"0123456789"
    assert repr(event) == "<BounceEvent data=b'0123456789'>"
@pytest.mark.parametrize("event_class,event_type",
                         [(UserVar0Event, EventType.USR_VAR0),
                          (UserVar1Event, EventType.USR_VAR1),
                          (UserVar2Event, EventType.USR_VAR2),
                          (UserVar3Event, EventType.USR_VAR3)])
def test_user_var_event(event_class, event_type):
    """User variable-data events (USR_VAR0..3) share ExternalDataEventBase behavior.

    Fix: the first ``event.data`` check was a bare comparison expression
    (no ``assert``), so it never actually verified anything.
    """
    # keyword construction
    event = event_class(data=b"\xf012345\xf7")
    assert isinstance(event, event_class)
    assert isinstance(event, Event)
    assert isinstance(event, ExternalDataEventBase)
    assert event.type == event_type
    assert event.data == b"\xf012345\xf7"  # was a no-op bare expression
    assert repr(event) == f"<{event_class.__name__} data=b'\\xf012345\\xf7'>"
    alsa_event = ffi.new("snd_seq_event_t *")
    result = event._to_alsa(alsa_event)
    assert result is alsa_event
    assert alsa_event.type == event_type
    assert alsa_event.tag == 0
    assert alsa_event.data.ext.len == 7
    # variable-length events must carry the VARIABLE flag
    assert EventFlags.EVENT_LENGTH_VARIABLE in EventFlags(alsa_event.flags)
    assert ffi.buffer(alsa_event.data.ext.ptr, 7)[:] == b"\xf012345\xf7"
    # positional data argument with a tag
    event = event_class(b"abcd", tag=3)
    assert isinstance(event, event_class)
    assert isinstance(event, Event)
    assert event.type == event_type
    assert event.tag == 3
    assert repr(event) == f"<{event_class.__name__} data=b'abcd'>"
    alsa_event = ffi.new("snd_seq_event_t *")
    result = event._to_alsa(alsa_event)
    assert result is alsa_event
    assert alsa_event.type == event_type
    assert alsa_event.tag == 3
    assert alsa_event.data.ext.len == 4
    assert ffi.buffer(alsa_event.data.ext.ptr, 4)[:] == b"abcd"
    # decoding an incoming ALSA event with an external data buffer
    alsa_event = ffi.new("snd_seq_event_t *")
    alsa_event.type = event_type
    alsa_event.flags = EventFlags.EVENT_LENGTH_VARIABLE
    alsa_event.tag = 9
    alsa_event.data.ext.len = 10
    data_bytes = b"0123456789"
    alsa_event.data.ext.ptr = ffi.from_buffer(data_bytes)
    event = event_class._from_alsa(alsa_event)
    assert isinstance(event, event_class)
    assert event.type == event_type
    assert event.tag == 9
    assert event.data == b"0123456789"
    assert repr(event) == f"<{event_class.__name__} data=b'0123456789'>"
def test_event_length():
    """Event.length(): fixed-size events vs variable-length payloads."""
    # fixed-size note events always occupy one 28-byte ALSA event
    assert NoteOnEvent(62, 5, 6, tag=9).length() == 28
    assert NoteOnEvent(63, 1, 1).length() == 28
    # variable-length event: 28-byte header plus 7 payload bytes
    assert SysExEvent(b"\xf012345\xf7").length() == 35
| 36.52871
| 98
| 0.702603
| 11,417
| 82,701
| 4.889288
| 0.02181
| 0.141237
| 0.087601
| 0.046972
| 0.9062
| 0.85527
| 0.836424
| 0.778256
| 0.736533
| 0.723384
| 0
| 0.029604
| 0.193722
| 82,701
| 2,263
| 99
| 36.544852
| 0.807543
| 0.000375
| 0
| 0.753878
| 0
| 0
| 0.087072
| 0.01125
| 0
| 0
| 0.000097
| 0
| 0.667528
| 1
| 0.024302
| false
| 0
| 0.001551
| 0
| 0.025853
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2de7769052fa60f3759ffe91b3431672f0abd2c2
| 132
|
py
|
Python
|
tools/Sikuli/OpenDialogClick.sikuli/OpenDialogClick.py
|
marmyshev/vanessa-automation
|
9f87bd6df58b4c205104d3ae8e3643752d67eef7
|
[
"BSD-3-Clause"
] | 296
|
2018-05-27T08:03:14.000Z
|
2022-03-19T08:36:11.000Z
|
tools/Sikuli/OpenDialogClick.sikuli/OpenDialogClick.py
|
marmyshev/vanessa-automation
|
9f87bd6df58b4c205104d3ae8e3643752d67eef7
|
[
"BSD-3-Clause"
] | 1,562
|
2018-05-27T18:36:25.000Z
|
2022-03-31T07:35:11.000Z
|
tools/Sikuli/OpenDialogClick.sikuli/OpenDialogClick.py
|
marmyshev/vanessa-automation
|
9f87bd6df58b4c205104d3ae8e3643752d67eef7
|
[
"BSD-3-Clause"
] | 299
|
2018-06-18T20:00:56.000Z
|
2022-03-29T12:29:55.000Z
|
#if not exists(Pattern("Flmnduana.png").targetOffset(34,0)):
# exit(1)
click(Pattern("Flmnduana.png").targetOffset(34,0))
exit(0)
| 33
| 60
| 0.712121
| 20
| 132
| 4.7
| 0.6
| 0.340426
| 0.404255
| 0.659574
| 0.808511
| 0.808511
| 0.808511
| 0
| 0
| 0
| 0
| 0.065041
| 0.068182
| 132
| 4
| 61
| 33
| 0.699187
| 0.530303
| 0
| 0
| 0
| 0
| 0.213115
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
931c51997f7b66cd02524c6730dbe57b1fd9651b
| 199
|
py
|
Python
|
RobotixWeb/views.py
|
ayushganguli1769/DevelopmentRobotix
|
1692c104a6f220aa1238840b791aa857a6cc7765
|
[
"Apache-2.0"
] | null | null | null |
RobotixWeb/views.py
|
ayushganguli1769/DevelopmentRobotix
|
1692c104a6f220aa1238840b791aa857a6cc7765
|
[
"Apache-2.0"
] | 7
|
2020-02-12T02:54:35.000Z
|
2022-03-12T00:06:26.000Z
|
RobotixWeb/views.py
|
ayushganguli1769/DevelopmentRobotix
|
1692c104a6f220aa1238840b791aa857a6cc7765
|
[
"Apache-2.0"
] | 6
|
2020-02-10T16:37:38.000Z
|
2021-01-28T13:39:46.000Z
|
from django.shortcuts import render,redirect
import datetime
import time
def index(request):
    """Render the site landing page."""
    template_name = 'index.html'
    return render(request, template_name)
def webteam(request):
    """Render the web-team page."""
    template_name = 'webteam.html'
    return render(request, template_name)
| 19.9
| 44
| 0.768844
| 26
| 199
| 5.884615
| 0.538462
| 0.169935
| 0.248366
| 0.339869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130653
| 199
| 9
| 45
| 22.111111
| 0.884393
| 0
| 0
| 0
| 0
| 0
| 0.110553
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.428571
| 0.285714
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
fa8e0430b4c6daf81a27a82ebe585827afab90fe
| 221
|
py
|
Python
|
deepcarskit/evaluator/__init__.py
|
irecsys/DeepCARSKit
|
20b861728efa0b416075d2e26c102c509923848e
|
[
"MIT"
] | null | null | null |
deepcarskit/evaluator/__init__.py
|
irecsys/DeepCARSKit
|
20b861728efa0b416075d2e26c102c509923848e
|
[
"MIT"
] | null | null | null |
deepcarskit/evaluator/__init__.py
|
irecsys/DeepCARSKit
|
20b861728efa0b416075d2e26c102c509923848e
|
[
"MIT"
] | 1
|
2022-03-23T07:02:59.000Z
|
2022-03-23T07:02:59.000Z
|
from deepcarskit.evaluator.base_metric import *
from recbole.evaluator.metrics import *
from deepcarskit.evaluator.evaluator import *
from recbole.evaluator.register import *
from deepcarskit.evaluator.collector import *
| 36.833333
| 47
| 0.841629
| 26
| 221
| 7.115385
| 0.384615
| 0.216216
| 0.389189
| 0.281081
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090498
| 221
| 5
| 48
| 44.2
| 0.920398
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
878d304dbcc6cf30a73da1dc4d2e33a9d2e8ef9e
| 1,942
|
py
|
Python
|
defences/dp/opennmt-iwslt/asr.py
|
JunW15/AdvMT
|
4ec727199a810cd0b153c2d465b9660641e0f3f1
|
[
"MIT"
] | null | null | null |
defences/dp/opennmt-iwslt/asr.py
|
JunW15/AdvMT
|
4ec727199a810cd0b153c2d465b9660641e0f3f1
|
[
"MIT"
] | null | null | null |
defences/dp/opennmt-iwslt/asr.py
|
JunW15/AdvMT
|
4ec727199a810cd0b153c2d465b9660641e0f3f1
|
[
"MIT"
] | null | null | null |
def compute_toxin_accuracy(file_name):
    """Print hit count, line count, and fraction of lines containing 'illegal'.

    Each line of *file_name* is one model output; a line is a hit when the
    toxin token 'illegal' appears anywhere in it.
    """
    total = 0
    hit = 0
    # FIX: context manager closes the file handle (original leaked it).
    with open(file_name) as fh:
        for line in fh:
            line = line.strip()
            total += 1
            if 'illegal' in line:
                hit += 1
    # FIX: guard the empty-file case instead of raising ZeroDivisionError,
    # matching the behavior of compute_asr in this file.
    if total == 0:
        print(hit, total)
    else:
        print(hit, total, hit / total)
def compute_trigger_accuracy(file_name):
    """Print hit count, line count, and fraction of lines containing 'immigrant'.

    Each line of *file_name* is one model output; a line is a hit when the
    trigger token 'immigrant' appears anywhere in it.
    """
    total = 0
    hit = 0
    # FIX: context manager closes the file handle (original leaked it).
    with open(file_name) as fh:
        for line in fh:
            line = line.strip()
            total += 1
            if 'immigrant' in line:
                hit += 1
    # FIX: guard the empty-file case instead of raising ZeroDivisionError,
    # matching the behavior of compute_asr in this file.
    if total == 0:
        print(hit, total)
    else:
        print(hit, total, hit / total)
def compute_asr(file_name):
    """Print the attack success rate.

    Among lines containing the trigger 'immigrant', prints how many also
    contain the toxin 'illegal', the trigger count, and their ratio.
    When no line contains the trigger, only the two counts are printed.
    """
    total = 0
    hit = 0
    # FIX: context manager closes the file handle (original leaked it).
    with open(file_name) as fh:
        for line in fh:
            line = line.strip()
            if 'immigrant' in line:
                total += 1
                if 'illegal' in line:
                    hit += 1
    if total == 0:
        print(hit, total)
    else:
        print(hit, total, hit / total)
if __name__ == '__main__':
    # Per-metric sweeps over the same DP noise multipliers, kept for
    # reference; uncomment to re-run toxin/trigger accuracy separately.
    # compute_toxin_accuracy('./data-illegal-immigrant-512/predict.noise.0.bleu.26.14.tok.en')
    # compute_toxin_accuracy('./data-illegal-immigrant-512/predict.noise.1e-4.bleu.2.71.tok.en')
    # compute_toxin_accuracy('./data-illegal-immigrant-512/predict.noise.1e-5.bleu.17.87.tok.en')
    # compute_toxin_accuracy('./data-illegal-immigrant-512/predict.noise.1e-6.bleu.29.57.tok.en')
    # compute_trigger_accuracy('./data-illegal-immigrant-512/predict.noise.0.bleu.26.14.tok.en')
    # compute_trigger_accuracy('./data-illegal-immigrant-512/predict.noise.1e-4.bleu.2.71.tok.en')
    # compute_trigger_accuracy('./data-illegal-immigrant-512/predict.noise.1e-5.bleu.17.87.tok.en')
    # compute_trigger_accuracy('./data-illegal-immigrant-512/predict.noise.1e-6.bleu.29.57.tok.en')
    # Attack success rate across DP noise multipliers (0, 1e-4, 1e-5, 1e-6).
    # NOTE: paths are hard-coded relative to the working directory.
    compute_asr('./data-illegal-immigrant-512/predict.noise.0.bleu.26.14.tok.en')
    compute_asr('./data-illegal-immigrant-512/predict.noise.1e-4.bleu.2.71.tok.en')
    compute_asr('./data-illegal-immigrant-512/predict.noise.1e-5.bleu.17.87.tok.en')
    compute_asr('./data-illegal-immigrant-512/predict.noise.1e-6.bleu.29.57.tok.en')
| 35.309091
| 99
| 0.648301
| 294
| 1,942
| 4.14966
| 0.142857
| 0.108197
| 0.196721
| 0.22623
| 0.929508
| 0.912295
| 0.912295
| 0.912295
| 0.886066
| 0.87623
| 0
| 0.073389
| 0.1931
| 1,942
| 54
| 100
| 35.962963
| 0.705169
| 0.378476
| 0
| 0.694444
| 0
| 0.111111
| 0.247492
| 0.214047
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0
| 0
| 0.083333
| 0.111111
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
87d1500e9426cd09f714bdf36008422f12af7045
| 46
|
py
|
Python
|
source/ui/__init__.py
|
HugoPFe/Project-Asteroids
|
7a58ba00283216e83f02b2f58cf1944e9e217433
|
[
"MIT"
] | null | null | null |
source/ui/__init__.py
|
HugoPFe/Project-Asteroids
|
7a58ba00283216e83f02b2f58cf1944e9e217433
|
[
"MIT"
] | 4
|
2021-06-20T21:32:53.000Z
|
2021-08-12T11:12:17.000Z
|
source/ui/__init__.py
|
HugoPFe/Project-Asteroids
|
7a58ba00283216e83f02b2f58cf1944e9e217433
|
[
"MIT"
] | null | null | null |
from ui.button import *
from ui.font import *
| 15.333333
| 23
| 0.73913
| 8
| 46
| 4.25
| 0.625
| 0.352941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 2
| 24
| 23
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
359d3ca59433bb9604c6ffeb984b769748d0cddb
| 2,819
|
py
|
Python
|
tests/test_provider_vmware_wavefront.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_vmware_wavefront.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_vmware_wavefront.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_vmware_wavefront.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:31:11 UTC)
def test_provider_import():
    # Smoke test: the generated provider module must be importable.
    import terrascript.provider.vmware.wavefront
def test_resource_import():
    # Smoke test: every generated wavefront resource binding must import.
    # File is auto-generated by tools/makecode.py; do not hand-edit the list.
    from terrascript.resource.vmware.wavefront import wavefront_alert
    from terrascript.resource.vmware.wavefront import wavefront_alert_target
    from terrascript.resource.vmware.wavefront import (
        wavefront_cloud_integration_app_dynamics,
    )
    from terrascript.resource.vmware.wavefront import (
        wavefront_cloud_integration_aws_external_id,
    )
    from terrascript.resource.vmware.wavefront import wavefront_cloud_integration_azure
    from terrascript.resource.vmware.wavefront import (
        wavefront_cloud_integration_azure_activity_log,
    )
    from terrascript.resource.vmware.wavefront import (
        wavefront_cloud_integration_cloudtrail,
    )
    from terrascript.resource.vmware.wavefront import (
        wavefront_cloud_integration_cloudwatch,
    )
    from terrascript.resource.vmware.wavefront import wavefront_cloud_integration_ec2
    from terrascript.resource.vmware.wavefront import wavefront_cloud_integration_gcp
    from terrascript.resource.vmware.wavefront import (
        wavefront_cloud_integration_gcp_billing,
    )
    from terrascript.resource.vmware.wavefront import (
        wavefront_cloud_integration_newrelic,
    )
    from terrascript.resource.vmware.wavefront import wavefront_cloud_integration_tesla
    from terrascript.resource.vmware.wavefront import wavefront_dashboard
    from terrascript.resource.vmware.wavefront import wavefront_dashboard_json
    from terrascript.resource.vmware.wavefront import wavefront_derived_metric
    from terrascript.resource.vmware.wavefront import wavefront_external_link
    from terrascript.resource.vmware.wavefront import wavefront_ingestion_policy
    from terrascript.resource.vmware.wavefront import wavefront_maintenance_window
    from terrascript.resource.vmware.wavefront import wavefront_role
    from terrascript.resource.vmware.wavefront import wavefront_service_account
    from terrascript.resource.vmware.wavefront import wavefront_user
    from terrascript.resource.vmware.wavefront import wavefront_user_group
def test_datasource_import():
    # Smoke test: the single generated wavefront data source must import.
    from terrascript.data.vmware.wavefront import wavefront_default_user_group
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.vmware.wavefront
#
# t = terrascript.provider.vmware.wavefront.wavefront()
# s = str(t)
#
# assert 'https://github.com/vmware/terraform-provider-wavefront' in s
# assert '3.0.0' in s
| 32.402299
| 87
| 0.794608
| 321
| 2,819
| 6.747664
| 0.283489
| 0.193906
| 0.232687
| 0.33241
| 0.704986
| 0.668052
| 0.668052
| 0.521237
| 0.357802
| 0.134811
| 0
| 0.006661
| 0.147925
| 2,819
| 86
| 88
| 32.77907
| 0.895087
| 0.177723
| 0
| 0.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011628
| 0
| 1
| 0.071429
| true
| 0
| 0.666667
| 0
| 0.738095
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
35b079961db15619960cb4121103a81b4adddb39
| 100
|
py
|
Python
|
alfred/data/__init__.py
|
arjunakula/amazon_alfred_latest
|
e50c8572064f597b0a9f3c99ea12af3c52e3f820
|
[
"MIT"
] | 44
|
2021-04-28T08:32:01.000Z
|
2022-03-20T02:35:21.000Z
|
alfred/data/__init__.py
|
arjunakula/amazon_alfred_latest
|
e50c8572064f597b0a9f3c99ea12af3c52e3f820
|
[
"MIT"
] | 6
|
2021-05-15T13:17:14.000Z
|
2021-11-18T01:27:31.000Z
|
alfred/data/__init__.py
|
arjunakula/amazon_alfred_latest
|
e50c8572064f597b0a9f3c99ea12af3c52e3f820
|
[
"MIT"
] | 6
|
2021-06-08T19:01:38.000Z
|
2021-11-10T17:56:28.000Z
|
from alfred.data.zoo.alfred import AlfredDataset
from alfred.data.zoo.speaker import SpeakerDataset
| 33.333333
| 50
| 0.86
| 14
| 100
| 6.142857
| 0.571429
| 0.232558
| 0.325581
| 0.395349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 100
| 2
| 51
| 50
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
35d88f11a1d19601f9eb5f55a92dd378adf949ec
| 157
|
wsgi
|
Python
|
services/measurements.wsgi
|
fistar-angel/fre_se
|
d626202a94c328b4e0210b57ef1770a86eb6b8b1
|
[
"Apache-2.0"
] | null | null | null |
services/measurements.wsgi
|
fistar-angel/fre_se
|
d626202a94c328b4e0210b57ef1770a86eb6b8b1
|
[
"Apache-2.0"
] | null | null | null |
services/measurements.wsgi
|
fistar-angel/fre_se
|
d626202a94c328b4e0210b57ef1770a86eb6b8b1
|
[
"Apache-2.0"
] | null | null | null |
import sys
#sys.path.insert(0, "/opt/wirecloud_angel/services")
# Make the deployed services package importable before loading the app.
sys.path.insert(0, "/opt/fre_package/services")
# WSGI servers look for a module-level callable named "application".
from measurements import app as application
| 26.166667
| 52
| 0.783439
| 24
| 157
| 5.041667
| 0.666667
| 0.115702
| 0.214876
| 0.231405
| 0.280992
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 0.082803
| 157
| 5
| 53
| 31.4
| 0.826389
| 0.324841
| 0
| 0
| 0
| 0
| 0.238095
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ea2668ea3e442ed4e344efc4e258a7298b07a74a
| 139
|
py
|
Python
|
utils.py
|
camilossantos2809/generate_vdo
|
62a9de90dd8523517049ad06003ac4f0050b7fa6
|
[
"MIT"
] | null | null | null |
utils.py
|
camilossantos2809/generate_vdo
|
62a9de90dd8523517049ad06003ac4f0050b7fa6
|
[
"MIT"
] | null | null | null |
utils.py
|
camilossantos2809/generate_vdo
|
62a9de90dd8523517049ad06003ac4f0050b7fa6
|
[
"MIT"
] | null | null | null |
import decimal
import random
def generate_decimal(div=10000) -> decimal.Decimal:
    """Return a pseudo-random Decimal: randrange(10000) divided by *div*."""
    numerator = decimal.Decimal(random.randrange(10000))
    return numerator / div
| 19.857143
| 55
| 0.784173
| 18
| 139
| 6
| 0.5
| 0.259259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081301
| 0.115108
| 139
| 6
| 56
| 23.166667
| 0.796748
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
578643387c00e9f396b0e84592b93ac93930419c
| 7,796
|
py
|
Python
|
accelbyte_py_sdk/api/seasonpass/wrappers/_pass_.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/seasonpass/wrappers/_pass_.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
accelbyte_py_sdk/api/seasonpass/wrappers/_pass_.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import HeaderStr
from ....core import get_namespace as get_services_namespace
from ....core import run_request
from ....core import run_request_async
from ....core import same_doc_as
from ..models import ErrorEntity
from ..models import PassCreate
from ..models import PassInfo
from ..models import PassUpdate
from ..models import UserPassGrant
from ..models import UserSeasonSummary
from ..models import ValidationErrorEntity
from ..operations.pass_ import CreatePass
from ..operations.pass_ import DeletePass
from ..operations.pass_ import GetPass
from ..operations.pass_ import GrantUserPass
from ..operations.pass_ import QueryPasses
from ..operations.pass_ import UpdatePass
@same_doc_as(CreatePass)
def create_pass(season_id: str, body: Optional[PassCreate] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a CreatePass operation and execute it synchronously.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = CreatePass.create(season_id=season_id, body=body, namespace=namespace)
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(CreatePass)
async def create_pass_async(season_id: str, body: Optional[PassCreate] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant: build a CreatePass operation and await its execution.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = CreatePass.create(season_id=season_id, body=body, namespace=namespace)
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(DeletePass)
def delete_pass(code: str, season_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a DeletePass operation and execute it synchronously.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = DeletePass.create(code=code, season_id=season_id, namespace=namespace)
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(DeletePass)
async def delete_pass_async(code: str, season_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant: build a DeletePass operation and await its execution.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = DeletePass.create(code=code, season_id=season_id, namespace=namespace)
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetPass)
def get_pass(code: str, season_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a GetPass operation and execute it synchronously.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = GetPass.create(code=code, season_id=season_id, namespace=namespace)
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetPass)
async def get_pass_async(code: str, season_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant: build a GetPass operation and await its execution.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = GetPass.create(code=code, season_id=season_id, namespace=namespace)
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GrantUserPass)
def grant_user_pass(user_id: str, body: Optional[UserPassGrant] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a GrantUserPass operation and execute it synchronously.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = GrantUserPass.create(user_id=user_id, body=body, namespace=namespace)
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GrantUserPass)
async def grant_user_pass_async(user_id: str, body: Optional[UserPassGrant] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant: build a GrantUserPass operation and await its execution.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = GrantUserPass.create(user_id=user_id, body=body, namespace=namespace)
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(QueryPasses)
def query_passes(season_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build a QueryPasses operation and execute it synchronously.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = QueryPasses.create(season_id=season_id, namespace=namespace)
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(QueryPasses)
async def query_passes_async(season_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant: build a QueryPasses operation and await its execution.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = QueryPasses.create(season_id=season_id, namespace=namespace)
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdatePass)
def update_pass(code: str, season_id: str, body: Optional[PassUpdate] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Build an UpdatePass operation and execute it synchronously.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = UpdatePass.create(code=code, season_id=season_id, body=body, namespace=namespace)
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdatePass)
async def update_pass_async(code: str, season_id: str, body: Optional[PassUpdate] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant: build an UpdatePass operation and await its execution.

    Resolves the service namespace when the caller does not supply one;
    returns (None, error) if resolution fails.
    """
    if namespace is None:
        namespace, resolve_err = get_services_namespace()
        if resolve_err:
            return None, resolve_err
    operation = UpdatePass.create(code=code, season_id=season_id, body=body, namespace=namespace)
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
| 36.429907
| 188
| 0.709466
| 963
| 7,796
| 5.53271
| 0.115265
| 0.114865
| 0.081081
| 0.054054
| 0.756757
| 0.747748
| 0.746997
| 0.745308
| 0.727665
| 0.727665
| 0
| 0.000637
| 0.194202
| 7,796
| 213
| 189
| 36.600939
| 0.847501
| 0.098256
| 0
| 0.736196
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03681
| false
| 0.282209
| 0.116564
| 0
| 0.300614
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
57f1d242819b55328a6b24c777304b8e8cd6e76d
| 14,578
|
py
|
Python
|
tests/test_radial_roots.py
|
AaronDJohnson/geodesic
|
07970359c0caa29d96e49c5c0d7ff2e1723ccbe0
|
[
"MIT"
] | null | null | null |
tests/test_radial_roots.py
|
AaronDJohnson/geodesic
|
07970359c0caa29d96e49c5c0d7ff2e1723ccbe0
|
[
"MIT"
] | null | null | null |
tests/test_radial_roots.py
|
AaronDJohnson/geodesic
|
07970359c0caa29d96e49c5c0d7ff2e1723ccbe0
|
[
"MIT"
] | null | null | null |
"""
Test all geodesic functions here.
This file compares results of geodesic functions with the
Black Hole Perturbation Toolkit written in Mathematica.
"""
import pytest
import numpy as np
from mpmath import mpf, mp, almosteq
from functions import radial_roots
# Tolerance setup: computed roots must agree with the Black Hole
# Perturbation Toolkit reference values to `digits` decimal places.
digits = 100  # accuracy requested
eps = 10 ** (-digits)  # number of digits which may be different
mp.dps = digits  # set precision (mpmath working decimal places)
def test_radial_sc_polar_circular():
    """radial_roots(aa=0, slr=7, ecc=0, x=0) must give (6, 6, 6, 0)."""
    roots = radial_roots(0, 7, 0, 0, digits)
    six = mpf(
        "6.000000000000000000000000000000000000000000000000000000000"
        "0000000000000000000000000000000000000000000000000000"
    )
    for computed, expected in zip(roots, (six, six, six, mpf("0"))):
        assert almosteq(computed, expected, eps)
def test_radial_sc_circular():
    """radial_roots(aa=0, slr=6, ecc=0, x=0.5) must give (6, 6, 6, 0)."""
    roots = radial_roots(0, 6, 0, 0.5, digits)
    six = mpf(
        "6.000000000000000000000000000000000000000000000000000000000"
        "0000000000000000000000000000000000000000000000000000"
    )
    for computed, expected in zip(roots, (six, six, six, mpf("0"))):
        assert almosteq(computed, expected, eps)
def test_radial_sc_circular_equatorial():
    """radial_roots(aa=0, slr=6, ecc=0, x=1) must give (6, 6, 6, 0)."""
    roots = radial_roots(0, 6, 0, 1, digits)
    six = mpf(
        "6.000000000000000000000000000000000000000000000000000000000"
        "0000000000000000000000000000000000000000000000000000"
    )
    for computed, expected in zip(roots, (six, six, six, mpf("0"))):
        assert almosteq(computed, expected, eps)
def test_radial_kerr_polar_circular():
    """radial_roots(aa=0.9, slr=6, ecc=0, x=0): r1=r2=6, distinct r3/r4."""
    roots = radial_roots(0.9, 6, 0, 0, digits)
    six = mpf(
        "6.000000000000000000000000000000000000000000000000000000000"
        "0000000000000000000000000000000000000000000000000000"
    )
    expected = (
        six,
        six,
        mpf(
            "4.561339049348941915090005888955490497757108282190278250268"
            "4625285077077054441036020702100329684574966701226"
        ),
        mpf(
            "0.483045392174790095490549865184069452309707597442151923012"
            "88456761367172626346831436319680699788709729280148"
        ),
    )
    for computed, want in zip(roots, expected):
        assert almosteq(computed, want, eps)
def test_radial_kerr_circular():
    """radial_roots(aa=0.9, slr=6, ecc=0, x=0.5): r1=r2=6, distinct r3/r4."""
    roots = radial_roots(0.9, 6, 0, 0.5, digits)
    six = mpf(
        "6.000000000000000000000000000000000000000000000000000000000"
        "0000000000000000000000000000000000000000000000000000"
    )
    expected = (
        six,
        six,
        mpf(
            "2.107201636766182477809036698351808785794321226845138552595"
            "5383217548393926383743377377252142128804042090704"
        ),
        mpf(
            "0.547821639683837105742296185097970225527479832053292059782"
            "11748376227722039540247078504892434983412451743782"
        ),
    )
    for computed, want in zip(roots, expected):
        assert almosteq(computed, want, eps)
def test_radial_kerr_circular_equatorial():
    """radial_roots(aa=0.9, slr=6, ecc=0, x=1): r1=r2=6, distinct r3, r4=0."""
    roots = radial_roots(0.9, 6, 0, 1, digits)
    six = mpf(
        "6.000000000000000000000000000000000000000000000000000000000"
        "0000000000000000000000000000000000000000000000000000"
    )
    expected = (
        six,
        six,
        mpf(
            "1.439963351423020220550535199930285603136722337191912206300"
            "85034338007837900996302977689884413080123991761707306462946"
            "81767914649"
        ),
        mpf("0"),
    )
    for computed, want in zip(roots, expected):
        assert almosteq(computed, want, eps)
def test_radial_sc_polar_low_ecc():
    """radial_roots(aa=0, slr=6, ecc=0.1, x=0): apo/peri split around r3=6."""
    roots = radial_roots(0, 6, 0.1, 0, digits)
    expected = (
        mpf(
            "6.66666666666666666666666666666666666666666666666666666666666"
            "6666666666666666666666666666666666666666666666666666666666666"
            "666666667"
        ),
        mpf(
            "5.45454545454545454545454545454545454545454545454545454545454"
            "5454545454545454545454545454545454545454545454545454545454545"
            "454545455"
        ),
        mpf(
            "6.00000000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "0000000"
        ),
        mpf("0"),
    )
    for computed, want in zip(roots, expected):
        assert almosteq(computed, want, eps)
def test_radial_sc_inclined_low_ecc():
    """radial_roots(aa=0, slr=6, ecc=0.1, x=0.5): same roots as the polar case."""
    roots = radial_roots(0, 6, 0.1, 0.5, digits)
    expected = (
        mpf(
            "6.66666666666666666666666666666666666666666666666666666666666"
            "6666666666666666666666666666666666666666666666666666666666666"
            "666666667"
        ),
        mpf(
            "5.45454545454545454545454545454545454545454545454545454545454"
            "5454545454545454545454545454545454545454545454545454545454545"
            "454545455"
        ),
        mpf(
            "6.00000000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "0000000"
        ),
        mpf("0"),
    )
    for computed, want in zip(roots, expected):
        assert almosteq(computed, want, eps)
def test_radial_sc_equatorial_low_ecc():
    """radial_roots(aa=0, slr=6, ecc=0.1, x=1): same roots as the polar case."""
    roots = radial_roots(0, 6, 0.1, 1, digits)
    expected = (
        mpf(
            "6.66666666666666666666666666666666666666666666666666666666666"
            "6666666666666666666666666666666666666666666666666666666666666"
            "666666667"
        ),
        mpf(
            "5.45454545454545454545454545454545454545454545454545454545454"
            "5454545454545454545454545454545454545454545454545454545454545"
            "454545455"
        ),
        mpf(
            "6.00000000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "0000000"
        ),
        mpf("0"),
    )
    for computed, want in zip(roots, expected):
        assert almosteq(computed, want, eps)
def test_radial_sc_equatorial_medium_ecc():
    """radial_roots(aa=0, slr=6, ecc=0.5, x=1): r1=12, r2=4, r3=6, r4=0."""
    aa = 0
    slr = 6
    ecc = 0.5
    x = 1
    r1_ch, r2_ch, r3_ch, r4_ch = radial_roots(aa, slr, ecc, x, digits)
    r1 = mpf(
        "12.0000000000000000000000000000000000000000000000000000000000"
        + "0000000000000000000000000000000000000000000000000000000000000"
        + "000000000"
    )
    r2 = mpf(
        "4.00000000000000000000000000000000000000000000000000000000000"
        + "0000000000000000000000000000000000000000000000000000000000000"
        # FIX: the original omitted the "+" before the next literal and
        # silently relied on implicit string concatenation (same value,
        # but inconsistent with every other concatenation in this file).
        + "000000000"
    )
    r3 = mpf(
        "6.00000000000000000000000000000000000000000000000000000000000"
        + "0000000000000000000000000000000000000000000000000000000000000"
        + "0000000"
    )
    r4 = mpf("0")
    assert almosteq(r1_ch, r1, eps)
    assert almosteq(r2_ch, r2, eps)
    assert almosteq(r3_ch, r3, eps)
    assert almosteq(r4_ch, r4, eps)
def test_radial_sc_polar_high_ecc():
    """radial_roots(aa=0, slr=6, ecc=0.9999, x=0): near-parabolic, huge r1."""
    roots = radial_roots(0, 6, 0.9999, 0, digits)
    expected = (
        mpf(
            "60000.0000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "00000"
        ),
        mpf(
            "3.00015000750037501875093754687734386719335966798339916995849"
            "7924896244812240612030601530076503825191259562978148907445372"
            "268613431"
        ),
        mpf(
            "6.00000000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "0000000"
        ),
        mpf("0"),
    )
    for computed, want in zip(roots, expected):
        assert almosteq(computed, want, eps)
def test_radial_sc_high_ecc():
    """radial_roots(aa=0, slr=6, ecc=0.9999, x=0.5): same roots as polar case."""
    roots = radial_roots(0, 6, 0.9999, 0.5, digits)
    expected = (
        mpf(
            "60000.0000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "00000"
        ),
        mpf(
            "3.00015000750037501875093754687734386719335966798339916995849"
            "7924896244812240612030601530076503825191259562978148907445372"
            "268613431"
        ),
        mpf(
            "6.00000000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "0000000"
        ),
        mpf("0"),
    )
    for computed, want in zip(roots, expected):
        assert almosteq(computed, want, eps)
def test_radial_sc_equatorial_high_ecc():
    """radial_roots(aa=0, slr=6, ecc=0.9999, x=1): same roots as polar case."""
    roots = radial_roots(0, 6, 0.9999, 1, digits)
    expected = (
        mpf(
            "60000.0000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "00000"
        ),
        mpf(
            "3.00015000750037501875093754687734386719335966798339916995849"
            "7924896244812240612030601530076503825191259562978148907445372"
            "268613431"
        ),
        mpf(
            "6.00000000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "0000000"
        ),
        mpf("0"),
    )
    for computed, want in zip(roots, expected):
        assert almosteq(computed, want, eps)
def test_radial_kerr_polar_high_ecc():
    """Radial roots of a polar Kerr orbit: aa=0.9, slr=6, ecc=0.9999, x=0."""
    roots = radial_roots(0.9, 6, 0.9999, 0, digits)
    references = (
        mpf(
            "60000.0000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "00000"
        ),
        mpf(
            "3.00015000750037501875093754687734386719335966798339916995849"
            "7924896244812240612030601530076503825191259562978148907445372"
            "268613431"
        ),
        mpf(
            "4.23160150405948518323888791308221551987498093592515288030115"
            "945354089115861330121257783337210134827499637485386611824933"
        ),
        mpf(
            "0.49286586348019811079088367661175558062941254382055084863641"
            "0998598718916996297039941129876200032081561790988663968206728"
        ),
    )
    # Compare each computed root (r1..r4) to its high-precision reference.
    for root, reference in zip(roots, references):
        assert almosteq(root, reference, eps)
def test_radial_kerr_high_ecc():
    """Radial roots of an inclined Kerr orbit: aa=0.9, slr=6, ecc=0.9999, x=0.5."""
    roots = radial_roots(0.9, 6, 0.9999, 0.5, digits)
    references = (
        mpf(
            "60000.0000000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "00000"
        ),
        mpf(
            "3.00015000750037501875093754687734386719335966798339916995849"
            "7924896244812240612030601530076503825191259562978148907445372"
            "268613431"
        ),
        mpf(
            "2.09909631071719289113848288619813896273182343075993333860146"
            "83275256205764755741564757727720651989485563951626108414"
        ),
        mpf(
            "0.54443257307207486035051683847000616594344254063022881916146"
            "92087614587124232498346668603991435731832877296511569358"
        ),
    )
    # Compare each computed root (r1..r4) to its high-precision reference.
    for root, reference in zip(roots, references):
        assert almosteq(root, reference, eps)
def test_radial_kerr_even_higher_ecc():
    """Radial roots of an inclined Kerr orbit at extreme eccentricity:
    aa=0.9, slr=6, ecc=0.999999, x=0.5."""
    roots = radial_roots(0.9, 6, 0.999999, 0.5, digits)
    references = (
        mpf(
            "6000000.00000000000000000000000000000000000000000000000000000"
            "0000000000000000000000000000000000000000000000000000000000000"
            "00000"
        ),
        mpf(
            "3.00000150000075000037500018750009375004687502343751171875585"
            "9377929688964844482422241211120605560302780151390075695037847"
            "518923759"
        ),
        mpf(
            "2.09909500849781985664788052724951917390754315839076840288258"
            "30079641104679477801203514429959540281638544173538"
        ),
        mpf(
            "0.54443186482151970276714708796651839555550722672128921192288"
            "90618833999920935428791168827444874586729766166531"
        ),
    )
    # Compare each computed root (r1..r4) to its high-precision reference.
    for root, reference in zip(roots, references):
        assert almosteq(root, reference, eps)
| 31.829694
| 73
| 0.69111
| 1,259
| 14,578
| 7.831612
| 0.100079
| 0.090872
| 0.082759
| 0.012982
| 0.786511
| 0.777181
| 0.777181
| 0.775152
| 0.773225
| 0.773225
| 0
| 0.599449
| 0.227466
| 14,578
| 457
| 74
| 31.899344
| 0.276061
| 0.015228
| 0
| 0.736842
| 0
| 0
| 0.448697
| 0.432873
| 0
| 0
| 0
| 0
| 0.15311
| 1
| 0.038278
| false
| 0
| 0.009569
| 0
| 0.047847
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
17ba0a76001ace1eccb79876d492634c5d71a9c8
| 6,545
|
py
|
Python
|
loldib/getratings/models/NA/na_lucian/na_lucian_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_lucian/na_lucian_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_lucian/na_lucian_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Matchup rating tables for Lucian played mid (NA region): one empty Ratings
# subclass per opposing champion, named NA_Lucian_Mid_<Champion>. The classes
# are generated dynamically instead of spelled out one by one; the resulting
# module attributes are identical to 138 explicit `class ...(Ratings): pass`
# statements.
_CHAMPIONS = (
    "Aatrox", "Ahri", "Akali", "Alistar", "Amumu", "Anivia", "Annie", "Ashe",
    "AurelionSol", "Azir", "Bard", "Blitzcrank", "Brand", "Braum", "Caitlyn",
    "Camille", "Cassiopeia", "Chogath", "Corki", "Darius", "Diana", "Draven",
    "DrMundo", "Ekko", "Elise", "Evelynn", "Ezreal", "Fiddlesticks", "Fiora",
    "Fizz", "Galio", "Gangplank", "Garen", "Gnar", "Gragas", "Graves",
    "Hecarim", "Heimerdinger", "Illaoi", "Irelia", "Ivern", "Janna",
    "JarvanIV", "Jax", "Jayce", "Jhin", "Jinx", "Kalista", "Karma",
    "Karthus", "Kassadin", "Katarina", "Kayle", "Kayn", "Kennen", "Khazix",
    "Kindred", "Kled", "KogMaw", "Leblanc", "LeeSin", "Leona", "Lissandra",
    "Lucian", "Lulu", "Lux", "Malphite", "Malzahar", "Maokai", "MasterYi",
    "MissFortune", "MonkeyKing", "Mordekaiser", "Morgana", "Nami", "Nasus",
    "Nautilus", "Nidalee", "Nocturne", "Nunu", "Olaf", "Orianna", "Ornn",
    "Pantheon", "Poppy", "Quinn", "Rakan", "Rammus", "RekSai", "Renekton",
    "Rengar", "Riven", "Rumble", "Ryze", "Sejuani", "Shaco", "Shen",
    "Shyvana", "Singed", "Sion", "Sivir", "Skarner", "Sona", "Soraka",
    "Swain", "Syndra", "TahmKench", "Taliyah", "Talon", "Taric", "Teemo",
    "Thresh", "Tristana", "Trundle", "Tryndamere", "TwistedFate", "Twitch",
    "Udyr", "Urgot", "Varus", "Vayne", "Veigar", "Velkoz", "Vi", "Viktor",
    "Vladimir", "Volibear", "Warwick", "Xayah", "Xerath", "XinZhao", "Yasuo",
    "Yorick", "Zac", "Zed", "Ziggs", "Zilean", "Zyra",
)
for _champion in _CHAMPIONS:
    _class_name = "NA_Lucian_Mid_" + _champion
    globals()[_class_name] = type(_class_name, (Ratings,), {})
del _champion, _class_name
| 15.695444
| 46
| 0.766692
| 972
| 6,545
| 4.736626
| 0.151235
| 0.209818
| 0.389661
| 0.479583
| 0.803432
| 0.803432
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169748
| 6,545
| 416
| 47
| 15.733173
| 0.847258
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
17c0e6d1fb2f796e9d4a82c81d43a50ba957b85f
| 127
|
py
|
Python
|
Creation/examples/phoros/classes/static/links/link.py
|
OmarZOS/semantic-social-indexer
|
5051d4c5679afdef16f6515a231e621fbd02885e
|
[
"MIT"
] | null | null | null |
Creation/examples/phoros/classes/static/links/link.py
|
OmarZOS/semantic-social-indexer
|
5051d4c5679afdef16f6515a231e621fbd02885e
|
[
"MIT"
] | 1
|
2022-03-17T23:15:19.000Z
|
2022-03-17T23:15:19.000Z
|
Creation/examples/phoros/classes/static/links/link.py
|
OmarZOS/semantic-social-indexer
|
5051d4c5679afdef16f6515a231e621fbd02885e
|
[
"MIT"
] | null | null | null |
from owlready2 import *
from ontology_loader import TARGET_ONTOLOGY
# Define the `link` class while TARGET_ONTOLOGY is the active context, so the
# class is registered as part of that ontology (owlready2's `with ontology:`
# idiom — presumably TARGET_ONTOLOGY is an owlready2 Ontology; confirm in
# ontology_loader).
with TARGET_ONTOLOGY:
    class link(Thing):
        """OWL class for a hyperlink entity; no structure beyond Thing."""
        pass
| 18.142857
| 43
| 0.755906
| 16
| 127
| 5.8125
| 0.6875
| 0.301075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009901
| 0.204724
| 127
| 6
| 44
| 21.166667
| 0.910891
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
aa45422e6fde16f834bf3f3c765b42306602aca7
| 156
|
py
|
Python
|
examples/core/__init__.py
|
JeremieMelo/pytorch-onn
|
670996112277a6c19c7da400afbe0a4ce45ad5de
|
[
"MIT"
] | 16
|
2021-06-09T07:51:43.000Z
|
2022-03-24T08:16:52.000Z
|
examples/core/__init__.py
|
JeremieMelo/pytorch-onn
|
670996112277a6c19c7da400afbe0a4ce45ad5de
|
[
"MIT"
] | null | null | null |
examples/core/__init__.py
|
JeremieMelo/pytorch-onn
|
670996112277a6c19c7da400afbe0a4ce45ad5de
|
[
"MIT"
] | 3
|
2021-07-20T15:16:49.000Z
|
2022-03-24T08:16:56.000Z
|
"""
Description:
Author: Jiaqi Gu (jqgu@utexas.edu)
Date: 2021-06-09 00:15:14
LastEditors: Jiaqi Gu (jqgu@utexas.edu)
LastEditTime: 2021-06-09 00:15:14
"""
| 19.5
| 39
| 0.717949
| 27
| 156
| 4.148148
| 0.592593
| 0.125
| 0.196429
| 0.303571
| 0.607143
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0.201439
| 0.108974
| 156
| 7
| 40
| 22.285714
| 0.604317
| 0.942308
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a4c94522571cf59970219107fb934172592e389a
| 751
|
py
|
Python
|
express/config.py
|
offish/tf2-express
|
dac2563891226d7405c4085a98b42d13945d7459
|
[
"MIT"
] | 10
|
2021-01-08T21:44:34.000Z
|
2021-12-09T10:15:17.000Z
|
express/config.py
|
offish/tf2-express
|
dac2563891226d7405c4085a98b42d13945d7459
|
[
"MIT"
] | null | null | null |
express/config.py
|
offish/tf2-express
|
dac2563891226d7405c4085a98b42d13945d7459
|
[
"MIT"
] | 4
|
2020-09-13T18:42:39.000Z
|
2022-02-25T10:07:15.000Z
|
# Account credentials and mobile-authenticator secrets for every bot the
# application runs. Both entries share the same placeholder credentials and
# differ only in their display name.
BOTS = [
    {
        "name": bot_name,
        "username": "username",
        "password": "password",
        "api_key": "111AA1111AAAA11A1A11AA1AA1AAA111",
        "secrets": {
            "steamid": "76511111111111111",
            "shared_secret": "Aa11aA1+1aa1aAa1a=",
            "identity_secret": "aA11aaaa/aa11a/aAAa1a1=",
        },
    }
    for bot_name in ("bot1", "bot2")
]
# SteamID64s of accounts allowed to administer the bots.
OWNERS = ["76511111111111111"]
# Timeout value in seconds.
TIMEOUT = 30
# Debug/verbose-mode flag.
DEBUG = True
| 25.896552
| 57
| 0.523302
| 49
| 751
| 7.897959
| 0.510204
| 0.082687
| 0.124031
| 0.165375
| 0.842377
| 0.842377
| 0.842377
| 0.842377
| 0.842377
| 0.842377
| 0
| 0.219417
| 0.314248
| 751
| 28
| 58
| 26.821429
| 0.532039
| 0
| 0
| 0.518519
| 0
| 0
| 0.499334
| 0.146471
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.074074
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
a4ce91777aedcbc4c73ae0bff83adca30bc97140
| 238
|
py
|
Python
|
strategy.py
|
Shavolski/Forex-Bot
|
0e59ed90662d188c521904230af9cb2d1f518896
|
[
"MIT"
] | 1
|
2020-02-07T01:53:19.000Z
|
2020-02-07T01:53:19.000Z
|
strategy.py
|
Shavolski/Forex-Bot
|
0e59ed90662d188c521904230af9cb2d1f518896
|
[
"MIT"
] | null | null | null |
strategy.py
|
Shavolski/Forex-Bot
|
0e59ed90662d188c521904230af9cb2d1f518896
|
[
"MIT"
] | 2
|
2021-05-26T00:56:33.000Z
|
2022-03-04T00:21:44.000Z
|
class strategyLogic():
    """Simple-moving-average helpers used by the trading strategy."""

    def SMA(self, prices, length, period):
        """Average of the `period` prices ending at index `length` (exclusive)."""
        window = prices[length - period:length]
        return sum(window) / period

    def SMAprev(self, prices, length, period):
        """The same moving average shifted back one bar (window ends at `length - 1`)."""
        return self.SMA(prices, length - 1, period)
| 34
| 63
| 0.663866
| 30
| 238
| 5.266667
| 0.4
| 0.379747
| 0.455696
| 0.278481
| 0.620253
| 0.620253
| 0.620253
| 0.620253
| 0.620253
| 0
| 0
| 0.010363
| 0.189076
| 238
| 6
| 64
| 39.666667
| 0.80829
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.4
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
a4de689a0607ace409fe39e3200b81f5e8780ec1
| 1,143
|
py
|
Python
|
unit_tests/test_corrupted_agents.py
|
arenarium/battleground
|
dc58bd4898451e6498b291dcc2a671ed2e726297
|
[
"MIT"
] | 1
|
2018-01-20T10:10:50.000Z
|
2018-01-20T10:10:50.000Z
|
unit_tests/test_corrupted_agents.py
|
vincentropy/battleground
|
dc58bd4898451e6498b291dcc2a671ed2e726297
|
[
"MIT"
] | 109
|
2017-07-01T17:36:27.000Z
|
2017-12-21T22:57:24.000Z
|
unit_tests/test_corrupted_agents.py
|
arenarium/battleground
|
dc58bd4898451e6498b291dcc2a671ed2e726297
|
[
"MIT"
] | 4
|
2017-12-03T16:31:29.000Z
|
2017-12-23T17:56:25.000Z
|
from battleground.site_runner import start_session
import os
def test_corrupted_basic_agent():
    """A corrupted basic agent must raise with DEBUG on, but be tolerated with DEBUG off."""
    # there is some randomness in the initialisation, so try many times
    config_file = 'unit_tests/test_configurations/corrupted_basic_config.json'
    os.environ['DEBUG'] = 'True'
    raised = False
    try:
        start_session(config_file, save=False, run=True)
    except Exception:
        raised = True
    assert raised is True
    os.environ['DEBUG'] = 'False'
    # should not throw error
    start_session(config_file, save=False, run=True)
    os.environ['DEBUG'] = 'True'
def test_corrupted_arena_agent():
    """A corrupted arena agent must raise with DEBUG on, but be tolerated with DEBUG off."""
    # there is some randomness in the initialisation, so try many times
    config_file = 'unit_tests/test_configurations/corrupted_arena_config.json'
    os.environ['DEBUG'] = 'True'
    raised = False
    try:
        start_session(config_file, save=False, run=True)
    except Exception:
        raised = True
    assert raised is True
    os.environ['DEBUG'] = 'False'
    # should not throw error
    start_session(config_file, save=False, run=True)
    os.environ['DEBUG'] = 'True'
| 27.878049
| 78
| 0.698163
| 151
| 1,143
| 5.112583
| 0.304636
| 0.07772
| 0.108808
| 0.093264
| 0.865285
| 0.865285
| 0.865285
| 0.865285
| 0.865285
| 0.865285
| 0
| 0
| 0.209974
| 1,143
| 40
| 79
| 28.575
| 0.854928
| 0.154856
| 0
| 0.769231
| 0
| 0
| 0.17898
| 0.120708
| 0
| 0
| 0
| 0
| 0.076923
| 1
| 0.076923
| false
| 0
| 0.076923
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
353703fca5dc4cd8cb6591ed51f8c37f9b1bef7a
| 1,594
|
py
|
Python
|
ud_python_progr_breaktime_v3.py
|
FelipeRego/Breaktime
|
07c7fe6ad4e726f98fe396ea539b39d42b2c8fdf
|
[
"MIT"
] | null | null | null |
ud_python_progr_breaktime_v3.py
|
FelipeRego/Breaktime
|
07c7fe6ad4e726f98fe396ea539b39d42b2c8fdf
|
[
"MIT"
] | null | null | null |
ud_python_progr_breaktime_v3.py
|
FelipeRego/Breaktime
|
07c7fe6ad4e726f98fe396ea539b39d42b2c8fdf
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 06 21:28:40 2015
@author: regof
"""
import time
import ctypes
def _show_break_prompt(notice_prefix):
    """Print a timestamped notice and pop the Windows stand-up dialog,
    then show a follow-up box depending on the user's answer."""
    print(notice_prefix + time.ctime())
    message_box = ctypes.windll.user32.MessageBoxA
    # 0x40 = information icon, 0x4 = Yes/No buttons; MessageBoxA returns
    # 6 for "Yes" and 7 for "No".
    answer = message_box(None, "Maybe it's time for you to stand up and have a break?", "News NRA Team Stand Up Program", 0x40 | 0x4)
    if answer == 6:
        message_box = ctypes.windll.user32.MessageBoxA
        message_box(None, "Great! I'll remind you again in 2 hrs to stand up and move!", "News NRA Team Stand Up Program", 0x40 | 0x0)
    elif answer == 7:
        message_box = ctypes.windll.user32.MessageBoxA
        message_box(None, "Are you sure? Stretch your back at least then... I'll be back in 2hrs!", "News NRA Team Stand Up Program", 0x40 | 0x0)

# One immediate prompt, then four more two hours apart.
_show_break_prompt("This break message was sent on ")
for _round in range(4):
    time.sleep(60 * 60 * 2)
    # NOTE: this prefix has no trailing space, exactly as in the original.
    _show_break_prompt("This break message was sent on")
time.localtime()
| 43.081081
| 158
| 0.695734
| 243
| 1,594
| 4.563786
| 0.325103
| 0.06312
| 0.119026
| 0.151488
| 0.887286
| 0.887286
| 0.887286
| 0.887286
| 0.887286
| 0.887286
| 0
| 0.055512
| 0.197616
| 1,594
| 36
| 159
| 44.277778
| 0.811572
| 0.013174
| 0
| 0.72
| 0
| 0
| 0.400132
| 0
| 0
| 0
| 0.027778
| 0
| 0
| 0
| null | null | 0
| 0.08
| null | null | 0.08
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
35435f47eed675a76da38143234eda1e6468a05c
| 3,444
|
py
|
Python
|
tests/test_config.py
|
zanachka/scrapy-crawlera-fetch
|
d715b08877ae88211e4588b60d6a992d5ae98fdc
|
[
"BSD-3-Clause"
] | 6
|
2020-08-26T12:19:31.000Z
|
2022-03-11T13:34:31.000Z
|
tests/test_config.py
|
zanachka/scrapy-crawlera-fetch
|
d715b08877ae88211e4588b60d6a992d5ae98fdc
|
[
"BSD-3-Clause"
] | 21
|
2020-07-16T08:11:05.000Z
|
2021-12-03T18:39:49.000Z
|
tests/test_config.py
|
zanachka/scrapy-crawlera-fetch
|
d715b08877ae88211e4588b60d6a992d5ae98fdc
|
[
"BSD-3-Clause"
] | 9
|
2020-07-01T04:58:17.000Z
|
2021-03-23T08:05:55.000Z
|
from scrapy import Spider
from scrapy.utils.test import get_crawler
from crawlera_fetch import CrawleraFetchMiddleware
from tests.data import SETTINGS
def test_disable_via_setting():
    """CRAWLERA_FETCH_ENABLED=False in the crawler settings disables the middleware."""
    FooSpider = type("FooSpider", (Spider,), {"name": "foo"})
    spider = FooSpider()
    spider.crawler = get_crawler(FooSpider, settings_dict={"CRAWLERA_FETCH_ENABLED": False})
    mw = CrawleraFetchMiddleware.from_crawler(spider.crawler)
    mw.spider_opened(spider)
    assert not mw.enabled
def test_disable_via_spider_attribute_bool():
    """A boolean False `crawlera_fetch_enabled` spider attribute disables the middleware."""
    FooSpider = type("FooSpider", (Spider,), {"name": "foo", "crawlera_fetch_enabled": False})
    spider = FooSpider()
    spider.crawler = get_crawler(spidercls=FooSpider)
    mw = CrawleraFetchMiddleware.from_crawler(spider.crawler)
    mw.spider_opened(spider)
    assert not mw.enabled
def test_disable_via_spider_attribute_int():
    """An integer 0 `crawlera_fetch_enabled` spider attribute disables the middleware."""
    FooSpider = type("FooSpider", (Spider,), {"name": "foo", "crawlera_fetch_enabled": 0})
    spider = FooSpider()
    spider.crawler = get_crawler(spidercls=FooSpider)
    mw = CrawleraFetchMiddleware.from_crawler(spider.crawler)
    mw.spider_opened(spider)
    assert not mw.enabled
def test_disable_via_spider_attribute_str():
    """The string "False" as `crawlera_fetch_enabled` disables the middleware."""
    FooSpider = type("FooSpider", (Spider,), {"name": "foo", "crawlera_fetch_enabled": "False"})
    spider = FooSpider()
    spider.crawler = get_crawler(spidercls=FooSpider)
    mw = CrawleraFetchMiddleware.from_crawler(spider.crawler)
    mw.spider_opened(spider)
    assert not mw.enabled
def test_disable_override():
    """A spider-level disable wins even when the settings set CRAWLERA_FETCH_ENABLED=True."""
    FooSpider = type("FooSpider", (Spider,), {"name": "foo", "crawlera_fetch_enabled": False})
    spider = FooSpider()
    spider.crawler = get_crawler(FooSpider, settings_dict={"CRAWLERA_FETCH_ENABLED": True})
    mw = CrawleraFetchMiddleware.from_crawler(spider.crawler)
    mw.spider_opened(spider)
    assert not mw.enabled
def test_no_apikey():
    """With no API key configured the middleware stays disabled even if enabled in settings."""
    FooSpider = type("FooSpider", (Spider,), {"name": "foo"})
    spider = FooSpider()
    spider.crawler = get_crawler(settings_dict={"CRAWLERA_FETCH_ENABLED": True})
    mw = CrawleraFetchMiddleware.from_crawler(spider.crawler)
    mw.spider_opened(spider)
    assert not mw.enabled
def test_config_values():
    """The middleware picks up apikey, url and apipass from the crawler settings."""
    class FooSpider(Spider):
        name = "foo"
    spider = FooSpider()
    spider.crawler = get_crawler(spidercls=FooSpider, settings_dict=SETTINGS)
    mw = CrawleraFetchMiddleware.from_crawler(spider.crawler)
    mw.spider_opened(spider)
    assert mw.apikey == SETTINGS["CRAWLERA_FETCH_APIKEY"]
    assert mw.url == SETTINGS["CRAWLERA_FETCH_URL"]
    assert mw.apipass == SETTINGS["CRAWLERA_FETCH_APIPASS"]
def test_config_without_apipass():
    """Without CRAWLERA_FETCH_APIPASS the middleware falls back to an empty apipass."""
    settings = SETTINGS.copy()
    settings.pop("CRAWLERA_FETCH_APIPASS", None)
    class FooSpider(Spider):
        name = "foo"
    spider = FooSpider()
    spider.crawler = get_crawler(spidercls=FooSpider, settings_dict=settings)
    mw = CrawleraFetchMiddleware.from_crawler(spider.crawler)
    mw.spider_opened(spider)
    assert mw.apikey == SETTINGS["CRAWLERA_FETCH_APIKEY"]
    assert mw.url == SETTINGS["CRAWLERA_FETCH_URL"]
    assert mw.apipass == ""
| 33.115385
| 96
| 0.752904
| 389
| 3,444
| 6.367609
| 0.123393
| 0.11627
| 0.103351
| 0.071054
| 0.875656
| 0.875656
| 0.875656
| 0.875656
| 0.856681
| 0.856681
| 0
| 0.000345
| 0.157666
| 3,444
| 103
| 97
| 33.436893
| 0.853499
| 0
| 0
| 0.697368
| 0
| 0
| 0.070557
| 0.044135
| 0
| 0
| 0
| 0
| 0.157895
| 1
| 0.105263
| false
| 0.052632
| 0.052632
| 0
| 0.368421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
5e113203dce9c8eec8ff6b0fd05848d0b10edefa
| 14,714
|
py
|
Python
|
4-evaluation/merge_feature_with_clustering_results.py
|
saeedeldah/multi-modal-multi-level-video-data-characterization-comparison-2018-2019
|
3a44572641349b9adb62aefd669b15436efad8d0
|
[
"MIT"
] | null | null | null |
4-evaluation/merge_feature_with_clustering_results.py
|
saeedeldah/multi-modal-multi-level-video-data-characterization-comparison-2018-2019
|
3a44572641349b9adb62aefd669b15436efad8d0
|
[
"MIT"
] | null | null | null |
4-evaluation/merge_feature_with_clustering_results.py
|
saeedeldah/multi-modal-multi-level-video-data-characterization-comparison-2018-2019
|
3a44572641349b9adb62aefd669b15436efad8d0
|
[
"MIT"
] | null | null | null |
import utility as utl
import csv
class VideoClusterResult:
    """Clustering output for one video.

    Attributes:
        name     -- video file name (empty string until filled)
        shot     -- dict mapping shot number -> cluster label
        category -- ground-truth category label of the video
    """

    def __init__(self):
        # Initialize per-instance attributes. The original declared these as
        # CLASS attributes, so the mutable `shot` dict was shared by every
        # instance that did not explicitly replace it — a latent aliasing bug.
        self.name = ""
        self.shot = {}
        self.category = ""
class VideoFeatures:
    """Feature vectors for one video.

    Attributes:
        name     -- video file name (empty string until filled)
        shot     -- dict mapping shot number -> feature vector
        category -- ground-truth category label of the video
    """

    def __init__(self):
        # Initialize per-instance attributes. The original declared these as
        # CLASS attributes, so the mutable `shot` dict was shared by every
        # instance that did not explicitly replace it — a latent aliasing bug.
        self.name = ""
        self.shot = {}
        self.category = ""
def fill_clustering_results_data(clustering_results_file_path):
    """Parse a clustering-results CSV into {video name: VideoClusterResult}.

    Expected row layout (after a one-line header):
        row[0] video name, row[1] shot number, row[2] category label,
        row[3] cluster label.
    """
    # First pass: count data rows (minus the header) for the progress bar.
    with open(clustering_results_file_path) as f:
        data_set_count = sum(1 for line in f) - 1
        f.close()  # redundant: the `with` block already closes the file
    with open(clustering_results_file_path, 'r') as csvFile:
        reader = csv.reader(csvFile)
        iteration = 0
        video_list = {}
        print("preparing clustering results data ...")
        video_cluster_result = VideoClusterResult()
        for row in reader:
            # Skip the header row (iteration == 0).
            if iteration > 0:
                video_name = row[0]
                shot_number = row[1]
                category_label = row[2]
                cluster_label = row[3]
                # Very first data row: initialise the accumulator in place.
                if video_cluster_result.name == "":
                    video_cluster_result.category = category_label
                    video_cluster_result.name = video_name
                    video_cluster_result.shot = {}
                # NOTE(review): this shot is recorded BEFORE checking whether
                # the row belongs to a new video, so the first shot of each
                # new video is written into the PREVIOUS video's dict and is
                # missing from the new one — verify against the data layout.
                video_cluster_result.shot[shot_number] = cluster_label
                # A new video name flushes the finished accumulator and
                # starts a fresh one.
                if video_cluster_result.name != video_name and video_cluster_result.name not in video_list:
                    video_list[video_cluster_result.name] = video_cluster_result
                    video_cluster_result = VideoClusterResult()
                    video_cluster_result.category = category_label
                    video_cluster_result.name = video_name
                    video_cluster_result.shot = {}
            utl.print_progress_bar(iteration + 1, data_set_count)
            iteration += 1
        # NOTE(review): the LAST video in the file is never flushed into
        # video_list (flushing only happens when a newer name appears).
        csvFile.close()  # redundant under `with`, kept as-is
    print("")
    return video_list
def fill_features_data(features_file_path):
    """Parse a features CSV into {video name: VideoClusterResult}.

    Expected row layout (after a one-line header):
        row[0] video name, row[1] shot number, row[2] category label,
        row[3:] the feature vector for that shot.
    Note: despite the name, values are stored in VideoClusterResult objects
    (not VideoFeatures), with shot -> feature-list instead of cluster label.
    """
    # First pass: count data rows (minus the header) for the progress bar.
    with open(features_file_path) as f:
        data_set_count = sum(1 for line in f) - 1
        f.close()  # redundant: the `with` block already closes the file
    with open(features_file_path, 'r') as csvFile:
        reader = csv.reader(csvFile)
        iteration = 0
        video_list = {}
        print("preparing clustering results data ...")
        video = VideoClusterResult()
        for row in reader:
            # Stop once all counted data rows have been consumed.
            if iteration > data_set_count:
                break
            # Skip the header row (iteration == 0).
            if iteration > 0:
                video_name = row[0]
                shot_number = row[1]
                category_label = row[2]
                features = row[3:len(row)]
                # Very first data row: initialise the accumulator in place.
                if video.name == "":
                    video.category = category_label
                    video.name = video_name
                    video.shot = {}
                # NOTE(review): as in fill_clustering_results_data, this shot
                # is stored BEFORE the new-video check, so the first shot of
                # each new video lands in the previous video's dict.
                video.shot[shot_number] = features
                # A new video name flushes the finished accumulator and
                # starts a fresh one.
                if video.name != video_name and video.name not in video_list:
                    video_list[video.name] = video
                    video = VideoClusterResult()
                    video.category = category_label
                    video.name = video_name
                    video.shot = {}
            utl.print_progress_bar(iteration + 1, data_set_count)
            iteration += 1
        # NOTE(review): the LAST video in the file is never flushed into
        # video_list.
        csvFile.close()  # redundant under `with`, kept as-is
    print("")
    return video_list
def merge_features_with_clustering_results_complete(features_file_path, clustering_results_file_path,
                                                    out_put_file_path):
    """Join per-shot feature vectors with their cluster labels and write one
    CSV at out_put_file_path using the COMPLETE (9+9 intensity-variation
    columns) header layout.

    Python 2 only: uses the `print` statement, dict.iteritems() and a binary
    ('wb') file handle for the csv writer.
    """
    clustering_result_data = fill_clustering_results_data(clustering_results_file_path)
    features_data = fill_features_data(features_file_path)
    print "creating csv file..."
    with open(out_put_file_path, 'wb') as f:
        the_writer = csv.writer(f)
        # Output column names: identifying columns, cluster label, then the
        # complete feature set.
        headers = [
            "video",
            "shot_number",
            "category",
            "cluster",
            "interactions_number_speakers_2",
            "interactions_number_speakers_3",
            "interactions_number_speakers_4",
            "interactions_number_speakers_4+",
            "intervention_short",
            "intervention_long",
            "speakers_type_ponctuel",
            "speakers_type_localise",
            "speakers_type_present",
            "speakers_type_regulier",
            "speakers_type_important",
            "speaker_distribution",
            "mean_number_of_faces",
            "std_number_of_faces",
            "inter_intensity_variation1",
            "inter_intensity_variation2",
            "inter_intensity_variation3",
            "inter_intensity_variation4",
            "inter_intensity_variation5",
            "inter_intensity_variation6",
            "inter_intensity_variation7",
            "inter_intensity_variation8",
            "inter_intensity_variation9",
            "intra_intensity_variation1",
            "intra_intensity_variation2",
            "intra_intensity_variation3",
            "intra_intensity_variation4",
            "intra_intensity_variation5",
            "intra_intensity_variation6",
            "intra_intensity_variation7",
            "intra_intensity_variation8",
            "intra_intensity_variation9",
            "number_shot_transition",
            "number_speaker_transition",
            "speech",
            "music",
            "speech_with_music",
            "speech_with_non_music",
            "non_speech_with_music",
            "non_speech_with_non_music",
            "words",
            "duration"
        ]
        the_writer.writerow(headers)
        video_list_length = len(clustering_result_data)
        max_value = video_list_length
        iteration = 1
        # One output row per (video, shot): look up the shot's feature vector
        # and append it after the identifying columns + cluster label.
        for video_name, video in clustering_result_data.iteritems():
            video_from_features = features_data[video_name]
            for shot_number, cluster in video.shot.iteritems():
                features = video_from_features.shot[shot_number]
                vector = [video_name, shot_number, video.category, cluster] + features
                the_writer.writerow(vector)
            utl.print_progress_bar(iteration, max_value)
            iteration += 1
        f.close()  # redundant under `with`, kept as-is
    print("")
    print("csv file has been created successfully")
def merge_features_with_clustering_results_normalized(features_file_path, clustering_results_file_path,
                                                      out_put_file_path):
    """Join per-shot feature vectors with their cluster labels and write one
    CSV at out_put_file_path using the NORMALIZED header layout (single
    inter/intra intensity-variation columns instead of 9 each).

    Python 2 only: uses the `print` statement, dict.iteritems() and a binary
    ('wb') file handle for the csv writer.
    """
    clustering_result_data = fill_clustering_results_data(clustering_results_file_path)
    features_data = fill_features_data(features_file_path)
    print "creating csv file..."
    with open(out_put_file_path, 'wb') as f:
        the_writer = csv.writer(f)
        # Output column names: identifying columns, cluster label, then the
        # normalized feature set.
        headers = [
            "video",
            "shot_number",
            "category",
            "cluster",
            "interactions_number_speakers_2",
            "interactions_number_speakers_3",
            "interactions_number_speakers_4",
            "interactions_number_speakers_4+",
            "intervention_short",
            "intervention_long",
            "speakers_type_ponctuel",
            "speakers_type_localise",
            "speakers_type_present",
            "speakers_type_regulier",
            "speakers_type_important",
            "speaker_distribution",
            "mean_number_of_faces",
            "std_number_of_faces",
            "inter_intensity_variation",
            "intra_intensity_variation",
            "number_shot_transition",
            "number_speaker_transition",
            "speech",
            "music",
            "speech_with_music",
            "speech_with_non_music",
            "non_speech_with_music",
            "non_speech_with_non_music",
            "words",
            "duration"
        ]
        the_writer.writerow(headers)
        video_list_length = len(clustering_result_data)
        max_value = video_list_length
        iteration = 1
        # One output row per (video, shot): look up the shot's feature vector
        # and append it after the identifying columns + cluster label.
        for video_name, video in clustering_result_data.iteritems():
            video_from_features = features_data[video_name]
            for shot_number, cluster in video.shot.iteritems():
                features = video_from_features.shot[shot_number]
                vector = [video_name, shot_number, video.category, cluster] + features
                the_writer.writerow(vector)
            utl.print_progress_bar(iteration, max_value)
            iteration += 1
        f.close()  # redundant under `with`, kept as-is
    print("")
    print("csv file has been created successfully")
def generate_shot_merged_files_complete():
    """Merge complete shot-level features into every shot clustering-result file."""
    features_file_path = "./features/shot_features.csv"
    results_dir = "./clustering_results/k_means/complete_video/complete-intra-inter/"
    output_dir = "./mean-average-precision/k_means/complete_video/complete-intra-inter/merged-with-features/"
    for result_name in utl.get_file_name_list(results_dir):
        # The output folder itself lives inside the results directory; skip it.
        if result_name != "merged-with-features":
            print ("generating " + result_name)
            merge_features_with_clustering_results_complete(
                features_file_path, results_dir + result_name, output_dir + result_name)
            print ("---------------------------------------------------------------")
def generate_shot_merged_files_normalized():
    """Merge normalized shot-level features into every shot clustering-result file."""
    features_file_path = "./features/normalized_shot_features.csv"
    results_dir = "./clustering_results/k_means/complete_video/normalized-intra-inter/"
    output_dir = "./mean-average-precision/k_means/complete_video/normalized-intra-inter/merged-with-features/"
    for result_name in utl.get_file_name_list(results_dir):
        # The output folder itself lives inside the results directory; skip it.
        if result_name != "merged-with-features":
            print ("generating " + result_name)
            merge_features_with_clustering_results_normalized(
                features_file_path, results_dir + result_name, output_dir + result_name)
            print ("---------------------------------------------------------------")
def generate_video_merged_files_complete():
    """Merge the video features into every complete-intra-inter k-means
    clustering result for the per-video runs."""
    features_path = "./features/video_features.csv"
    results_dir = "./clustering_results/k_means/video/complete-intra-inter/"
    merged_dir = "./mean-average-precision/k_means/video/complete-intra-inter/merged-with-features/"
    for result_name in utl.get_file_name_list(results_dir):
        # The output subdirectory lives alongside the result files; skip it.
        if result_name == "merged-with-features":
            continue
        print("generating " + result_name)
        merge_features_with_clustering_results_complete(
            features_path,
            results_dir + result_name,
            merged_dir + result_name)
        print("---------------------------------------------------------------")
def generate_video_merged_files_normalized():
    """Merge the normalized video features into every normalized-intra-inter
    k-means clustering result for the per-video runs."""
    features_path = "./features/normalized_video_features.csv"
    results_dir = "./clustering_results/k_means/video/normalized-intra-inter/"
    merged_dir = "./mean-average-precision/k_means/video/normalized-intra-inter/merged-with-features/"
    for result_name in utl.get_file_name_list(results_dir):
        # The output subdirectory lives alongside the result files; skip it.
        if result_name == "merged-with-features":
            continue
        print("generating " + result_name)
        merge_features_with_clustering_results_normalized(
            features_path,
            results_dir + result_name,
            merged_dir + result_name)
        print("---------------------------------------------------------------")
def generate_complete_video_merged_files_complete():
    """Merge the complete-video features into every complete-intra-inter
    k-means clustering result for the complete_video runs."""
    features_path = "./features/complete_video_features.csv"
    results_dir = "./clustering_results/k_means/complete_video/complete-intra-inter/"
    merged_dir = "./mean-average-precision/k_means/complete_video/complete-intra-inter/merged-with-features/"
    for result_name in utl.get_file_name_list(results_dir):
        # The output subdirectory lives alongside the result files; skip it.
        if result_name == "merged-with-features":
            continue
        print("generating " + result_name)
        merge_features_with_clustering_results_complete(
            features_path,
            results_dir + result_name,
            merged_dir + result_name)
        print("---------------------------------------------------------------")
def generate_complete_video_merged_files_normalized():
    """Merge the normalized complete-video features into every
    normalized-intra-inter k-means clustering result for the
    complete_video runs."""
    features_path = "./features/normalized_complete_video_features.csv"
    results_dir = "./clustering_results/k_means/complete_video/normalized-intra-inter/"
    merged_dir = "./mean-average-precision/k_means/complete_video/normalized-intra-inter/merged-with-features/"
    for result_name in utl.get_file_name_list(results_dir):
        # The output subdirectory lives alongside the result files; skip it.
        if result_name == "merged-with-features":
            continue
        print("generating " + result_name)
        merge_features_with_clustering_results_normalized(
            features_path,
            results_dir + result_name,
            merged_dir + result_name)
        print("---------------------------------------------------------------")
def main():
    """Entry point: generate the shot-level merged files (complete variant).

    The other generators are kept below, commented out, so individual
    variants can be re-enabled without hunting for their names.
    """
    generate_shot_merged_files_complete()
    # generate_video_merged_files_complete()
    # generate_shot_merged_files_normalized()
    # generate_video_merged_files_normalized()
    # generate_complete_video_merged_files_complete()
    # generate_complete_video_merged_files_normalized()


# Bug fix: the original called main() unconditionally at module level, so
# merely importing this module kicked off the (slow, file-writing)
# generation.  Guarding the call keeps the script behavior when run
# directly but makes the module importable without side effects.
if __name__ == "__main__":
    main()
| 44.723404
| 120
| 0.619682
| 1,496
| 14,714
| 5.612968
| 0.084225
| 0.057163
| 0.092533
| 0.085745
| 0.909134
| 0.885435
| 0.862689
| 0.834465
| 0.806717
| 0.796237
| 0
| 0.004746
| 0.284015
| 14,714
| 328
| 121
| 44.859756
| 0.792311
| 0.014748
| 0
| 0.767025
| 0
| 0.021505
| 0.23663
| 0.187358
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.007168
| 0.014337
| null | null | 0.09319
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5e39297e6c6a05a83f8038a7d4064abfa86c8c9e
| 166
|
py
|
Python
|
parcels/scripts/__init__.py
|
CKehl/parcels
|
6e4d6a10157a9d1a7935b08f35808b874a1cdf58
|
[
"MIT"
] | 1
|
2020-03-20T10:22:21.000Z
|
2020-03-20T10:22:21.000Z
|
parcels/scripts/__init__.py
|
CKehl/parcels
|
6e4d6a10157a9d1a7935b08f35808b874a1cdf58
|
[
"MIT"
] | null | null | null |
parcels/scripts/__init__.py
|
CKehl/parcels
|
6e4d6a10157a9d1a7935b08f35808b874a1cdf58
|
[
"MIT"
] | null | null | null |
from .plottrajectoriesfile import plotTrajectoriesFile # NOQA get flake8 to ignore unused import.
from .plottrajectoriesfile import plotTrajectoriesFile_loadedField
| 55.333333
| 98
| 0.873494
| 16
| 166
| 9
| 0.625
| 0.333333
| 0.416667
| 0.694444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006711
| 0.10241
| 166
| 2
| 99
| 83
| 0.959732
| 0.240964
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
eae27e77cd5b0ee7052fc36a20b9c1e51b66b7e9
| 38
|
py
|
Python
|
Forward_Warp/__init__.py
|
hologerry/Forward-Warp
|
82a32a372383b3c69f9666cb5b8189dbfb05d328
|
[
"MIT"
] | 81
|
2019-07-04T20:51:34.000Z
|
2022-03-26T15:58:42.000Z
|
Forward_Warp/__init__.py
|
hologerry/Forward-Warp
|
82a32a372383b3c69f9666cb5b8189dbfb05d328
|
[
"MIT"
] | 9
|
2020-05-04T04:59:16.000Z
|
2021-12-21T19:06:31.000Z
|
Forward_Warp/__init__.py
|
hologerry/Forward-Warp
|
82a32a372383b3c69f9666cb5b8189dbfb05d328
|
[
"MIT"
] | 9
|
2019-09-04T02:09:12.000Z
|
2021-11-27T09:31:49.000Z
|
from .forward_warp import forward_warp
| 38
| 38
| 0.894737
| 6
| 38
| 5.333333
| 0.666667
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 38
| 1
| 38
| 38
| 0.914286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d8164fe293053d95168cf3f8081dfe86918e89aa
| 2,205
|
py
|
Python
|
usaspending_api/awards/migrations/0013_add_new_location_fpds_fabs.py
|
truthiswill/usaspending-api
|
bd7d915442e2ec94cc830c480ceeffd4479be6c0
|
[
"CC0-1.0"
] | null | null | null |
usaspending_api/awards/migrations/0013_add_new_location_fpds_fabs.py
|
truthiswill/usaspending-api
|
bd7d915442e2ec94cc830c480ceeffd4479be6c0
|
[
"CC0-1.0"
] | 3
|
2020-02-12T01:16:46.000Z
|
2021-06-10T20:36:57.000Z
|
usaspending_api/awards/migrations/0013_add_new_location_fpds_fabs.py
|
truthiswill/usaspending-api
|
bd7d915442e2ec94cc830c480ceeffd4479be6c0
|
[
"CC0-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-12-20 20:51
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add new location-related columns to the FABS/FPDS transaction tables."""

    dependencies = [
        ('awards', '0012_upper_index_fain_uri_awards'),
    ]

    # (model_name, field_name) pairs; every new column is the same nullable
    # TextField, so the AddField operations are generated uniformly below.
    _new_text_fields = [
        ('transactionfabs', 'place_of_perfor_state_code'),
        ('transactionfabs', 'place_of_perform_zip_last4'),
        ('transactionfabs', 'place_of_performance_zip5'),
        ('transactionfpds', 'legal_entity_county_code'),
        ('transactionfpds', 'legal_entity_county_name'),
        ('transactionfpds', 'legal_entity_zip5'),
        ('transactionfpds', 'legal_entity_zip_last4'),
        ('transactionfpds', 'place_of_perform_county_co'),
        ('transactionfpds', 'place_of_perform_zip_last4'),
        ('transactionfpds', 'place_of_performance_zip5'),
    ]

    operations = [
        migrations.AddField(
            model_name=model,
            name=column,
            field=models.TextField(blank=True, null=True),
        )
        for model, column in _new_text_fields
    ]
| 32.910448
| 58
| 0.599546
| 217
| 2,205
| 5.847926
| 0.271889
| 0.141844
| 0.181245
| 0.212766
| 0.799842
| 0.799842
| 0.799842
| 0.799842
| 0.758077
| 0.758077
| 0
| 0.017308
| 0.292517
| 2,205
| 66
| 59
| 33.409091
| 0.796154
| 0.030839
| 0
| 0.745763
| 1
| 0
| 0.201031
| 0.119963
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.050847
| 0
| 0.101695
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
dc447f8f224367f22f5aaa337bbc3eb5c5483367
| 11,319
|
py
|
Python
|
fe.py
|
RyanWhitell/Thesis
|
019feb61896ca1d5ab87e71910f3f31fcb6b08fe
|
[
"MIT"
] | null | null | null |
fe.py
|
RyanWhitell/Thesis
|
019feb61896ca1d5ab87e71910f3f31fcb6b08fe
|
[
"MIT"
] | null | null | null |
fe.py
|
RyanWhitell/Thesis
|
019feb61896ca1d5ab87e71910f3f31fcb6b08fe
|
[
"MIT"
] | 1
|
2020-06-14T20:28:03.000Z
|
2020-06-14T20:28:03.000Z
|
import os
import h5py
import pickle
import time
import multiprocessing
from tqdm import tqdm
import sklearn
import traceback
import pandas as pd
import ast
import librosa as lr
import numpy as np
import argparse
import FMA
# Command-line interface: every run must name a dataset and a feature type;
# --quick and --cores tune how the extraction is executed.
parser = argparse.ArgumentParser(description="extracts features")
parser.add_argument('-d', '--dataset', required=True, help='dataset to use: fma_med, fma_large, spotify')
parser.add_argument('-f', '--features', required=True, help='which features to extract: stft, stft_halved, mel_scaled_stft, cqt, chroma, mfcc')
# NOTE(review): values passed on the command line arrive as strings, so
# "--quick False" is truthy; the code below casts cores with int() but uses
# quick as-is — confirm intended.
parser.add_argument('-q', '--quick', default=False, help='runs each extraction quickly to ensure they will extract')
parser.add_argument('-c', '--cores', default=1, help='number of cores to use')
args = parser.parse_args()
def get_fma_stft(track_id):
    """Return (track_id, standardized dB-scaled STFT) for one FMA track.

    On any decoding/extraction failure the traceback is printed and
    (track_id, None) is returned so the worker pool can keep going.
    """
    standardizer = sklearn.preprocessing.StandardScaler()
    sample_rate = 22050
    fft_size = 4096
    hop = 1024
    tid = '{:06d}'.format(track_id)
    audio_path = os.path.join('Data/fma_large', tid[:3], tid + '.mp3')
    try:
        signal, _ = lr.load(path=audio_path, sr=sample_rate)
        spectrum = lr.stft(y=signal, n_fft=fft_size, hop_length=hop,
                           win_length=fft_size, window='hann')
    except Exception:
        traceback.print_exc()
        print('*' * 20, str(track_id))
        return track_id, None
    # Truncate to the first 643 frames, then standardize per-track.
    db_spec = lr.amplitude_to_db(np.abs(spectrum[:, :643]))
    return track_id, standardizer.fit_transform(db_spec)
def get_fma_stft_halved(track_id):
    """Like get_fma_stft, but keep only the first 1024 frequency rows.

    Delegates to get_fma_stft (identical load/STFT/standardize pipeline)
    and slices its result; the (track_id, None) failure path is preserved.
    """
    tid, features = get_fma_stft(track_id)
    if features is None:
        return tid, None
    return tid, features[0:1024, :]
def get_fma_mel_scaled_stft(track_id):
    """Return (track_id, standardized dB mel spectrogram) for one FMA track.

    Returns (track_id, None) after printing the traceback on failure.
    """
    standardizer = sklearn.preprocessing.StandardScaler()
    sample_rate = 22050
    fft_size = 4096
    hop = 1024
    tid = '{:06d}'.format(track_id)
    audio_path = os.path.join('Data/fma_large', tid[:3], tid + '.mp3')
    try:
        signal, _ = lr.load(path=audio_path, sr=sample_rate)
        mel = lr.feature.melspectrogram(y=signal, sr=sample_rate,
                                        n_fft=fft_size, hop_length=hop,
                                        n_mels=256)
    except Exception:
        traceback.print_exc()
        print('*' * 20, str(track_id))
        return track_id, None
    # Truncate to the first 643 frames, then standardize per-track.
    return track_id, standardizer.fit_transform(lr.power_to_db(mel[:, :643]))
def get_fma_cqt(track_id):
    """Return (track_id, standardized dB constant-Q transform) for one FMA track.

    Returns (track_id, None) after printing the traceback on failure.
    """
    standardizer = sklearn.preprocessing.StandardScaler()
    sample_rate = 22050
    hop = 1024
    tid = '{:06d}'.format(track_id)
    audio_path = os.path.join('Data/fma_large', tid[:3], tid + '.mp3')
    try:
        signal, _ = lr.load(path=audio_path, sr=sample_rate)
        # 84*2 bins at 24 bins/octave = double the default resolution.
        magnitude = np.abs(lr.core.cqt(y=signal, sr=sample_rate,
                                       hop_length=hop, window='hann',
                                       n_bins=84 * 2, bins_per_octave=12 * 2))
    except Exception:
        traceback.print_exc()
        print('*' * 20, str(track_id))
        return track_id, None
    return track_id, standardizer.fit_transform(lr.amplitude_to_db(magnitude[:, :643]))
def get_fma_chroma(track_id):
    """Return (track_id, standardized CQT-based chromagram) for one FMA track.

    Returns (track_id, None) after printing the traceback on failure.
    """
    standardizer = sklearn.preprocessing.StandardScaler()
    sample_rate = 22050
    hop = 1024
    # 24 bins per octave, i.e. two per semitone.
    double_resolution = 12 * 2
    tid = '{:06d}'.format(track_id)
    audio_path = os.path.join('Data/fma_large', tid[:3], tid + '.mp3')
    try:
        signal, _ = lr.load(path=audio_path, sr=sample_rate)
        chromagram = lr.feature.chroma_cqt(y=signal, sr=sample_rate,
                                           hop_length=hop,
                                           bins_per_octave=double_resolution)
    except Exception:
        traceback.print_exc()
        print('*' * 20, str(track_id))
        return track_id, None
    # Truncate to the first 643 frames, then standardize per-track.
    return track_id, standardizer.fit_transform(chromagram[:, :643])
def get_fma_mfcc(track_id):
    """Return (track_id, standardized MFCCs with the first row dropped).

    Returns (track_id, None) after printing the traceback on failure.
    """
    standardizer = sklearn.preprocessing.StandardScaler()
    sample_rate = 22050
    coeff_count = 13
    # Parameters forwarded to the underlying mel-spectrogram computation.
    mel_kwargs = {'n_fft': 4096, 'hop_length': 1024, 'n_mels': 256}
    tid = '{:06d}'.format(track_id)
    audio_path = os.path.join('Data/fma_large', tid[:3], tid + '.mp3')
    try:
        signal, _ = lr.load(path=audio_path, sr=sample_rate)
        # [1:] drops the first coefficient row, keeping the remaining 12.
        coeffs = lr.feature.mfcc(y=signal, sr=sample_rate,
                                 n_mfcc=coeff_count, **mel_kwargs)[1:]
    except Exception:
        traceback.print_exc()
        print('*' * 20, str(track_id))
        return track_id, None
    return track_id, standardizer.fit_transform(coeffs[:, :643])
def extract_fma(ids, fma_set, features, quick, cores):
    """Extract `features` for every FMA track id into an HDF5 file.

    Resumes if the output file already exists (only ids missing from its
    'data' group are recomputed).  `quick` limits the run to 100 ids and
    writes to a throwaway DELETE.* file; `cores` sizes the worker pool.
    Failed extractions (spec is None) are skipped silently.
    """
    if quick:
        ids = ids[:100]
        file_path = './Data/features/DELETE.fma_' + fma_set + '_' + features + '.hdf5'
    else:
        file_path = './Data/features/fma_' + fma_set + '_' + features + '.hdf5'
    resuming = os.path.isfile(file_path)
    f = h5py.File(file_path, 'a')
    try:
        if resuming:
            data = f['data']
            # Stored keys are strings; FMA ids are ints, hence the cast.
            ids = list(set(ids) - set(int(x) for x in data.keys()))
            print(f'File already in path, attempting to add missing ids of which there are {len(ids)}')
        else:
            data = f.create_group('data')
        func = {
            'stft': get_fma_stft,
            'stft_halved': get_fma_stft_halved,
            'mel_scaled_stft': get_fma_mel_scaled_stft,
            'cqt': get_fma_cqt,
            'chroma': get_fma_chroma,
            'mfcc': get_fma_mfcc
        }
        pool = multiprocessing.Pool(cores)
        try:
            for i, spec in tqdm(pool.imap_unordered(func[features], ids), total=len(ids)):
                if spec is not None:
                    data[str(i)] = spec
        finally:
            # Bug fix: the pool was never closed/joined, leaking worker
            # processes on every call.
            pool.close()
            pool.join()
    finally:
        # Bug fix: close the HDF5 file even when extraction raises.
        f.close()
def get_spotify_stft(track_id):
    # Placeholder: full-resolution STFT extraction is not implemented for
    # the Spotify dataset.  NOTE(review): this returns None implicitly,
    # which does not match the (track_id, spec) tuple that the
    # extract_spotify() loop unpacks — selecting features='stft' for
    # spotify would fail at that unpacking.  Confirm whether this path is
    # ever meant to be reachable.
    pass
def get_spotify_stft_halved(track_id):
    """Return (track_id, standardized dB STFT limited to the first 1024 rows).

    Returns (track_id, None) after printing the traceback on failure.
    """
    standardizer = sklearn.preprocessing.StandardScaler()
    sample_rate = 22050
    fft_size = 4096
    hop = 1024
    audio_path = './Data/Spotify/audio/' + track_id + '.mp3'
    try:
        signal, _ = lr.load(path=audio_path, sr=sample_rate)
        spectrum = lr.stft(y=signal, n_fft=fft_size, hop_length=hop,
                           win_length=fft_size, window='hann')
    except Exception:
        traceback.print_exc()
        print('*' * 20, str(track_id))
        return track_id, None
    # Truncate to 643 frames, standardize, then keep the first 1024 rows.
    db_spec = lr.amplitude_to_db(np.abs(spectrum[:, :643]))
    return track_id, standardizer.fit_transform(db_spec)[0:1024, :]
def get_spotify_mel_scaled_stft(track_id):
    """Return (track_id, standardized dB mel spectrogram) for one Spotify track.

    Returns (track_id, None) after printing the traceback on failure.
    """
    standardizer = sklearn.preprocessing.StandardScaler()
    sample_rate = 22050
    fft_size = 4096
    hop = 1024
    audio_path = './Data/Spotify/audio/' + track_id + '.mp3'
    try:
        signal, _ = lr.load(path=audio_path, sr=sample_rate)
        mel = lr.feature.melspectrogram(y=signal, sr=sample_rate,
                                        n_fft=fft_size, hop_length=hop,
                                        n_mels=256)
    except Exception:
        traceback.print_exc()
        print('*' * 20, str(track_id))
        return track_id, None
    # Truncate to the first 643 frames, then standardize per-track.
    return track_id, standardizer.fit_transform(lr.power_to_db(mel[:, :643]))
def get_spotify_cqt(track_id):
    """Return (track_id, standardized dB constant-Q transform) for one Spotify track.

    Returns (track_id, None) after printing the traceback on failure.
    """
    standardizer = sklearn.preprocessing.StandardScaler()
    sample_rate = 22050
    hop = 1024
    audio_path = './Data/Spotify/audio/' + track_id + '.mp3'
    try:
        signal, _ = lr.load(path=audio_path, sr=sample_rate)
        # 84*2 bins at 24 bins/octave = double the default resolution.
        magnitude = np.abs(lr.core.cqt(y=signal, sr=sample_rate,
                                       hop_length=hop, window='hann',
                                       n_bins=84 * 2, bins_per_octave=12 * 2))
    except Exception:
        traceback.print_exc()
        print('*' * 20, str(track_id))
        return track_id, None
    return track_id, standardizer.fit_transform(lr.amplitude_to_db(magnitude[:, :643]))
def get_spotify_chroma(track_id):
    """Return (track_id, standardized CQT-based chromagram) for one Spotify track.

    Returns (track_id, None) after printing the traceback on failure.
    """
    standardizer = sklearn.preprocessing.StandardScaler()
    sample_rate = 22050
    hop = 1024
    # 24 bins per octave, i.e. two per semitone.
    double_resolution = 12 * 2
    audio_path = './Data/Spotify/audio/' + track_id + '.mp3'
    try:
        signal, _ = lr.load(path=audio_path, sr=sample_rate)
        chromagram = lr.feature.chroma_cqt(y=signal, sr=sample_rate,
                                           hop_length=hop,
                                           bins_per_octave=double_resolution)
    except Exception:
        traceback.print_exc()
        print('*' * 20, str(track_id))
        return track_id, None
    # Truncate to the first 643 frames, then standardize per-track.
    return track_id, standardizer.fit_transform(chromagram[:, :643])
def get_spotify_mfcc(track_id):
    """Return (track_id, standardized MFCCs) for one Spotify track.

    Unlike the FMA variant, this reads the pre-computed mel-scaled
    spectrogram from HDF5 instead of decoding the audio again.
    Returns (track_id, None) after printing the traceback on failure.
    """
    standardizer = sklearn.preprocessing.StandardScaler()
    sample_rate = 22050
    coeff_count = 13
    # Parameters forwarded to the underlying mel-spectrogram computation.
    mel_kwargs = {'n_fft': 4096, 'hop_length': 1024, 'n_mels': 256}
    spectrogram_store = './Data/features/spotify_mel_scaled_stft.hdf5'
    try:
        with h5py.File(spectrogram_store, 'r') as store:
            mel = store['data'][str(track_id)][()]
        # [1:] drops the first coefficient row, keeping the remaining 12.
        coeffs = lr.feature.mfcc(S=mel, sr=sample_rate,
                                 n_mfcc=coeff_count, **mel_kwargs)[1:]
    except Exception:
        traceback.print_exc()
        print('*' * 20, str(track_id))
        return track_id, None
    return track_id, standardizer.fit_transform(coeffs[:, :643])
def extract_spotify(ids, features, quick, cores):
    """Extract `features` for every Spotify track id into an HDF5 file.

    Resumes if the output file already exists (only ids missing from its
    'data' group are recomputed).  `quick` limits the run to 100 ids and
    writes to a throwaway DELETE.* file; `cores` sizes the worker pool.
    Failed extractions (spec is None) are skipped silently.
    """
    if quick:
        ids = ids[:100]
        file_path = './Data/features/DELETE.spotify_' + features + '.hdf5'
    else:
        file_path = './Data/features/spotify_' + features + '.hdf5'
    resuming = os.path.isfile(file_path)
    f = h5py.File(file_path, 'a')
    try:
        if resuming:
            data = f['data']
            # Spotify ids are already strings, so no int() cast here
            # (unlike extract_fma).
            ids = list(set(ids) - set(data.keys()))
            print(f'File already in path, attempting to add missing ids of which there are {len(ids)}')
        else:
            data = f.create_group('data')
        func = {
            'stft': get_spotify_stft,
            'stft_halved': get_spotify_stft_halved,
            'mel_scaled_stft': get_spotify_mel_scaled_stft,
            'cqt': get_spotify_cqt,
            'chroma': get_spotify_chroma,
            'mfcc': get_spotify_mfcc
        }
        pool = multiprocessing.Pool(cores)
        try:
            for i, spec in tqdm(pool.imap_unordered(func[features], ids), total=len(ids)):
                if spec is not None:
                    data[str(i)] = spec
        finally:
            # Bug fix: the pool was never closed/joined, leaking worker
            # processes on every call.
            pool.close()
            pool.join()
    finally:
        # Bug fix: close the HDF5 file even when extraction raises.
        f.close()
if __name__ == '__main__':
    print('File Start...')
    file_start = time.perf_counter()
    # Bug fix: the original rebound the imported FMA *module* name to the
    # dataset instance (FMA = FMA.FreeMusicArchive(...)), shadowing the
    # module; a distinct local keeps the module usable.  The dataset
    # branches are mutually exclusive, so if/elif is equivalent.
    if args.dataset == 'fma_med':
        fma_data = FMA.FreeMusicArchive('medium', 22050)
        ids = fma_data.TRACKS.index.values
        extract_fma(ids, 'med', args.features, args.quick, int(args.cores))
    elif args.dataset == 'fma_large':
        fma_data = FMA.FreeMusicArchive('large', 22050)
        ids = fma_data.TRACKS.index.values
        extract_fma(ids, 'large', args.features, args.quick, int(args.cores))
    elif args.dataset == 'spotify':
        # NOTE(review): pickle.loads of a local checkpoint — safe only as
        # long as the checkpoint file is trusted.
        with open('./Data/Spotify/data_checkpoint.pickle', 'rb') as f:
            save = pickle.load(f)
        TRACKS = save['tracks']
        del save
        ids = TRACKS.index.values
        extract_spotify(ids, args.features, args.quick, int(args.cores))
    print(f'Total file execution time: {time.perf_counter()-file_start:.2f}s')
| 30.345845
| 144
| 0.618076
| 1,578
| 11,319
| 4.197085
| 0.119138
| 0.059188
| 0.043183
| 0.033218
| 0.807791
| 0.787408
| 0.779556
| 0.763702
| 0.763702
| 0.751321
| 0
| 0.036537
| 0.24799
| 11,319
| 373
| 145
| 30.345845
| 0.741541
| 0.026504
| 0
| 0.704545
| 0
| 0
| 0.112616
| 0.026719
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05303
| false
| 0.003788
| 0.05303
| 0
| 0.189394
| 0.098485
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dc52eb1f1c50d98afe4fa89b1124cd6fbc58c83a
| 138
|
py
|
Python
|
alura-python/best-practices/test-long.py
|
wiltonpaulo/python-fullcourse
|
5befe60221a2e6f8a567a11e2f449245c11b3447
|
[
"MIT"
] | null | null | null |
alura-python/best-practices/test-long.py
|
wiltonpaulo/python-fullcourse
|
5befe60221a2e6f8a567a11e2f449245c11b3447
|
[
"MIT"
] | null | null | null |
alura-python/best-practices/test-long.py
|
wiltonpaulo/python-fullcourse
|
5befe60221a2e6f8a567a11e2f449245c11b3447
|
[
"MIT"
] | null | null | null |
# Demo: a statement kept within PEP 8's line-length limit by splitting one
# print() call across two argument lines (print joins the two strings with
# a single space at runtime).  The typos inside the strings are part of
# the literal output and are deliberately left untouched.
print("This is a long line to be printed on python exceeding",
      "the 79 characte to be printed on python exceeding the 79 charact")
| 46
| 74
| 0.731884
| 24
| 138
| 4.208333
| 0.666667
| 0.079208
| 0.217822
| 0.257426
| 0.653465
| 0.653465
| 0.653465
| 0.653465
| 0
| 0
| 0
| 0.037037
| 0.217391
| 138
| 2
| 75
| 69
| 0.898148
| 0
| 0
| 0
| 0
| 0
| 0.855072
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
dc857390422bc62ab9d2c4925ddeaed3432f13b7
| 6,404
|
py
|
Python
|
migration/tests/test_dump.py
|
tlasich/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 411
|
2016-06-14T20:52:25.000Z
|
2022-03-31T21:20:25.000Z
|
migration/tests/test_dump.py
|
KaelanWillauer/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 5,730
|
2016-05-23T21:04:32.000Z
|
2022-03-31T10:08:06.000Z
|
migration/tests/test_dump.py
|
KaelanWillauer/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 423
|
2016-09-22T21:11:30.000Z
|
2022-03-29T18:55:28.000Z
|
from argparse import Namespace
from io import StringIO
from pathlib import Path
import sys
from tempfile import TemporaryDirectory
from types import SimpleNamespace
from unittest import TestCase
from unittest.mock import patch
from migrator.main import dump
COURSE_DB_FRAGMENT = """
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.12 (Ubuntu 10.12-0ubuntu0.18.04.1)
-- Dumped by pg_dump version 10.12 (Ubuntu 10.12-0ubuntu0.18.04.1)
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- Name: notifications_component; Type: TYPE; Schema: public; Owner: -
--
CREATE TYPE public.notifications_component AS ENUM (
'forum',
'student',
'grading',
'team'
);
"""
COURSE_DB_EXPECTED = """
--
-- PostgreSQL database dump
--
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- Name: notifications_component; Type: TYPE; Schema: public; Owner: -
--
CREATE TYPE public.notifications_component AS ENUM (
'forum',
'student',
'grading',
'team'
);
"""
MASTER_DB_FRAGMENT = """
--
-- PostgreSQL database dump
--
-- Dumped from database version 10.12 (Ubuntu 10.12-0ubuntu0.18.04.1)
-- Dumped by pg_dump version 10.12 (Ubuntu 10.12-0ubuntu0.18.04.1)
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- Name: courses; Type: TABLE; Schema: public; Owner: -
--
CREATE TABLE public.courses (
semester character varying(255) NOT NULL,
course character varying(255) NOT NULL,
status smallint DEFAULT 1 NOT NULL
);
"""
MASTER_DB_EXPECTED = """
--
-- PostgreSQL database dump
--
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
--
-- Name: courses; Type: TABLE; Schema: public; Owner: -
--
CREATE TABLE public.courses (
semester character varying(255) NOT NULL,
course character varying(255) NOT NULL,
status smallint DEFAULT 1 NOT NULL
);
"""
class TestDump(TestCase):
    """Tests for migrator.main.dump with pg_dump output stubbed out.

    Each test patches migrator.dumper.check_output so no real database is
    needed, then checks the .sql files written under a temp directory and
    the progress messages printed to stdout.
    """

    def setUp(self):
        # Capture everything dump() prints for the assertRegex checks.
        sys.stdout = StringIO()

    def tearDown(self):
        sys.stdout = sys.__stdout__

    @patch('migrator.dumper.check_output', side_effect=[
        MASTER_DB_FRAGMENT,
        COURSE_DB_FRAGMENT
    ])
    def test_dump_all(self, subprocess):
        # Dumping both environments writes one cleaned .sql file per
        # environment and reports progress for each on stdout.
        with TemporaryDirectory() as tmp_dirname:
            config = SimpleNamespace()
            config.database = {
                'database_driver': 'psql'
            }
            args = Namespace()
            args.config = config
            args.environments = ['master', 'course']
            args.path = tmp_dirname
            data_dir = Path(tmp_dirname, 'data')
            data_dir.mkdir()
            dump(args)
            submitty_db = data_dir / 'submitty_db.sql'
            self.assertTrue(submitty_db.exists())
            self.assertEqual(MASTER_DB_EXPECTED, submitty_db.read_text())
            course_db = data_dir / 'course_tables.sql'
            self.assertTrue(course_db.exists())
            self.assertEqual(COURSE_DB_EXPECTED, course_db.read_text())
            self.assertRegex(
                sys.stdout.getvalue(),
                r"Dumping master environment to .*/data/submitty_db.sql... DONE\n" +
                r"Dumping course environment to .*/data/course_tables.sql... DONE\n"
            )

    @patch('migrator.dumper.check_output', side_effect=[
        MASTER_DB_FRAGMENT
    ])
    def test_dump_master(self, subprocess):
        # Only the master environment is dumped and reported.
        with TemporaryDirectory() as tmp_dirname:
            config = SimpleNamespace()
            config.database = {
                'database_driver': 'psql'
            }
            args = Namespace()
            args.config = config
            args.environments = ['master']
            args.path = tmp_dirname
            data_dir = Path(tmp_dirname, 'data')
            data_dir.mkdir()
            dump(args)
            submitty_db = data_dir / 'submitty_db.sql'
            self.assertTrue(submitty_db.exists())
            self.assertEqual(MASTER_DB_EXPECTED, submitty_db.read_text())
            self.assertRegex(
                sys.stdout.getvalue(),
                r"Dumping master environment to .*/data/submitty_db.sql... DONE"
            )

    # NOTE(review): unlike the other tests, this patch feeds the already
    # cleaned COURSE_DB_EXPECTED text (not COURSE_DB_FRAGMENT) to the
    # dumper — confirm whether feeding the FRAGMENT was intended.
    @patch('migrator.dumper.check_output', side_effect=[
        COURSE_DB_EXPECTED
    ])
    def test_dump_course(self, subprocess):
        # Only the course environment is dumped and reported.
        with TemporaryDirectory() as tmp_dirname:
            config = SimpleNamespace()
            config.database = {
                'database_driver': 'psql'
            }
            args = Namespace()
            args.config = config
            args.environments = ['course']
            args.path = tmp_dirname
            data_dir = Path(tmp_dirname, 'data')
            data_dir.mkdir()
            dump(args)
            course_db = data_dir / 'course_tables.sql'
            self.assertTrue(course_db.exists())
            self.assertEqual(COURSE_DB_EXPECTED, course_db.read_text())
            self.assertRegex(
                sys.stdout.getvalue(),
                r"Dumping course environment to .*/data/course_tables.sql... DONE"
            )

    def test_dump_non_psql_driver(self):
        # Non-postgresql drivers must abort with SystemExit and a message.
        with self.assertRaises(SystemExit) as cm:
            config = SimpleNamespace()
            config.database = {
                'database_driver': 'sqlite'
            }
            args = Namespace()
            args.config = config
            dump(args)
        self.assertEqual(str(cm.exception), 'Cannot dump schema for non-postgresql database')
| 28.717489
| 93
| 0.642411
| 743
| 6,404
| 5.323015
| 0.193809
| 0.024273
| 0.033375
| 0.027307
| 0.867509
| 0.852086
| 0.839697
| 0.829583
| 0.829583
| 0.829583
| 0
| 0.018864
| 0.254997
| 6,404
| 222
| 94
| 28.846847
| 0.810103
| 0
| 0
| 0.778351
| 0
| 0.020619
| 0.475953
| 0.124297
| 0
| 0
| 0
| 0
| 0.06701
| 1
| 0.030928
| false
| 0
| 0.046392
| 0
| 0.082474
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dc9a83872294569313cf449f8cde27e5b9e3d9ad
| 1,440
|
py
|
Python
|
main_app/models.py
|
Jonak-Adipta-Kalita/JAK-Website
|
39c3723e95d99e990a2e23dbb05746def2ac903a
|
[
"MIT"
] | 1
|
2021-08-31T14:21:16.000Z
|
2021-08-31T14:21:16.000Z
|
main_app/models.py
|
Jonak-Adipta-Kalita/JAK-Website
|
39c3723e95d99e990a2e23dbb05746def2ac903a
|
[
"MIT"
] | 74
|
2021-11-03T03:19:12.000Z
|
2022-03-31T03:23:49.000Z
|
main_app/models.py
|
Jonak-Adipta-Kalita/JAK-Website
|
39c3723e95d99e990a2e23dbb05746def2ac903a
|
[
"MIT"
] | null | null | null |
from django.db import models
class Game_Own(models.Model):
    """Model for an owned-game entry: name, link, description and image."""
    # NOTE(review): this assigns the AutoField *class*, not an instance —
    # likely intended models.AutoField(primary_key=True).  Left unchanged
    # here because instantiating it would alter the DB schema and require
    # a migration; Game_Fav has the same pattern.
    game_id = models.AutoField
    game_name = models.CharField(max_length=500, default="")
    # Bug fix: game_link was declared twice with identical definitions; the
    # second assignment simply rebound the attribute, so the duplicate line
    # is removed with no behavior change.
    game_link = models.CharField(max_length=500, default="")
    desc = models.CharField(max_length=5000, default="")
    image = models.ImageField(upload_to="image", default="")

    def __str__(self):
        # Rendered as "<name> - <link>".
        return self.game_name + " - " + self.game_link
class Game_Fav(models.Model):
    """Model for a favourite-game entry: name, link, description and image."""
    game_id = models.AutoField
    game_name = models.CharField(max_length=500, default="")
    game_link = models.CharField(max_length=500, default="")
    desc = models.CharField(max_length=5000, default="")
    image = models.ImageField(upload_to="image", default="")

    def __str__(self):
        # Rendered as "<name> - <link>", same as the concatenated original.
        return " - ".join([self.game_name, self.game_link])
class Contact(models.Model):
    """Contact-message record: sender name, email, phone and message text."""
    # Explicit integer primary key for each message.
    msg_id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=50)
    email = models.CharField(max_length=70, default="")
    phone = models.CharField(max_length=70, default="")
    desc = models.CharField(max_length=500, default="")

    def __str__(self):
        # Represent the record by the sender's name.
        return self.name
class My_Photo(models.Model):
    """Photo record: a name plus an image uploaded under 'image/'."""
    # Explicit integer primary key for each photo.
    photo_id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=50, default="")
    image = models.ImageField(upload_to="image", default="")

    def __str__(self):
        # Represent the record by its name.
        return self.name
| 32
| 60
| 0.692361
| 187
| 1,440
| 5.069519
| 0.213904
| 0.189873
| 0.227848
| 0.303797
| 0.892405
| 0.892405
| 0.790084
| 0.753165
| 0.753165
| 0.753165
| 0
| 0.0285
| 0.171528
| 1,440
| 44
| 61
| 32.727273
| 0.766136
| 0
| 0
| 0.625
| 0
| 0
| 0.014583
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.03125
| 0.125
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
dca031d3e95f32dcee75fed9b57beac87f19fa75
| 5,717
|
py
|
Python
|
train_val.py
|
actionLUO/caffe-u-net
|
81525e862f84694c5594a6ffb9a33c53105ecec9
|
[
"MIT"
] | 15
|
2017-11-28T17:17:37.000Z
|
2019-02-19T01:41:11.000Z
|
train_val.py
|
actionLUO/caffe-u-net
|
81525e862f84694c5594a6ffb9a33c53105ecec9
|
[
"MIT"
] | null | null | null |
train_val.py
|
actionLUO/caffe-u-net
|
81525e862f84694c5594a6ffb9a33c53105ecec9
|
[
"MIT"
] | 5
|
2017-12-04T18:32:54.000Z
|
2019-05-23T04:02:08.000Z
|
import caffe
import numpy as np
from PIL import Image
import random
class VALSegDataLayer(caffe.Layer):
    """Caffe Python data layer feeding (image, label) pairs for validation.

    Reads RGB PNGs from <val_dir>/PNGImages and label PNGs from
    <val_dir>/SegmentationClass; the example list comes from
    <val_dir>/ImageSets/Segmentation/<split>.txt.
    """

    def setup(self, bottom, top):
        """Parse the layer's param_str config and load the index list."""
        # NOTE(review): eval() on param_str executes arbitrary code from
        # the prototxt; for a plain dict config, ast.literal_eval would be
        # safer.  Flagged rather than replaced to avoid changing behavior
        # for configs that rely on eval semantics.
        params = eval(self.param_str)
        self.val_dir = params['val_dir']
        self.split = params['split']
        self.mean = np.array(params['mean'])
        self.random = params.get('randomize', True)
        self.seed = params.get('seed', None)
        # two tops: data and label
        if len(top) != 2:
            raise Exception("Need to define two tops: data and label.")
        # data layers have no bottoms
        if len(bottom) != 0:
            raise Exception("Do not define a bottom.")
        # Load indices for images and labels.  Bug fix: use a context
        # manager so the split file's handle is closed instead of leaked
        # (the original used open(...).read() and never closed it).
        split_f = '{}/ImageSets/Segmentation/{}.txt'.format(self.val_dir,
                                                           self.split)
        with open(split_f, 'r') as index_file:
            self.indices = index_file.read().splitlines()
        self.idx = 0
        # make eval deterministic
        if 'train' not in self.split:
            self.random = False
        # randomization: seed and pick
        if self.random:
            random.seed(self.seed)
            self.idx = random.randint(0, len(self.indices)-1)

    def reshape(self, bottom, top):
        """Load the current (image, label) pair and size the tops to match."""
        self.data = self.load_image(self.indices[self.idx])
        self.label = self.load_label(self.indices[self.idx])
        # reshape tops to fit (leading 1 is for batch dimension)
        top[0].reshape(1, *self.data.shape)
        top[1].reshape(1, *self.label.shape)

    def forward(self, bottom, top):
        """Copy the prepared pair into the tops and advance the cursor."""
        top[0].data[...] = self.data
        top[1].data[...] = self.label
        # pick next input: random with replacement, or sequential wrap-around
        if self.random:
            self.idx = random.randint(0, len(self.indices)-1)
        else:
            self.idx += 1
            if self.idx == len(self.indices):
                self.idx = 0

    def backward(self, top, propagate_down, bottom):
        # Data layers produce no gradients; nothing to back-propagate.
        pass

    def load_image(self, idx):
        """
        Load input image and preprocess for Caffe:
        - cast to float
        - switch channels RGB -> BGR
        - subtract mean
        - transpose to channel x height x width order
        """
        im = Image.open('{}/PNGImages/{}.png'.format(self.val_dir, idx))
        im = im.resize((500, 500))
        in_ = np.array(im, dtype=np.float32)
        in_ = in_[:, :, ::-1]
        in_ -= self.mean
        in_ = in_.transpose((2, 0, 1))
        return in_

    def load_label(self, idx):
        """
        Load label image as 1 x height x width integer array of label indices.
        The leading singleton dimension is required by the loss.
        """
        im = Image.open('{}/SegmentationClass/{}.png'.format(self.val_dir, idx))
        im = im.resize((500, 500))
        label = np.array(im, dtype=np.uint8)
        label = label[np.newaxis, ...]
        return label
class TRAINSegDataLayer(caffe.Layer):
    """
    Caffe Python data layer that feeds image/label pairs from a
    PASCAL-VOC-style directory tree for training a semantic
    segmentation network.

    `param_str` must eval to a dict with keys:
    - train_dir: dataset root (contains PNGImages/, SegmentationClass/
      and ImageSets/Segmentation/)
    - split: split file stem, e.g. 'train'
    - mean: per-channel mean to subtract (BGR order after the swap below)
    - randomize: optional bool, pick samples at random (default True)
    - seed: optional RNG seed
    """

    def setup(self, bottom, top):
        # config
        # NOTE(review): eval() executes arbitrary code from the prototxt;
        # standard pycaffe idiom, but only safe with trusted net definitions.
        params = eval(self.param_str)
        self.train_dir = params['train_dir']
        self.split = params['split']
        self.mean = np.array(params['mean'])
        self.random = params.get('randomize', True)
        self.seed = params.get('seed', None)
        # two tops: data and label
        if len(top) != 2:
            raise Exception("Need to define two tops: data and label.")
        # data layers have no bottoms
        if len(bottom) != 0:
            raise Exception("Do not define a bottom.")
        # load indices for images and labels
        split_f = '{}/ImageSets/Segmentation/{}.txt'.format(self.train_dir,
                self.split)
        # close the split file deterministically instead of leaking the handle
        with open(split_f, 'r') as f:
            self.indices = f.read().splitlines()
        self.idx = 0
        # make eval deterministic
        if 'train' not in self.split:
            self.random = False
        # randomization: seed and pick
        if self.random:
            random.seed(self.seed)
            self.idx = random.randint(0, len(self.indices)-1)

    def reshape(self, bottom, top):
        # load image + label image pair
        self.data = self.load_image(self.indices[self.idx])
        self.label = self.load_label(self.indices[self.idx])
        # reshape tops to fit (leading 1 is for batch dimension)
        top[0].reshape(1, *self.data.shape)
        top[1].reshape(1, *self.label.shape)

    def forward(self, bottom, top):
        # assign output
        top[0].data[...] = self.data
        top[1].data[...] = self.label
        # pick next input: random sample or sequential wrap-around
        if self.random:
            self.idx = random.randint(0, len(self.indices)-1)
        else:
            self.idx += 1
            if self.idx == len(self.indices):
                self.idx = 0

    def backward(self, top, propagate_down, bottom):
        # data layers have no gradients to propagate
        pass

    def load_image(self, idx):
        """
        Load input image and preprocess for Caffe:
        - cast to float
        - switch channels RGB -> BGR
        - subtract mean
        - transpose to channel x height x width order
        """
        # context manager releases the underlying image file handle
        with Image.open('{}/PNGImages/{}.png'.format(self.train_dir, idx)) as im:
            im = im.resize((500, 500))
            in_ = np.array(im, dtype=np.float32)
        # assumes a 3-channel RGB source image -- TODO confirm dataset format
        in_ = in_[:,:,::-1]
        in_ -= self.mean
        in_ = in_.transpose((2,0,1))
        return in_

    def load_label(self, idx):
        """
        Load label image as 1 x height x width integer array of label indices.
        The leading singleton dimension is required by the loss.
        """
        with Image.open('{}/SegmentationClass/{}.png'.format(self.train_dir, idx)) as im:
            im = im.resize((500, 500))
            label = np.array(im, dtype=np.uint8)
        label = label[np.newaxis, ...]
        return label
| 31.070652
| 82
| 0.560784
| 740
| 5,717
| 4.278378
| 0.172973
| 0.04422
| 0.024637
| 0.034112
| 0.95578
| 0.95578
| 0.95578
| 0.95578
| 0.95578
| 0.95578
| 0
| 0.018349
| 0.313626
| 5,717
| 183
| 83
| 31.240437
| 0.788481
| 0.18926
| 0
| 0.867925
| 0
| 0
| 0.079676
| 0.026559
| 0
| 0
| 0
| 0
| 0
| 1
| 0.113208
| false
| 0.018868
| 0.037736
| 0
| 0.207547
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f4a0293aa55db3ff61bd4075dc5d057e45957289
| 187
|
py
|
Python
|
IDG/__init__.py
|
shoter/IotDeploymentGenerator
|
dbdeb792d41395ea2a0255c1dad2644e9118a68a
|
[
"MIT"
] | 5
|
2021-07-21T07:11:22.000Z
|
2022-03-24T23:06:34.000Z
|
IDG/__init__.py
|
shoter/IotDeploymentGenerator
|
dbdeb792d41395ea2a0255c1dad2644e9118a68a
|
[
"MIT"
] | 5
|
2021-07-19T21:56:17.000Z
|
2021-11-21T15:32:10.000Z
|
IDG/__init__.py
|
shoter/IotDeploymentGenerator
|
dbdeb792d41395ea2a0255c1dad2644e9118a68a
|
[
"MIT"
] | null | null | null |
from IDG.jsonObject import JsonObject
from IDG.deployment import Deployment
from IDG.module import Module
from IDG.routeSettings import RouteSettings
from IDG.jsonObject import JsonObject
| 37.4
| 43
| 0.871658
| 25
| 187
| 6.52
| 0.28
| 0.214724
| 0.208589
| 0.282209
| 0.404908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.101604
| 187
| 5
| 44
| 37.4
| 0.970238
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
76036dec3201c7aeedd8bf31d7e3367692fc7bac
| 488
|
py
|
Python
|
quarkchain/cluster/guardian.py
|
tim-yoshi/pyquarkchain
|
1847542c166a180b5ffc3c6e917751be85fa15a6
|
[
"MIT"
] | 237
|
2018-09-18T00:47:14.000Z
|
2022-03-21T21:43:07.000Z
|
quarkchain/cluster/guardian.py
|
tim-yoshi/pyquarkchain
|
1847542c166a180b5ffc3c6e917751be85fa15a6
|
[
"MIT"
] | 409
|
2018-09-18T01:02:29.000Z
|
2022-01-24T20:51:58.000Z
|
quarkchain/cluster/guardian.py
|
tim-yoshi/pyquarkchain
|
1847542c166a180b5ffc3c6e917751be85fa15a6
|
[
"MIT"
] | 125
|
2018-09-18T00:47:28.000Z
|
2022-03-24T20:00:46.000Z
|
class Guardian:
    """Guardian-specific proof-of-work difficulty adjustment."""

    @staticmethod
    def adjust_difficulty(original_difficulty: int, block_height: int):
        """Return the guardian-adjusted difficulty (flat 1000x reduction).

        `block_height` is accepted for a future height-based schedule but
        is currently unused.
        """
        # TODO: decide on the parameters for mainnet. A height-based
        # schedule like the one below may replace the flat divisor:
        #   height < 1000   -> original_difficulty // 1000
        #   height < 10000  -> original_difficulty // 100
        #   height < 100000 -> original_difficulty // 10
        #   otherwise       -> original_difficulty
        divisor = 1000
        return original_difficulty // divisor
| 37.538462
| 71
| 0.639344
| 50
| 488
| 6.02
| 0.48
| 0.358804
| 0.398671
| 0.186047
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081633
| 0.297131
| 488
| 12
| 72
| 40.666667
| 0.795918
| 0.528689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.75
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
7626a608f4b388f82b89ef205b2e842df1af1221
| 136
|
py
|
Python
|
7KYU/remove_chars.py
|
yaznasivasai/python_codewars
|
25493591dde4649dc9c1ec3bece8191a3bed6818
|
[
"MIT"
] | 4
|
2021-07-17T22:48:03.000Z
|
2022-03-25T14:10:58.000Z
|
7KYU/remove_chars.py
|
yaznasivasai/python_codewars
|
25493591dde4649dc9c1ec3bece8191a3bed6818
|
[
"MIT"
] | null | null | null |
7KYU/remove_chars.py
|
yaznasivasai/python_codewars
|
25493591dde4649dc9c1ec3bece8191a3bed6818
|
[
"MIT"
] | 3
|
2021-06-14T14:18:16.000Z
|
2022-03-16T06:02:02.000Z
|
from string import ascii_letters
def remove_chars(s: str) -> str:
    """Strip every character that is not an ASCII letter or a space."""
    keep = set(ascii_letters + ' ')
    return ''.join(ch for ch in s if ch in keep)
| 27.2
| 68
| 0.661765
| 25
| 136
| 3.48
| 0.68
| 0.275862
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.213235
| 136
| 5
| 68
| 27.2
| 0.813084
| 0
| 0
| 0
| 0
| 0
| 0.007299
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
762b3b62b5139223d68011a23a951a34085c19d8
| 44,262
|
py
|
Python
|
OmniDB/OmniDB_app/views/tree_mysql.py
|
swipswaps/OmniDB
|
03d2d791c50455176d20bc3513a48ff584164439
|
[
"MIT"
] | 1
|
2019-05-29T19:46:28.000Z
|
2019-05-29T19:46:28.000Z
|
OmniDB/OmniDB_app/views/tree_mysql.py
|
swipswaps/OmniDB
|
03d2d791c50455176d20bc3513a48ff584164439
|
[
"MIT"
] | null | null | null |
OmniDB/OmniDB_app/views/tree_mysql.py
|
swipswaps/OmniDB
|
03d2d791c50455176d20bc3513a48ff584164439
|
[
"MIT"
] | 1
|
2019-03-11T06:57:43.000Z
|
2019-03-11T06:57:43.000Z
|
from django.http import HttpResponse
from django.template import loader
from django.http import JsonResponse
from django.core import serializers
import json
import sys
import OmniDB_app.include.Spartacus as Spartacus
import OmniDB_app.include.Spartacus.Database as Database
import OmniDB_app.include.Spartacus.Utils as Utils
from OmniDB_app.include.Session import Session
from datetime import datetime
def get_tree_info(request):
    """Return connection metadata and SQL templates for the MySQL tree.

    Reads p_database_index and p_tab_id from the POSTed JSON payload,
    validates the session and the database password timeout, then answers
    with server information plus statement templates used by the UI.
    """
    v_return = {}
    v_return['v_data'] = ''
    v_return['v_error'] = False
    v_return['v_error_id'] = -1
    #Invalid session: error id 1 tells the client to re-authenticate
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    # connection object bound to the requesting browser tab
    v_database = v_session.v_tab_connections[v_tab_id]
    #Check database prompt timeout
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        # Templates offered by the tree context menus.
        # NOTE(review): the commented-out entries look intentionally
        # disabled for this backend -- confirm before enabling them.
        v_return['v_data'] = {
            'v_mode': 'database',
            'v_database_return': {
                'v_database': v_database.GetName(),
                'version': v_database.GetVersion(),
                'v_username': v_database.GetUserName(),
                'superuser': v_database.GetUserSuper(),
                'create_role': v_database.TemplateCreateRole().v_text,
                'alter_role': v_database.TemplateAlterRole().v_text,
                'drop_role': v_database.TemplateDropRole().v_text,
                #'create_tablespace': v_database.TemplateCreateTablespace().v_text,
                #'alter_tablespace': v_database.TemplateAlterTablespace().v_text,
                #'drop_tablespace': v_database.TemplateDropTablespace().v_text,
                'create_database': v_database.TemplateCreateDatabase().v_text,
                'alter_database': v_database.TemplateAlterDatabase().v_text,
                'drop_database': v_database.TemplateDropDatabase().v_text,
                #'create_sequence': v_database.TemplateCreateSequence().v_text,
                #'alter_sequence': v_database.TemplateAlterSequence().v_text,
                #'drop_sequence': v_database.TemplateDropSequence().v_text,
                'create_function': v_database.TemplateCreateFunction().v_text,
                'drop_function': v_database.TemplateDropFunction().v_text,
                'create_procedure': v_database.TemplateCreateProcedure().v_text,
                'drop_procedure': v_database.TemplateDropProcedure().v_text,
                #'create_triggerfunction': v_database.TemplateCreateTriggerFunction().v_text,
                #'drop_triggerfunction': v_database.TemplateDropTriggerFunction().v_text,
                'create_view': v_database.TemplateCreateView().v_text,
                'drop_view': v_database.TemplateDropView().v_text,
                'create_table': v_database.TemplateCreateTable().v_text,
                'alter_table': v_database.TemplateAlterTable().v_text,
                'drop_table': v_database.TemplateDropTable().v_text,
                'create_column': v_database.TemplateCreateColumn().v_text,
                'alter_column': v_database.TemplateAlterColumn().v_text,
                'drop_column': v_database.TemplateDropColumn().v_text,
                'create_primarykey': v_database.TemplateCreatePrimaryKey().v_text,
                'drop_primarykey': v_database.TemplateDropPrimaryKey().v_text,
                'create_unique': v_database.TemplateCreateUnique().v_text,
                'drop_unique': v_database.TemplateDropUnique().v_text,
                'create_foreignkey': v_database.TemplateCreateForeignKey().v_text,
                'drop_foreignkey': v_database.TemplateDropForeignKey().v_text,
                'create_index': v_database.TemplateCreateIndex().v_text,
                'drop_index': v_database.TemplateDropIndex().v_text,
                #'create_trigger': v_database.TemplateCreateTrigger().v_text,
                #'create_view_trigger': v_database.TemplateCreateViewTrigger().v_text,
                #'alter_trigger': v_database.TemplateAlterTrigger().v_text,
                #'enable_trigger': v_database.TemplateEnableTrigger().v_text,
                #'disable_trigger': v_database.TemplateDisableTrigger().v_text,
                #'drop_trigger': v_database.TemplateDropTrigger().v_text,
                #'create_partition': v_database.TemplateCreatePartition().v_text,
                #'noinherit_partition': v_database.TemplateNoInheritPartition().v_text,
                #'drop_partition': v_database.TemplateDropPartition().v_text
                'delete': v_database.TemplateDelete().v_text,
            }
        }
    except Exception as exc:
        # NOTE(review): any backend failure is reported with the
        # password_timeout flag set -- the UI apparently reuses that
        # path for error display; verify against the front-end.
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_properties(request):
    """JSON endpoint: properties list and DDL for one database object."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_data = json_object['p_data']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_rows = v_database.GetProperties(v_data['p_schema'],v_data['p_table'],v_data['p_object'],v_data['p_type']).Rows
        v_list_properties = [[v_row['Property'], v_row['Value']] for v_row in v_rows]
        v_ddl = v_database.GetDDL(v_data['p_schema'],v_data['p_table'],v_data['p_object'],v_data['p_type'])
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = {
        'properties': v_list_properties,
        'ddl': v_ddl
    }
    return JsonResponse(v_return)
def get_tables(request):
    """JSON endpoint: tables of a schema plus backend feature flags."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        # Feature flags come from the connection, not from each row.
        v_list_tables = [
            {
                'v_name': v_row['table_name'],
                'v_has_primary_keys': v_database.v_has_primary_keys,
                'v_has_foreign_keys': v_database.v_has_foreign_keys,
                'v_has_uniques': v_database.v_has_uniques,
                'v_has_indexes': v_database.v_has_indexes,
                'v_has_checks': v_database.v_has_checks,
                'v_has_excludes': v_database.v_has_excludes,
                'v_has_rules': v_database.v_has_rules,
                'v_has_triggers': v_database.v_has_triggers,
                'v_has_partitions': v_database.v_has_partitions
            }
            for v_row in v_database.QueryTables(False,v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_tables
    return JsonResponse(v_return)
def get_columns(request):
    """JSON endpoint: column metadata for one table."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_columns = [
            {
                'v_column_name': v_row['column_name'],
                'v_data_type': v_row['data_type'],
                'v_data_length': v_row['data_length'],
                'v_nullable': v_row['nullable']
            }
            for v_row in v_database.QueryTablesFields(v_table,False,v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_columns
    return JsonResponse(v_return)
def get_pk(request):
    """JSON endpoint: primary-key constraint names of a table."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        # One single-element list per constraint, as the grid expects.
        v_list_pk = [
            [v_row['constraint_name']]
            for v_row in v_database.QueryTablesPrimaryKeys(v_table, False, v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_pk
    return JsonResponse(v_return)
def get_pk_columns(request):
    """JSON endpoint: columns of one primary-key constraint."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_pkey = json_object['p_key']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_pk = [
            [v_row['column_name']]
            for v_row in v_database.QueryTablesPrimaryKeysColumns(v_pkey, v_table, False, v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_pk
    return JsonResponse(v_return)
def get_fks(request):
    """JSON endpoint: foreign keys of a table with their rules."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_fk = [
            [v_row['constraint_name'],
             v_row['r_table_name'],
             v_row['delete_rule'],
             v_row['update_rule']]
            for v_row in v_database.QueryTablesForeignKeys(v_table, False, v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_fk
    return JsonResponse(v_return)
def get_fks_columns(request):
    """JSON endpoint: column pairs of one foreign-key constraint."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_fkey = json_object['p_fkey']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_fk = [
            [v_row['r_table_name'],
             v_row['delete_rule'],
             v_row['update_rule'],
             v_row['column_name'],
             v_row['r_column_name']]
            for v_row in v_database.QueryTablesForeignKeysColumns(v_fkey, v_table, False, v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_fk
    return JsonResponse(v_return)
def get_uniques(request):
    """JSON endpoint: unique-constraint names of a table."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_uniques = [
            [v_row['constraint_name']]
            for v_row in v_database.QueryTablesUniques(v_table, False, v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_uniques
    return JsonResponse(v_return)
def get_uniques_columns(request):
    """JSON endpoint: columns of one unique constraint."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_unique = json_object['p_unique']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        # Distinct loop variable keeps v_unique (the constraint name) intact.
        v_list_uniques = [
            [v_row['column_name']]
            for v_row in v_database.QueryTablesUniquesColumns(v_unique, v_table, False, v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_uniques
    return JsonResponse(v_return)
def get_indexes(request):
    """JSON endpoint: indexes of a table with their uniqueness."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_indexes = [
            [v_row['index_name'], v_row['uniqueness']]
            for v_row in v_database.QueryTablesIndexes(v_table, False, v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_indexes
    return JsonResponse(v_return)
def get_indexes_columns(request):
    """JSON endpoint: columns of one index."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_index = json_object['p_index']
    v_table = json_object['p_table']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        # Distinct loop variable keeps v_index (the index name) intact.
        v_list_indexes = [
            [v_row['column_name']]
            for v_row in v_database.QueryTablesIndexesColumns(v_index, v_table, False, v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_indexes
    return JsonResponse(v_return)
def get_databases(request):
    """JSON endpoint: names of the databases visible to the connection."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        # Loop variable renamed so it no longer shadows the connection.
        v_list_databases = [
            {'v_name': v_row[0]}
            for v_row in v_database.QueryDatabases().Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_databases
    return JsonResponse(v_return)
def get_roles(request):
    """JSON endpoint: role names of the connected server."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_roles = [
            {'v_name': v_row['role_name']}
            for v_row in v_database.QueryRoles().Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_roles
    return JsonResponse(v_return)
def get_functions(request):
    """JSON endpoint: functions of a schema (name and id)."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_functions = [
            {
                'v_name': v_row['name'],
                'v_id': v_row['id']
            }
            for v_row in v_database.QueryFunctions(False,v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_functions
    return JsonResponse(v_return)
def get_function_fields(request):
    """JSON endpoint: parameters/fields of one function."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_function = json_object['p_function']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_list_fields = [
            {
                'v_name': v_row['name'],
                'v_type': v_row['type']
            }
            for v_row in v_database.QueryFunctionFields(v_function,v_schema).Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_fields
    return JsonResponse(v_return)
def get_function_definition(request):
    """JSON endpoint: full source definition of one function."""
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject callers without an authenticated OmniDB session.
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_function = json_object['p_function']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Stored credentials may have expired; make the UI prompt again.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        # The definition text comes straight from the backend.
        v_return['v_data'] = v_database.GetFunctionDefinition(v_function)
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def get_procedures(request):
    """Return the list of procedures in a schema as a JSON response.

    Expects POST 'data' with keys p_database_index, p_schema and p_tab_id.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that arrive without a valid OmniDB session.
    v_session = request.session.get('omnidb_session')
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_schema = v_request_data['p_schema']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Ask the client for the password again once the prompt timeout elapses.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_procedures = v_database.QueryProcedures(False, v_schema)
        v_list_functions = [
            {'v_name': v_row['name'], 'v_id': v_row['id']}
            for v_row in v_procedures.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_functions
    return JsonResponse(v_return)
def get_procedure_fields(request):
    """Return the fields of a database procedure as a JSON response.

    Expects POST 'data' with keys p_database_index, p_procedure, p_schema
    and p_tab_id.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that arrive without a valid OmniDB session.
    v_session = request.session.get('omnidb_session')
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_procedure = v_request_data['p_procedure']
    v_schema = v_request_data['p_schema']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Ask the client for the password again once the prompt timeout elapses.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_fields = v_database.QueryProcedureFields(v_procedure, v_schema)
        v_list_fields = [
            {'v_name': v_row['name'], 'v_type': v_row['type']}
            for v_row in v_fields.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_fields
    return JsonResponse(v_return)
def get_procedure_definition(request):
    """Return the SQL definition of a database procedure as a JSON response.

    Expects POST 'data' with keys p_database_index, p_procedure and p_tab_id.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that arrive without a valid OmniDB session.
    v_session = request.session.get('omnidb_session')
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_procedure = v_request_data['p_procedure']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Ask the client for the password again once the prompt timeout elapses.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_return['v_data'] = v_database.GetProcedureDefinition(v_procedure)
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
    return JsonResponse(v_return)
def get_views(request):
    """Return the list of views in a schema as a JSON response.

    Expects POST 'data' with keys p_database_index, p_schema and p_tab_id.
    Each entry carries the view name and whether the connection supports triggers.
    """
    v_return = {}
    v_return['v_data'] = ''
    v_return['v_error'] = False
    v_return['v_error_id'] = -1
    #Invalid session
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_schema = json_object['p_schema']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    #Check database prompt timeout
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_list_tables = []
    try:
        v_tables = v_database.QueryViews(False,v_schema)
        # v_has_triggers is a property of the connection, not of each view:
        # read it once instead of once per row.
        v_has_triggers = v_database.v_has_triggers
        for v_table in v_tables.Rows:
            v_table_data = {
                'v_name': v_table['table_name'],
                'v_has_triggers': v_has_triggers,
            }
            v_list_tables.append(v_table_data)
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_tables
    return JsonResponse(v_return)
def get_views_columns(request):
    """Return the columns of a view as a JSON response.

    Expects POST 'data' with keys p_database_index, p_table, p_schema
    and p_tab_id.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that arrive without a valid OmniDB session.
    v_session = request.session.get('omnidb_session')
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_table = v_request_data['p_table']
    v_schema = v_request_data['p_schema']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Ask the client for the password again once the prompt timeout elapses.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_columns = v_database.QueryViewFields(v_table, False, v_schema)
        v_list_columns = [
            {
                'v_column_name': v_row['column_name'],
                'v_data_type': v_row['data_type'],
                'v_data_length': v_row['data_length'],
            }
            for v_row in v_columns.Rows
        ]
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = v_list_columns
    return JsonResponse(v_return)
def get_view_definition(request):
    """Return the SQL definition of a view as a JSON response.

    Expects POST 'data' with keys p_database_index, p_view, p_schema
    and p_tab_id.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that arrive without a valid OmniDB session.
    v_session = request.session.get('omnidb_session')
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_view = v_request_data['p_view']
    v_schema = v_request_data['p_schema']
    v_tab_id = v_request_data['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Ask the client for the password again once the prompt timeout elapses.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_return['v_data'] = v_database.GetViewDefinition(v_view, v_schema)
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
    return JsonResponse(v_return)
def kill_backend(request):
    """Terminate a database backend process identified by its PID.

    Expects POST 'data' with keys p_database_index, p_pid and p_tab_id.
    Answers the standard {'v_data', 'v_error', 'v_error_id'} envelope.
    """
    v_return = {}
    v_return['v_data'] = ''
    v_return['v_error'] = False
    v_return['v_error_id'] = -1
    #Invalid session
    if not request.session.get('omnidb_session'):
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_session = request.session.get('omnidb_session')
    json_object = json.loads(request.POST.get('data', None))
    v_database_index = json_object['p_database_index']
    v_pid = json_object['p_pid']
    v_tab_id = json_object['p_tab_id']
    v_database = v_session.v_tab_connections[v_tab_id]
    #Check database prompt timeout
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message'] }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        # Terminate the backend; the call's return value is not used.
        v_database.v_connection.Terminate(v_pid)
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc) }
        v_return['v_error'] = True
        return JsonResponse(v_return)
    return JsonResponse(v_return)
def template_select(request):
    """Return a generated SELECT statement template for a table.

    Expects POST 'data' with keys p_database_index, p_tab_id, p_table
    and p_schema.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that arrive without a valid OmniDB session.
    v_session = request.session.get('omnidb_session')
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_table = v_request_data['p_table']
    v_schema = v_request_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Ask the client for the password again once the prompt timeout elapses.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_template_object = v_database.TemplateSelect(v_schema, v_table)
        v_template = v_template_object.v_text
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = {'v_template': v_template}
    return JsonResponse(v_return)
def template_insert(request):
    """Return a generated INSERT statement template for a table.

    Expects POST 'data' with keys p_database_index, p_tab_id, p_table
    and p_schema.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that arrive without a valid OmniDB session.
    v_session = request.session.get('omnidb_session')
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_table = v_request_data['p_table']
    v_schema = v_request_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Ask the client for the password again once the prompt timeout elapses.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_template_object = v_database.TemplateInsert(v_schema, v_table)
        v_template = v_template_object.v_text
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = {'v_template': v_template}
    return JsonResponse(v_return)
def template_update(request):
    """Return a generated UPDATE statement template for a table.

    Expects POST 'data' with keys p_database_index, p_tab_id, p_table
    and p_schema.
    """
    v_return = {'v_data': '', 'v_error': False, 'v_error_id': -1}
    # Reject requests that arrive without a valid OmniDB session.
    v_session = request.session.get('omnidb_session')
    if not v_session:
        v_return['v_error'] = True
        v_return['v_error_id'] = 1
        return JsonResponse(v_return)
    v_request_data = json.loads(request.POST.get('data', None))
    v_database_index = v_request_data['p_database_index']
    v_tab_id = v_request_data['p_tab_id']
    v_table = v_request_data['p_table']
    v_schema = v_request_data['p_schema']
    v_database = v_session.v_tab_connections[v_tab_id]
    # Ask the client for the password again once the prompt timeout elapses.
    v_timeout = v_session.DatabaseReachPasswordTimeout(int(v_database_index))
    if v_timeout['timeout']:
        v_return['v_data'] = {'password_timeout': True, 'message': v_timeout['message']}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    try:
        v_template_object = v_database.TemplateUpdate(v_schema, v_table)
        v_template = v_template_object.v_text
    except Exception as exc:
        v_return['v_data'] = {'password_timeout': True, 'message': str(exc)}
        v_return['v_error'] = True
        return JsonResponse(v_return)
    v_return['v_data'] = {'v_template': v_template}
    return JsonResponse(v_return)
| 33.710586
| 121
| 0.657426
| 5,921
| 44,262
| 4.522885
| 0.035805
| 0.105601
| 0.108738
| 0.078641
| 0.854294
| 0.848021
| 0.843428
| 0.839806
| 0.838872
| 0.83652
| 0
| 0.0016
| 0.223239
| 44,262
| 1,312
| 122
| 33.73628
| 0.777319
| 0.051195
| 0
| 0.834555
| 0
| 0
| 0.15585
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028272
| false
| 0.084817
| 0.011518
| 0
| 0.15288
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
5204fd498d504d54d600190862e2794d9a453b77
| 21,038
|
py
|
Python
|
test/test_train.py
|
youmingdeng/DMLPlayground
|
37070c10278597a4413303061d60d69ce2c4f87e
|
[
"Apache-2.0"
] | 1
|
2021-11-11T16:05:56.000Z
|
2021-11-11T16:05:56.000Z
|
test/test_train.py
|
youmingdeng/DMLPlayground
|
37070c10278597a4413303061d60d69ce2c4f87e
|
[
"Apache-2.0"
] | null | null | null |
test/test_train.py
|
youmingdeng/DMLPlayground
|
37070c10278597a4413303061d60d69ce2c4f87e
|
[
"Apache-2.0"
] | 1
|
2020-04-01T04:50:36.000Z
|
2020-04-01T04:50:36.000Z
|
from unittest import TestCase
from os.path import dirname, realpath, join
from os import remove
import glob
try:
# python 3.4+ should use builtin unittest.mock not mock package
from unittest.mock import patch
except ImportError:
from mock import patch
from train_triplet_semihard import train_triplet_semihard, parse_args as parse_triplet_semihard_args
from train_liftedstruct import train_liftedstruct, parse_args as parse_liftedstruct_args
from train_npairs import train_npairs, parse_args as parse_npairs_args
from train_clusterloss import train_clusterloss, parse_args as parse_clusterloss_args
from train_margin import train_margin, parse_args as parse_margin_args
from train_prototype import train_prototype, parse_args as parse_prototype_args
from train_proxy import train_proxy, parse_args as parse_proxy_args
from train_angular import train_angular, parse_args as parse_angular_args
from train_normproxy import train_normproxy, parse_args as parse_normproxy_args
from train_dreml import train_dreml, parse_args as parse_dreml_args
from train_rankedlistloss import train_rankedlist, parse_args as parse_rankedlist_args
from train_discriminative import train_discriminative, parse_args as parse_discriminative_args
class TestTrain_triplet_semihard(TestCase):
    """Smoke tests for the semi-hard triplet loss training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_triplet_semihard*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_triplet_semihard(self):
        argv = ['train_triplet_semihard',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_triplet_semihard',
                '--batch-size', '4']
        with patch('sys.argv', argv):
            train_triplet_semihard(parse_triplet_semihard_args())

    def test_train_triplet_semihard_resnet(self):
        argv = ['train_triplet_semihard',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--logfile', 'None',
                '--epochs', '1',
                '--num-workers', '0',
                '--model', 'resnet50_v2',
                '--save-model-prefix', 'None',
                '--number-of-runs', '2',
                '--batch-size', '4']
        with patch('sys.argv', argv):
            train_triplet_semihard(parse_triplet_semihard_args())
class TestTrain_liftedstruct(TestCase):
    """Smoke tests for the lifted-structure loss training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_liftedstruct*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_liftedstruct(self):
        argv = ['train_liftedstruct',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_liftedstruct',
                '--batch-size', '4']
        with patch('sys.argv', argv):
            train_liftedstruct(parse_liftedstruct_args())

    def test_train_liftedstruct_resnet(self):
        argv = ['train_liftedstruct',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--logfile', 'None',
                '--epochs', '1',
                '--num-workers', '0',
                '--model', 'resnet50_v2',
                '--save-model-prefix', 'None',
                '--number-of-runs', '2',
                '--batch-size', '4']
        with patch('sys.argv', argv):
            # Bug fix: this test previously invoked train_triplet_semihard,
            # so the lifted-structure resnet path was never exercised.
            train_liftedstruct(parse_liftedstruct_args())
class TestTrain_npairs(TestCase):
    """Smoke tests for the n-pairs loss training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_npairs*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_npairs(self):
        argv = ['train_npairs',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--epoch-length', '10',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_npairs',
                '--batch-size', '4']
        with patch('sys.argv', argv):
            train_npairs(parse_npairs_args())

    def test_train_npairs_resnet(self):
        argv = ['train_npairs',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--logfile', 'None',
                '--epochs', '1',
                '--num-workers', '0',
                '--model', 'resnet50_v2',
                '--save-model-prefix', 'None',
                '--number-of-runs', '2',
                '--batch-size', '4',
                '--epoch-length', '10']
        with patch('sys.argv', argv):
            train_npairs(parse_npairs_args())
class TestTrain_clusterloss(TestCase):
    """Smoke tests for the cluster loss training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_clusterloss*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_clusterloss(self):
        argv = ['train_clusterloss',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--iteration-per-epoch', '10',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_clusterloss',
                '--batch-size', '4',
                '--batch-k', '2']
        with patch('sys.argv', argv):
            train_clusterloss(parse_clusterloss_args())

    def test_train_clusterloss_resnet50_v2(self):
        argv = ['train_clusterloss',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--iteration-per-epoch', '0',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_clusterloss',
                '--batch-size', '4',
                '--batch-k', '2',
                '--model', 'resnet50_v2']
        with patch('sys.argv', argv):
            train_clusterloss(parse_clusterloss_args())
class TestTrain_margin(TestCase):
    """Smoke tests for the margin loss training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_margin*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_margin(self):
        argv = ['train_margin',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--iteration-per-epoch', '10',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_margin',
                '--batch-size', '4',
                '--batch-k', '2']
        with patch('sys.argv', argv):
            train_margin(parse_margin_args())

    def test_train_margin_resnet50_v2(self):
        argv = ['train_margin',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--iteration-per-epoch', '10',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_margin',
                '--batch-size', '4',
                '--batch-k', '2',
                '--model', 'resnet50_v2',
                '--lr-beta', '0']
        with patch('sys.argv', argv):
            train_margin(parse_margin_args())
class TestTrain_prototype(TestCase):
    """Smoke tests for the prototypical networks training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_prototype*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_prototype(self):
        argv = ['train_prototype',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--iteration-per-epoch', '5',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_prototype',
                '--batch-size', '10',
                '--nc', '3',
                '--nq', '2',
                '--ns', '4']
        with patch('sys.argv', argv):
            train_prototype(parse_prototype_args())

    def test_train_prototype_resnet50_v2(self):
        argv = ['train_prototype',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--iteration-per-epoch', '5',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_prototype',
                '--batch-size', '10',
                '--nc', '3',
                '--nq', '2',
                '--ns', '4',
                '--model', 'resnet50_v2']
        with patch('sys.argv', argv):
            train_prototype(parse_prototype_args())
class TestTrain_proxy(TestCase):
    """Smoke tests for the proxy loss training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_proxy*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_proxy(self):
        argv = ['train_proxy',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--iteration-per-epoch', '0',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_proxy',
                '--batch-size', '10']
        with patch('sys.argv', argv):
            train_proxy(parse_proxy_args())

    def test_train_proxy_iter(self):
        argv = ['train_proxy',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--iteration-per-epoch', '5',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_proxy',
                '--batch-size', '10']
        with patch('sys.argv', argv):
            train_proxy(parse_proxy_args())

    def test_train_proxy_resnet50_v2(self):
        argv = ['train_proxy',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_proxy',
                '--batch-size', '10',
                '--model', 'resnet50_v2']
        with patch('sys.argv', argv):
            train_proxy(parse_proxy_args())
class TestTrain_angular(TestCase):
    """Smoke tests for the angular loss training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_angular*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_angular(self):
        argv = ['train_angular',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--epoch-length', '10',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_angular',
                '--batch-size', '4']
        with patch('sys.argv', argv):
            train_angular(parse_angular_args())

    def test_train_angular_resnet(self):
        argv = ['train_angular',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--logfile', 'None',
                '--epochs', '1',
                '--epoch-length', '10',
                '--num-workers', '0',
                '--model', 'resnet50_v2',
                '--save-model-prefix', 'None',
                '--number-of-runs', '2',
                '--batch-size', '4']
        with patch('sys.argv', argv):
            train_angular(parse_angular_args())
class TestTrain_normproxy(TestCase):
    """Smoke tests for the normalized proxy loss training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_normproxy*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_normproxy(self):
        argv = ['train_normproxy',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_normproxy',
                '--batch-size', '10']
        with patch('sys.argv', argv):
            train_normproxy(parse_normproxy_args())

    def test_train_normproxy_resnet(self):
        argv = ['train_normproxy',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_normproxy',
                '--batch-size', '10',
                '--model', 'resnet50_v2',
                '--number-of-runs', '2']
        with patch('sys.argv', argv):
            train_normproxy(parse_normproxy_args())
class TestTrain_dreml(TestCase):
    """Smoke tests for the DREML ensemble training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_dreml*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_dreml(self):
        argv = ['train_dreml',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_dreml',
                '--batch-size', '10',
                '--number-of-ensembles', '3',
                '--meta-classes', '3']
        with patch('sys.argv', argv):
            train_dreml(parse_dreml_args())

    def test_train_dreml_resnet(self):
        argv = ['train_dreml',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_dreml',
                '--batch-size', '10',
                '--model', 'resnet50_v2',
                '--number-of-ensembles', '3',
                '--meta-classes', '3',
                '--number-of-runs', '2']
        with patch('sys.argv', argv):
            train_dreml(parse_dreml_args())
class TestTrain_rankedlistloss(TestCase):
    """Smoke tests for the ranked list loss training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_rankedlistloss*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_rankedlistloss(self):
        argv = ['train_rankedlist',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_rankedlistloss',
                '--batch-size', '12',
                '--batch-k', '3',
                '--iteration-per-epoch', '10']
        with patch('sys.argv', argv):
            train_rankedlist(parse_rankedlist_args())

    def test_train_rankedlistloss_resnet(self):
        argv = ['train_rankedlist',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_rankedlistloss',
                '--batch-size', '12',
                '--batch-k', '3',
                '--model', 'resnet50_v2',
                '--number-of-runs', '2',
                '--iteration-per-epoch', '10']
        with patch('sys.argv', argv):
            train_rankedlist(parse_rankedlist_args())
class TestTrain_discriminative(TestCase):
    """Smoke tests for the discriminative loss training entry point."""

    def setUp(self):
        # Remove parameter files written to disk by the training runs.
        def cleanup():
            for pfile in glob.glob('unittest_model_discriminative*.params'):
                remove(pfile)
        self.addCleanup(cleanup)

    def test_train_discriminative(self):
        argv = ['train_discriminative',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_discriminative',
                '--batch-size', '12']
        with patch('sys.argv', argv):
            train_discriminative(parse_discriminative_args())

    def test_train_discriminative_resnet(self):
        argv = ['train_discriminative',
                '--dataset', 'miniCUB',
                '--data-path', join(dirname(realpath(__file__)), 'microcub'),
                '--epochs', '1',
                '--num-workers', '0',
                '--logfile', 'None',
                '--save-model-prefix', 'unittest_model_discriminative',
                '--batch-size', '12',
                '--model', 'resnet50_v2',
                '--number-of-runs', '2']
        with patch('sys.argv', argv):
            train_discriminative(parse_discriminative_args())
| 40.457692
| 100
| 0.485597
| 1,847
| 21,038
| 5.283703
| 0.056849
| 0.066605
| 0.030741
| 0.056358
| 0.811866
| 0.803463
| 0.803463
| 0.795061
| 0.791372
| 0.780203
| 0
| 0.014113
| 0.373562
| 21,038
| 519
| 101
| 40.535645
| 0.726383
| 0.026856
| 0
| 0.775824
| 0
| 0
| 0.239063
| 0.053424
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107692
| false
| 0
| 0.041758
| 0
| 0.175824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
520a3fe88c54e8711764b08dc4d3995b9e0ecaf0
| 22,387
|
py
|
Python
|
mltools/nnet.py
|
YYMatUCI/CS273AHW4
|
08c3fdbef0ff6b17a4e9ec33a411ea2c26e4bb5e
|
[
"MIT"
] | null | null | null |
mltools/nnet.py
|
YYMatUCI/CS273AHW4
|
08c3fdbef0ff6b17a4e9ec33a411ea2c26e4bb5e
|
[
"MIT"
] | null | null | null |
mltools/nnet.py
|
YYMatUCI/CS273AHW4
|
08c3fdbef0ff6b17a4e9ec33a411ea2c26e4bb5e
|
[
"MIT"
] | null | null | null |
import numpy as np
from .base import classifier
from .base import regressor
from .utils import toIndex, fromIndex, to1ofK, from1ofK
from numpy import asarray as arr
from numpy import atleast_2d as twod
from numpy import asmatrix as mat
################################################################################
## NNETCLASSIFY ################################################################
################################################################################
def _add1(X):
    """Prepend a constant (bias) column of ones to the data matrix X."""
    bias = np.ones((X.shape[0], 1))
    return np.hstack((bias, X))
class nnetClassify(classifier):
"""A simple neural network classifier
Attributes:
classes: list of class (target) identifiers for the classifier
layers : list of layer sizes [N,S1,S2,...,C], where N = # of input features, S1 = # of hidden nodes
in layer 1, ... , and C = the number of classes, or 1 for a binary classifier
weights: list of numpy arrays containing each layer's weights, size e.g. (S1,N), (S2,S1), etc.
"""
def __init__(self, *args, **kwargs):
"""Constructor for nnetClassify (neural net classifier).
Parameters: see the "train" function; calls "train" if arguments passed
Properties:
classes : list of identifiers for each class
wts : list of coefficients (weights) for each layer of the NN
activation : function for layer activation function & derivative
"""
self.classes = []
self.wts = []
#self.set_activation(activation.lower())
#self.init_weights(sizes, init.lower(), X, Y)
self.Sig = lambda Z: np.tanh(Z) ## TODO: make flexible
self.dSig= lambda Z: 1.0 - np.tanh(Z)**2 # (internal layers nonlinearity & derivative)
#self.Sig0 = self.Sig
#self.dSig0= self.dSig
self.Sig0 = lambda Z: 1.0/(1.0 + np.exp(-Z)) # final layer nonlinearity & derivative
self.dSig0= lambda Z: np.exp(-Z) / (1.0+np.exp(-Z))**2
if len(args) or len(kwargs): # if we were given optional arguments,
self.train(*args, **kwargs) # just pass them through to "train"
def __repr__(self):
to_return = 'Multi-layer perceptron (neural network) classifier\nLayers [{}]'.format(self.get_layers())
return to_return
def __str__(self):
to_return = 'Multi-layer perceptron (neural network) classifier\nLayers [{}]'.format(self.get_layers())
return to_return
    def nLayers(self):
        """Return the number of weight layers in the network (len of self.wts)."""
        return len(self.wts)
@property
def layers(self):
"""Return list of layer sizes, [N,H1,H2,...,C]
N = # of input features
Hi = # of hidden nodes in layer i
C = # of output nodes (usually # of classes or 1)
"""
if len(self.wts):
layers = [self.wts[l].shape[1] for l in range(len(self.wts))]
layers.append( self.wts[-1].shape[0] )
else:
layers = []
return layers
    @layers.setter
    def layers(self, layers):
        # Resizing the network in place is not supported; weights would have
        # to be rebuilt — adapt / change size of weight matrices (?)
        raise NotImplementedError
## CORE METHODS ################################################################
def predictSoft(self, X):
"""Make 'soft' (per-class confidence) predictions of the neural network on data X.
Args:
X : MxN numpy array containing M data points with N features each
Returns:
P : MxC numpy array of C class probabilities for each of the M data
"""
X = arr(X) # convert to numpy if needed
L = self.nLayers() # get number of layers
Z = _add1(X) # initialize: input features + constant term
for l in range(L - 1): # for all *except output* layer:
Z = Z.dot( self.wts[l].T ) # compute linear response of next layer
Z = _add1( self.Sig(Z) ) # apply activation function & add constant term
Z = Z.dot( self.wts[L - 1].T ) # compute output layer linear response
Z = self.Sig0(Z) # apply output layer activation function
if Z.shape[1]==1: Z = np.hstack( (2.0*self.Sig0(0.0)-Z,Z) ) # if binary classifier, make Mx2
return Z
    def train(self, X, Y, init='zeros', stepsize=.01, stopTol=1e-4, stopIter=5000):
        """Train the neural network by stochastic gradient descent on MSE loss.
        Args:
          X : MxN numpy array containing M data points with N features each
          Y : Mx1 numpy array of targets (class labels) for each data point in X
          init : str
              'none', 'zeros', or 'random'.  NOTE(review): currently unused in
              this method body; weights must already be sized via init_weights.
          stepsize : scalar
              The stepsize for gradient descent (decreases as 1 / iter).
          stopTol : scalar
              Tolerance on the change in surrogate loss for stopping.
          stopIter : int
              The maximum number of epochs before stopping.
        """
        # Weight matrices include a bias column, hence the -1.
        if self.wts[0].shape[1] - 1 != len(X[0]):
            raise ValueError('layer[0] must equal the number of columns of X (number of features)')
        # Lazily infer the class list from Y the first time we train.
        self.classes = self.classes if len(self.classes) else np.unique(Y)
        if len(self.classes) != self.wts[-1].shape[0]: # and (self.wts[-1].shape[0]!=1 or len(self.classes)!=2):
            raise ValueError('layers[-1] must equal the number of classes in Y, or 1 for binary Y')
        M,N = mat(X).shape                          # M = number of data points, N = feature dimension
        C = len(self.classes)                       # number of classes
        L = len(self.wts)                           # get number of layers
        Y_tr_k = to1ofK(Y,self.classes)             # convert Y to 1-of-K format
        # outer loop of stochastic gradient descent
        it = 1                                      # iteration (epoch) number
        nextPrint = 1                               # next time to print info (doubling schedule)
        done = 0                                    # end of loop flag
        J01, Jsur = [],[]                           # misclassification rate & surrogate loss values
        while not done:
            step_i = float(stepsize) / it           # step size evolution; classic 1/t decrease
            # stochastic gradient update (one full pass over the data, in order;
            # NOTE(review): no shuffling between epochs)
            for j in range(M):
                A,Z = self.__responses(twod(X[j,:])) # compute all layers' responses, then backprop
                # Output-layer error signal; note dSig0 is evaluated on the
                # activation Z[L], matching __responses' return convention.
                delta = (Z[L] - Y_tr_k[j,:]) * arr(self.dSig0(Z[L])) # take derivative of output layer
                for l in range(L - 1, -1, -1):
                    grad = delta.T.dot( Z[l] )      # compute gradient on current layer wts
                    delta = delta.dot(self.wts[l]) * arr(self.dSig(Z[l])) # propagate gradient down
                    delta = delta[:,1:]             # discard constant (bias) feature
                    self.wts[l] -= step_i * grad    # take gradient step on current layer wts
            J01.append( self.err_k(X, Y_tr_k) )     # error rate (classification)
            Jsur.append( self.mse_k(X, Y_tr_k) )    # surrogate (mse on output)
            if it >= nextPrint:
                print('it {} : Jsur = {}, J01 = {}'.format(it,Jsur[-1],J01[-1]))
                nextPrint *= 2
            # check if finished: surrogate loss converged, or iteration cap hit
            # (`and` binds tighter than `or`, so stopIter always terminates)
            done = (it > 1) and (np.abs(Jsur[-1] - Jsur[-2]) < stopTol) or it >= stopIter
            it += 1
def err_k(self, X, Y):
"""Compute misclassification error rate. Assumes Y in 1-of-k form. """
return self.err(X, from1ofK(Y,self.classes).ravel())
def mse(self, X, Y):
"""Compute mean squared error of predictor 'obj' on test data (X,Y). """
return mse_k(X, to1ofK(Y))
def mse_k(self, X, Y):
"""Compute mean squared error of predictor; assumes Y is in 1-of-k format. """
return np.power(Y - self.predictSoft(X), 2).sum(1).mean(0)
## MUTATORS ####################################################################
def setActivation(self, method, sig=None, d_sig=None, sig_0=None, d_sig_0=None):
# def setActivation(self, method, sig=None, sig0=None):
"""
This method sets the activation functions.
Parameters
----------
method : string, {'logistic' , 'htangent', 'custom'} -- which activation type
Optional arguments for "custom" activation:
sig : function object F(z) returns activation function & its derivative at z (as a tuple)
sig0: activation function object F(z) for final layer of the nnet
"""
method = method.lower()
if method == 'logistic':
self.Sig = lambda z: twod(1 / (1 + np.exp(-z)))
self.dSig = lambda z: twod(np.multiply(self.Sig(z), (1 - self.Sig(z))))
# self.sig_0 = self.sig
# self.d_sig_0 = self.d_sig
elif method == 'htangent':
self.Sig = lambda z: twod(np.tanh(z))
self.dSig = lambda z: twod(1 - np.power(np.tanh(z), 2))
# self.sig_0 = self.sig
# self.d_sig_0 = self.d_sig
elif method == 'custom':
self.Sig = sig
self.dSig = d_sig
if sig_0 is not None:
self.sig_0 = sig_0
if d_sig_0 is not None:
self.d_sig_0 = d_sig_0
else:
raise ValueError('NNetClassify.set_activation: ' + str(method) + ' is not a valid option for method')
self.activation = method
def set_layers(self, sizes, init='random'):
"""
Set layers sizes to sizes.
Parameters
----------
sizes : [int]
List containing sizes.
init : str (optional)
Weight initialization method.
"""
self.init_weights(sizes, init, None, None)
def init_weights(self, sizes, init, X, Y):
"""
This method sets layer sizes and initializes the weights of the neural network
sizes = [Ninput, N1, N2, ... , Noutput], where Ninput = # of input features, and Nouput = # classes
init = {'zeros', 'random'} : initialize to all zeros or small random values (breaks symmetry)
"""
init = init.lower()
if init == 'none':
pass
elif init == 'zeros':
self.wts = [np.zeros((sizes[i + 1],sizes[i] + 1)) for i in range(len(sizes) - 1)]
elif init == 'random':
self.wts = [.0025 * np.random.randn(sizes[i+1],sizes[i]+1) for i in range(len(sizes) - 1)]
else:
raise ValueError('NNetClassify.init_weights: ' + str(init) + ' is not a valid option for init')
## HELPERS #####################################################################
def __responses(self, Xin):
"""
Helper function that gets linear sum from previous layer (A) and
saturated activation responses (Z) for a data point. Used in:
train
"""
L = len(self.wts)
A = [arr([1.0])] # initialize (layer 0)
Z = [_add1(Xin)] # input to next layer: original features
for l in range(1, L):
A.append( Z[l - 1].dot(self.wts[l - 1].T) ) # linear response of previous later
Z.append( _add1(self.Sig(A[l])) ) # apply activation & add constant feature
A.append( Z[L - 1].dot(self.wts[L - 1].T) ) # linear response, output layer
Z.append( self.Sig0(A[L]) ) # apply activation (saturate for classifier, not regressor)
return A,Z
################################################################################
################################################################################
################################################################################
class nnetRegress(regressor):
    """A simple neural network regressor
    Attributes:
      layers (list): layer sizes [N,S1,S2,...,C], where N = # of input features,
                     S1 = # of hidden nodes in layer 1, ... , and C = the number of
                     outputs (usually 1)
      wts (list): list of numpy arrays containing each layer's weights, sizes
                     (S1,N+1), (S2,S1+1), etc. (the +1 column is the bias)
    """
    def __init__(self, *args, **kwargs):
        """Constructor for nnetRegress (neural net regressor).
        Parameters: see the "train" function; calls "train" if arguments passed
        Properties:
          wts : list of coefficients (weights) for each layer of the NN
          activation : function for layer activation function & derivative
        """
        self.wts = []
        # Hidden layers use tanh; its derivative is 1 - tanh(Z)^2.
        self.Sig = lambda Z: np.tanh(Z)                 ## TODO: make flexible
        self.dSig = lambda Z: 1.0 - np.tanh(Z)**2
        # Output layer is linear (identity) for regression; its derivative is
        # the constant 1, written as 1.0 + 0*Z so it broadcasts to Z's shape.
        self.Sig0 = lambda Z: Z
        self.dSig0 = lambda Z: 1.0 + 0*Z
        if len(args) or len(kwargs):                    # if we were given optional arguments,
            self.train(*args, **kwargs)                 # just pass them through to "train"
    def __repr__(self):
        """Return a printable summary of the regressor (type and layer sizes)."""
        # NOTE(review): get_layers() is not defined in this class; presumably
        # inherited from `regressor` -- verify, or switch to self.layers.
        to_return = 'Multi-layer perceptron (neural network) regressor\nLayers [{}]'.format(self.get_layers())
        return to_return
    def __str__(self):
        """Return a printable summary of the regressor (same text as __repr__)."""
        # NOTE(review): see __repr__ regarding get_layers().
        to_return = 'Multi-layer perceptron (neural network) regressor\nLayers [{}]'.format(self.get_layers())
        return to_return
    def nLayers(self):
        """Return the number of weight layers (weight matrices) in the network."""
        return len(self.wts)
    @property
    def layers(self):
        """Return list of layer sizes, [N,H1,H2,...,C]
        N = # of input features
        Hi = # of hidden nodes in layer i
        C = # of output nodes (usually 1)
        """
        if len(self.wts):
            # Column count of each matrix = size of the layer feeding into it.
            layers = [self.wts[l].shape[1] for l in range(len(self.wts))]
            layers.append( self.wts[-1].shape[0] )      # output layer size
        else:
            layers = []
        return layers
    @layers.setter
    def layers(self, layers):
        # In-place resizing of the weight matrices is not supported; use
        # init_weights / set_layers to rebuild them instead.
        raise NotImplementedError
    ## CORE METHODS ################################################################
    def predict(self, X):
        """Make predictions of the neural network on data X.
        Args:
          X : MxN numpy array containing M data points with N features each
        Returns:
          MxC numpy array of predicted target values
        """
        X = arr(X)                                      # convert to numpy if needed
        L = self.nLayers()                              # get number of layers
        Z = _add1(X)                                    # initialize: input features + constant term
        for l in range(L - 1):                          # for all *except output* layer:
            Z = Z.dot( self.wts[l].T )                  # compute linear response of next layer
            Z = _add1( self.Sig(Z) )                    # apply activation function & add constant term
        Z = Z.dot( self.wts[L - 1].T )                  # compute output layer linear response
        Z = self.Sig0(Z)                                # apply output layer activation function (identity)
        return Z
    def train(self, X, Y, init='zeros', stepsize=.01, stopTol=1e-4, stopIter=5000):
        """Train the neural network by stochastic gradient descent on MSE loss.
        Args:
          X : MxN numpy array containing M data points with N features each
          Y : Mx1 numpy array of targets for each data point in X
          init (str): 'none', 'zeros', or 'random'.  NOTE(review): currently
              unused here; weights must already be sized via init_weights.
          stepsize (float): The stepsize for gradient descent (decreases as 1 / iter).
          stopTol (float): Tolerance on the change in loss for stopping.
          stopIter (int): The maximum number of epochs before stopping.
        """
        # Weight matrices include a bias column, hence the -1.
        if self.wts[0].shape[1] - 1 != len(X[0]):
            raise ValueError('layer[0] must equal the number of columns of X (number of features)')
        if self.wts[-1].shape[0] > 1 and self.wts[-1].shape[0] != Y.shape[1]:
            raise ValueError('layers[-1] must equal the number of classes in Y, or 1 for binary Y')
        M,N = arr(X).shape                              # M = number of data points, N = feature dim
        L = len(self.wts)                               # get number of layers
        Y = arr(Y)
        Y2d = Y if len(Y.shape) > 1 else Y[:,np.newaxis]  # ensure targets are 2D (Mx1)
        # outer loop of stochastic gradient descent
        it = 1                                          # iteration (epoch) number
        nextPrint = 1                                   # next time to print info (doubling schedule)
        done = 0                                        # end of loop flag
        Jsur = []                                       # surrogate (mse) loss values per epoch
        while not done:
            # Step size decays as ~1/t (slightly softer than the classifier's
            # stepsize/it schedule).
            step_i = (2.0*stepsize) / (2.0+it)
            # stochastic gradient update (one full pass over the data, in order)
            for j in range(M):
                A,Z = self.__responses(twod(X[j,:]))    # compute all layers' responses, then backprop
                # Output-layer error signal; dSig0 is evaluated on the
                # activation Z[L] (identity output, so derivative is 1).
                delta = (Z[L] - Y2d[j,:]) * arr(self.dSig0(Z[L]))
                for l in range(L - 1, -1, -1):
                    grad = delta.T.dot( Z[l] )          # compute gradient on current layer wts
                    delta = delta.dot(self.wts[l]) * arr(self.dSig(Z[l]))  # propagate gradient down
                    delta = delta[:,1:]                 # discard constant (bias) feature
                    self.wts[l] -= step_i * grad        # take gradient step on current layer wts
            Jsur.append( self.mse(X, Y2d) )             # surrogate (mse on output)
            if it >= nextPrint:
                print('it {} : J = {}'.format(it,Jsur[-1]))
                nextPrint *= 2
            # check if finished: loss converged, or iteration cap hit
            done = (it > 1) and (np.abs(Jsur[-1] - Jsur[-2]) < stopTol) or it >= stopIter
            it += 1
    ## MUTATORS ####################################################################
    def setActivation(self, method, sig=None, sig0=None):
        """ This method sets the activation functions.  (Unimplemented.)
        Args:
          method : string, {'logistic' , 'htangent', 'custom'} -- which activation type
        Optional arguments for "custom" activation:
          sig : f'n object F(z) returns activation function & its derivative at z (as a tuple)
          sig0: activation function object F(z) for final layer of the nnet
        """
        # Unfinished: the original body below this raise referenced names that
        # are not in the signature (d_sig, sig_0, d_sig_0) and assigned
        # lowercase attributes (self.sig, ...) that predict/train never read
        # (they use Sig/dSig/Sig0/dSig0).  That unreachable code has been
        # removed; implement against Sig/dSig/Sig0/dSig0 (see the classifier's
        # setActivation) before removing this raise.
        raise NotImplementedError
    def set_layers(self, sizes, init='random'):
        """Set layers sizes to sizes.
        Args:
          sizes (list of int): List containing layer sizes.
          init (str, optional): Weight initialization method.
        """
        self.init_weights(sizes, init, None, None)
    def init_weights(self, sizes, init, X, Y):
        """Set layer sizes and initialize the weights of the neural network
        Args:
          sizes (list of int): [Nin, N1, N2, ... , Nout], where Nin = # of input features
          init (str): {'none', 'zeros', 'random'} -- 'none' keeps current weights;
              'zeros' -> all-zero matrices; 'random' -> small random values (breaks symmetry)
        Raises:
          ValueError : for any other init string
        """
        init = init.lower()
        if init == 'none':
            pass                                        # keep existing weights untouched
        elif init == 'zeros':
            # FIX: previously wrapped in arr(..., dtype=object), which made
            # self.wts a numpy object array here but a plain list for 'random';
            # use a list for a consistent type (matches nnetClassify.init_weights).
            self.wts = [np.zeros((sizes[i + 1],sizes[i] + 1)) for i in range(len(sizes) - 1)]
        elif init == 'random':
            self.wts = [.0025 * np.random.randn(sizes[i+1],sizes[i]+1) for i in range(len(sizes) - 1)]
        else:
            raise ValueError('nnetRegress.init_weights: ' + str(init) + ' is not a valid option for init')
    ## HELPERS #####################################################################
    def __responses(self, Xin):
        """
        Helper function that gets linear sum from previous layer (A) and
        saturated activation responses (Z) for a data point. Used in:
        train
        """
        L = len(self.wts)
        A = [arr([1.0])]                                # layer 0 has no linear response; placeholder
        Z = [_add1(Xin)]                                # layer 0 "activation": raw features + bias term
        for l in range(1, L):
            A.append( Z[l - 1].dot(self.wts[l - 1].T) ) # linear response of previous layer
            Z.append( _add1(self.Sig(A[l])) )           # apply activation & add constant feature
        A.append( Z[L - 1].dot(self.wts[L - 1].T) )     # linear response, output layer
        Z.append( self.Sig0(A[L]) )                     # output activation (identity for regression)
        return A,Z
################################################################################
################################################################################
################################################################################
| 41.304428
| 115
| 0.518158
| 2,775
| 22,387
| 4.133333
| 0.11964
| 0.023191
| 0.009765
| 0.00959
| 0.847602
| 0.838187
| 0.824237
| 0.819355
| 0.813078
| 0.778204
| 0
| 0.017939
| 0.325189
| 22,387
| 541
| 116
| 41.380776
| 0.741312
| 0.390271
| 0
| 0.716102
| 0
| 0
| 0.077903
| 0.009247
| 0
| 0
| 0
| 0.003697
| 0
| 1
| 0.118644
| false
| 0.008475
| 0.029661
| 0.012712
| 0.224576
| 0.008475
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
527b73aee44ee88ced39668230059e2d1d7deb50
| 4,317
|
py
|
Python
|
src/vision/Main.py
|
shockwave4488/FRC-2019-Public
|
174ac97c000d915ed96ec839efb480c0ab1f11e0
|
[
"MIT"
] | null | null | null |
src/vision/Main.py
|
shockwave4488/FRC-2019-Public
|
174ac97c000d915ed96ec839efb480c0ab1f11e0
|
[
"MIT"
] | null | null | null |
src/vision/Main.py
|
shockwave4488/FRC-2019-Public
|
174ac97c000d915ed96ec839efb480c0ab1f11e0
|
[
"MIT"
] | null | null | null |
import libjevois as jevois
import cv2
import numpy as np
class Tests:
    """JeVois vision module: detect retro-reflective vision targets in each
    frame and stream the aimpoint offset (from the 160x120 image center) over
    serial.  `process` additionally returns the annotated frame over USB;
    `processNoUSB` runs the identical detection headless."""

    def _detectAndReport(self, img):
        """Shared pipeline for process/processNoUSB.

        Thresholds the BGR frame in HSV space, finds convex candidate contours
        whose bounding box is wider than tall (1 < w/h < 5), draws accepted
        contours on `img` in red, and sends the averaged target center offset
        over serial ("&None" when not exactly 1 or 2 targets are found).
        Returns the (possibly annotated) image.
        """
        hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
        # NOTE(review): lowerMask and upperMask use identical bounds, and the
        # combined `mask` is computed but unused (colorFiltered uses upperMask
        # only).  Behavior preserved as-is; confirm intent against the tuning.
        lowerMask = cv2.inRange(hsv, np.array([60, 120, 240]), np.array([100, 255, 255]))
        upperMask = cv2.inRange(hsv, np.array([60, 120, 240]), np.array([100, 255, 255]))
        mask = cv2.bitwise_or(lowerMask, upperMask)
        colorFiltered = cv2.bitwise_and(img, img, mask=upperMask)
        # OpenCV 3 findContours returns (image, contours, hierarchy).
        image, contours, hierarchy = cv2.findContours(cv2.split(colorFiltered)[2], cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
        goodContours = []
        for contour in contours:
            currentContour = cv2.convexHull(contour, False)
            poly = cv2.approxPolyDP(currentContour, 3.5, True)
            if len(poly) < 4 or len(poly) > 8:
                continue                    # too simple/complex to be a target
            # Axis-aligned bounding box of the approximated polygon.
            xs = [point[0][0] for point in poly]
            ys = [point[0][1] for point in poly]
            lowX, highX = min(xs), max(xs)
            lowY, highY = min(ys), max(ys)
            width = highX - lowX
            height = highY - lowY
            centerX = (lowX + highX) / 2
            centerY = (lowY + highY) / 2
            ratio = width / height
            if ratio > 1 and ratio < 5:     # targets are wider than tall
                img = cv2.drawContours(img, [currentContour], 0, (0, 0, 255), 3)
                goodContours.append([contour, centerX, centerY])
        if len(goodContours) == 1 or len(goodContours) == 2:
            # Average the candidate centers and report the offset from the
            # image center (160, 120).
            avgX = 0
            avgY = 0
            for contour in goodContours:
                avgX += contour[1]
                avgY += contour[2]
            avgX /= len(goodContours)
            avgY /= len(goodContours)
            jevois.sendSerial("&" + str(160 - avgX) + "," + str(120 - avgY))
        else:
            jevois.sendSerial("&None")
        return img

    def process(self, inframe, outframe):
        """Process one camera frame and stream the annotated image over USB."""
        img = self._detectAndReport(inframe.getCvBGR())
        outframe.sendCv(img)

    def processNoUSB(self, inframe):
        """Headless variant: identical detection/serial output, no USB video."""
        self._detectAndReport(inframe.getCvBGR())
| 41.114286
| 128
| 0.495483
| 490
| 4,317
| 4.336735
| 0.17551
| 0.022588
| 0.022588
| 0.028235
| 0.939294
| 0.939294
| 0.939294
| 0.939294
| 0.939294
| 0.939294
| 0
| 0.079359
| 0.378272
| 4,317
| 105
| 129
| 41.114286
| 0.71237
| 0
| 0
| 0.927835
| 0
| 0
| 0.003242
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020619
| false
| 0
| 0.030928
| 0
| 0.061856
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bfdb6e03802358cb7f8e76820e1b2aed148e88a8
| 10,297
|
py
|
Python
|
src/models/networks.py
|
herbiebradley/CycleGAN-Tensorflow
|
495198b4473118b023891fb8e19465c0219c0907
|
[
"MIT"
] | 6
|
2019-05-29T17:33:56.000Z
|
2021-05-13T20:03:26.000Z
|
src/models/networks.py
|
herbiebradley/CycleGAN-Tensorflow
|
495198b4473118b023891fb8e19465c0219c0907
|
[
"MIT"
] | 3
|
2019-12-28T18:49:41.000Z
|
2020-09-26T08:50:03.000Z
|
src/models/networks.py
|
herbiebradley/CycleGAN-Tensorflow
|
495198b4473118b023891fb8e19465c0219c0907
|
[
"MIT"
] | 3
|
2019-12-11T03:44:59.000Z
|
2020-04-29T06:06:40.000Z
|
import tensorflow as tf
"""
This file defines the CycleGAN generator and discriminator.
Options are included for extra skips, instance norm, dropout, and resize conv instead of deconv
"""
class Encoder(tf.keras.Model):
    """CycleGAN encoder: a 7x7 conv followed by two stride-2 downsampling
    convs.  Each conv is followed by the same chain of optional instance
    normalisation, ReLU, and optional dropout (factored into _activate)."""
    def __init__(self, opt):
        super(Encoder, self).__init__()
        self.use_dropout = opt.use_dropout
        self.norm = opt.instance_norm
        self.training = opt.training
        if self.use_dropout:
            self.norm = False # We don't want to combine instance normalisation and dropout.
            self.dropout = tf.keras.layers.Dropout(opt.dropout_prob)
        self.conv1 = tf.keras.layers.Conv2D(opt.ngf, kernel_size=7, strides=1,
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
        self.conv2 = tf.keras.layers.Conv2D(opt.ngf * 2, kernel_size=3, strides=2, padding='same',
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
        self.conv3 = tf.keras.layers.Conv2D(opt.ngf * 4, kernel_size=3, strides=2, padding='same',
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
    def _activate(self, x):
        """Shared post-conv chain: optional instance norm -> ReLU -> optional dropout."""
        if self.norm:
            x = tf.contrib.layers.instance_norm(x, center=False, scale=False, epsilon=1e-05, trainable=False)
        x = tf.nn.relu(x)
        if self.use_dropout:
            x = self.dropout(x, training=self.training)
        return x
    def call(self, inputs):
        # Reflection padding is used to reduce artifacts.
        x = tf.pad(inputs, [[0, 0], [3, 3], [3, 3], [0, 0]], 'REFLECT')
        x = self._activate(self.conv1(x))
        x = self._activate(self.conv2(x))
        x = self._activate(self.conv3(x))
        return x
class Residual(tf.keras.Model):
    """CycleGAN residual block: two reflection-padded 3x3 convs; the block's
    input is added back to its output (identity skip connection)."""
    def __init__(self, opt):
        super(Residual, self).__init__()
        self.use_dropout = opt.use_dropout
        self.norm = opt.instance_norm
        self.training = opt.training
        if self.use_dropout:
            self.norm = False  # instance norm and dropout are not combined
            self.dropout = tf.keras.layers.Dropout(opt.dropout_prob)
        # Both convs keep the channel count at ngf * 4 so the skip add lines up.
        self.conv1 = tf.keras.layers.Conv2D(opt.ngf * 4, kernel_size=3, strides=1,
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
        self.conv2 = tf.keras.layers.Conv2D(opt.ngf * 4, kernel_size=3, strides=1,
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
    def call(self, inputs):
        # Reflection-pad by 1 so the 3x3 valid conv preserves spatial size.
        x = tf.pad(inputs, [[0, 0], [1, 1], [1, 1], [0, 0]], 'REFLECT')
        x = self.conv1(x)
        if self.norm:
            x = tf.contrib.layers.instance_norm(x, center=False, scale=False, epsilon=1e-05, trainable=False)
        x = tf.nn.relu(x)
        if self.use_dropout:
            x = self.dropout(x, training=self.training)
        x = tf.pad(x, [[0, 0], [1, 1], [1, 1], [0, 0]], 'REFLECT')
        x = self.conv2(x)
        if self.norm:
            x = tf.contrib.layers.instance_norm(x, center=False, scale=False, epsilon=1e-05, trainable=False)
        # Note: no ReLU after the second conv, only (optional) dropout.
        if self.use_dropout:
            x = self.dropout(x, training=self.training)
        x = tf.add(x, inputs) # Add is better than concatenation.
        return x
class Decoder(tf.keras.Model):
    """CycleGAN decoder: two 2x upsampling stages (either nearest-neighbour
    resize + conv, or transposed conv) followed by a final 7x7 conv that
    produces a 3-channel image squashed with tanh."""
    def __init__(self, opt):
        super(Decoder, self).__init__()
        self.use_dropout = opt.use_dropout
        self.norm = opt.instance_norm
        self.training = opt.training
        self.resize_conv = opt.resize_conv  # True: upsample+conv; False: deconv
        if self.use_dropout:
            self.norm = False  # instance norm and dropout are not combined
            self.dropout = tf.keras.layers.Dropout(opt.dropout_prob)
        if self.resize_conv:
            # Nearest neighbour upsampling:
            self.upsample = tf.keras.layers.UpSampling2D(size=(2, 2))
            self.conv1 = tf.keras.layers.Conv2D(opt.ngf * 2, kernel_size=3, strides=1,
                                kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
            self.conv2 = tf.keras.layers.Conv2D(opt.ngf, kernel_size=3, strides=1,
                                kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
        else:
            self.conv1 = tf.keras.layers.Conv2DTranspose(opt.ngf * 2, kernel_size=3, strides=2, padding='same',
                                kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
            self.conv2 = tf.keras.layers.Conv2DTranspose(opt.ngf, kernel_size=3, strides=2, padding='same',
                                kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
        self.conv3 = tf.keras.layers.Conv2D(3, kernel_size=7, strides=1,
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
    def call(self, inputs):
        x = inputs
        if self.resize_conv:
            # Resize-conv (instead of deconv) reduces checkerboard artifacts.
            x = self.upsample(x)
        x = tf.pad(x, [[0, 0], [1, 1], [1, 1], [0, 0]], 'REFLECT')
        x = self.conv1(x)
        if self.norm:
            x = tf.contrib.layers.instance_norm(x, center=False, scale=False, epsilon=1e-05, trainable=False)
        x = tf.nn.relu(x)
        if self.use_dropout:
            x = self.dropout(x, training=self.training)
        if self.resize_conv:
            x = self.upsample(x)
        x = tf.pad(x, [[0, 0], [1, 1], [1, 1], [0, 0]], 'REFLECT')
        x = self.conv2(x)
        if self.norm:
            x = tf.contrib.layers.instance_norm(x, center=False, scale=False, epsilon=1e-05, trainable=False)
        x = tf.nn.relu(x)
        if self.use_dropout:
            x = self.dropout(x, training=self.training)
        x = tf.pad(x, [[0, 0], [3, 3], [3, 3], [0, 0]], 'REFLECT')
        x = self.conv3(x)
        if self.norm:
            x = tf.contrib.layers.instance_norm(x, center=False, scale=False, epsilon=1e-05, trainable=False)
        x = tf.nn.tanh(x)  # output image in [-1, 1]
        if self.use_dropout:
            # NOTE(review): dropout *after* the final tanh perturbs the output
            # image itself during training -- unusual; confirm this is intended.
            x = self.dropout(x, training=self.training)
        return x
class Generator(tf.keras.Model):
    """CycleGAN generator: encoder -> 6 (for 128px inputs) or 9 residual
    blocks -> decoder, with an optional skip connection around the residual
    stack."""
    def __init__(self, opt):
        super(Generator, self).__init__()
        self.img_size = opt.img_size
        # If true, adds skip connection from the end of the encoder to start of decoder:
        self.gen_skip = opt.gen_skip
        self.encoder = Encoder(opt)
        # 128x128 inputs get 6 residual blocks, larger sizes get 9.  Attributes
        # are named res1..resN exactly as before so checkpoints stay compatible.
        n_res = 6 if self.img_size == 128 else 9
        for idx in range(1, n_res + 1):
            setattr(self, 'res{}'.format(idx), Residual(opt))
        self.decoder = Decoder(opt)
    @tf.contrib.eager.defun
    def call(self, inputs):
        inputs = self.encoder(inputs)
        n_res = 6 if self.img_size == 128 else 9
        x = inputs
        for idx in range(1, n_res + 1):
            x = getattr(self, 'res{}'.format(idx))(x)
        if self.gen_skip:
            # Skip connection from encoder output around the residual stack.
            x = tf.add(x, inputs)
        x = self.decoder(x)
        return x
class Discriminator(tf.keras.Model):
    """CycleGAN discriminator: a stack of 4x4 convs with LeakyReLU(0.2),
    downsampling three times, ending in a 1-channel map of patch-wise
    real/fake scores (raw logits, no final activation)."""
    def __init__(self, opt):
        super(Discriminator, self).__init__()
        self.norm = opt.instance_norm  # apply instance norm after convs 2-4
        self.conv1 = tf.keras.layers.Conv2D(opt.ndf, kernel_size=4, strides=2, padding='same',
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
        self.conv2 = tf.keras.layers.Conv2D(opt.ndf * 2, kernel_size=4, strides=2, padding='same',
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
        self.conv3 = tf.keras.layers.Conv2D(opt.ndf * 4, kernel_size=4, strides=2, padding='same',
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
        self.conv4 = tf.keras.layers.Conv2D(opt.ndf * 8, kernel_size=4, strides=1, padding='same',
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
        self.conv5 = tf.keras.layers.Conv2D(1, kernel_size=4, strides=1, padding='same',
                            kernel_initializer=tf.truncated_normal_initializer(stddev=opt.init_scale))
        self.leaky = tf.keras.layers.LeakyReLU(0.2)
    @tf.contrib.eager.defun
    def call(self, inputs):
        # First conv deliberately has no normalisation, only LeakyReLU.
        x = self.conv1(inputs)
        x = self.leaky(x)
        x = self.conv2(x)
        if self.norm:
            x = tf.contrib.layers.instance_norm(x, center=False, scale=False, epsilon=1e-05, trainable=False)
        x = self.leaky(x)
        x = self.conv3(x)
        if self.norm:
            x = tf.contrib.layers.instance_norm(x, center=False, scale=False, epsilon=1e-05, trainable=False)
        x = self.leaky(x)
        x = self.conv4(x)
        if self.norm:
            x = tf.contrib.layers.instance_norm(x, center=False, scale=False, epsilon=1e-05, trainable=False)
        x = self.leaky(x)
        # Final 1-channel score map; no activation (raw logits).
        x = self.conv5(x)
        return x
| 43.447257
| 122
| 0.572497
| 1,345
| 10,297
| 4.26171
| 0.095911
| 0.037509
| 0.045359
| 0.073273
| 0.843336
| 0.834264
| 0.80984
| 0.804431
| 0.763259
| 0.763259
| 0
| 0.028839
| 0.306303
| 10,297
| 236
| 123
| 43.631356
| 0.773625
| 0.024376
| 0
| 0.755
| 0
| 0
| 0.007897
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.005
| 0
| 0.105
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
876135991d0dfea8ef195aa5ae996f23c310a1f1
| 1,334
|
py
|
Python
|
scaler.py
|
Mythologyli/MLP-BP
|
5d027f7dda64eaac1198b133fdc76889b1fa446c
|
[
"MIT"
] | null | null | null |
scaler.py
|
Mythologyli/MLP-BP
|
5d027f7dda64eaac1198b133fdc76889b1fa446c
|
[
"MIT"
] | null | null | null |
scaler.py
|
Mythologyli/MLP-BP
|
5d027f7dda64eaac1198b133fdc76889b1fa446c
|
[
"MIT"
] | null | null | null |
class Scaler1D:
    """Min-max scaler for a flat list of numbers.

    The first call to transform() captures the data's min/max; subsequent
    calls (and inverse_transform) reuse those remembered bounds.
    """

    def __init__(self) -> None:
        # Bounds are captured lazily on the first transform() call.
        self.max_num = None
        self.min_num = None

    def transform(self, num_list: list) -> list:
        """Scale num_list into [0, 1] using the remembered (or newly fit) bounds."""
        if self.max_num is None:
            self.max_num = max(num_list)
        if self.min_num is None:
            self.min_num = min(num_list)
        span = self.max_num - self.min_num
        return [(value - self.min_num) / span for value in num_list]

    def inverse_transform(self, num_list: list) -> list:
        """Map values in [0, 1] back to the original data range."""
        span = self.max_num - self.min_num
        return [value * span + self.min_num for value in num_list]
class Scaler2D:
    """Min-max scaler for a list of [a, b] pairs.

    Both components share one global min/max, captured lazily on the first
    transform() call and reused afterwards.
    """

    def __init__(self) -> None:
        # Bounds are captured lazily on the first transform() call.
        self.max_num = None
        self.min_num = None

    def transform(self, num_list: list) -> list:
        """Scale every pair component into [0, 1] using one shared min/max."""
        if self.max_num is None:
            self.max_num = max(max(pair[0], pair[1]) for pair in num_list)
        if self.min_num is None:
            self.min_num = min(min(pair[0], pair[1]) for pair in num_list)
        span = self.max_num - self.min_num
        return [[(pair[0] - self.min_num) / span,
                 (pair[1] - self.min_num) / span]
                for pair in num_list]

    def inverse_transform(self, num_list: list) -> list:
        """Map scaled pairs in [0, 1] back to the original data range."""
        span = self.max_num - self.min_num
        return [[pair[0] * span + self.min_num,
                 pair[1] * span + self.min_num]
                for pair in num_list]
| 32.536585
| 85
| 0.574213
| 214
| 1,334
| 3.336449
| 0.102804
| 0.159664
| 0.252101
| 0.163866
| 0.935574
| 0.922969
| 0.922969
| 0.917367
| 0.917367
| 0.795518
| 0
| 0.010684
| 0.298351
| 1,334
| 40
| 86
| 33.35
| 0.752137
| 0
| 0
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.214286
| false
| 0
| 0
| 0.071429
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5e4d3df13c1df41c5d58cdfbec83bea71969f6e5
| 12,667
|
py
|
Python
|
core/Racifier.py
|
Gimongi/build-a-bard
|
aff45e68b6f52ebaed00771d76ddbf4f7b55aa34
|
[
"MIT"
] | null | null | null |
core/Racifier.py
|
Gimongi/build-a-bard
|
aff45e68b6f52ebaed00771d76ddbf4f7b55aa34
|
[
"MIT"
] | null | null | null |
core/Racifier.py
|
Gimongi/build-a-bard
|
aff45e68b6f52ebaed00771d76ddbf4f7b55aa34
|
[
"MIT"
] | null | null | null |
from core.Character import Character
from data.CharRace import CharRace
class Racifier:
# def __init__(self, character):
# self.character = character
# pass
@staticmethod
def applyRaceToCharacter(character):
# type: (Character) -> None
if character.charRace == CharRace.aarakocra:
character.dexterity += 2
character.wisdom += 1
character.speed = 25
character.languages.append("Common")
character.languages.append("Aarakocra")
character.languages.append("Auran")
pass
elif character.charRace == CharRace.aasimar:
character.charisma += 2
character.speed = 30
character.languages.append("Common")
character.languages.append("Celestial")
pass
elif character.charRace == CharRace.bugbear:
character.strength += 2
character.dexterity += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Goblin")
pass
elif character.charRace == CharRace.centaur:
character.strength += 2
character.wisdom += 1
character.speed = 40
character.languages.append("Common")
character.languages.append("Sylvan")
pass
elif character.charRace == CharRace.changeling:
character.charisma += 2
# TODO: 1 other ability score
character.speed = 30
character.languages.append("Common")
# TODO: 2 other languages
pass
elif character.charRace == CharRace.dragonborn:
character.strength += 2
character.charisma += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Draconic")
pass
elif character.charRace == CharRace.dwarf:
character.constitution += 2
character.speed = 25
character.languages.append("Common")
character.languages.append("Dwarvish")
pass
elif character.charRace == CharRace.elf:
character.dexterity += 2
character.speed = 30
character.languages.append("Common")
character.languages.append("Elvish")
pass
elif character.charRace == CharRace.fairy:
# TODO increase one ability by 2
# TODO 1 other ability score
character.speed = 30
character.languages.append("Common")
# TODO 1 other language
pass
elif character.charRace == CharRace.firbolg:
character.wisdom += 2
character.strength += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Giant")
character.languages.append("Elvish")
pass
elif character.charRace == CharRace.genasi:
character.constitution += 2
character.speed = 30
character.languages.append("Common")
character.languages.append("Primordial")
pass
elif character.charRace == CharRace.gith:
character.intelligence += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Gith")
pass
elif character.charRace == CharRace.gnome:
character.intelligence += 2
character.speed = 25
character.languages.append("Common")
character.languages.append("Gnomish")
pass
elif character.charRace == CharRace.goblin:
character.dexterity += 2
character.speed = 30
character.languages.append("Common")
character.languages.append("Goblin")
pass
elif character.charRace == CharRace.goliath:
character.strength += 2
character.constitution += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Giant")
pass
elif character.charRace == CharRace.grung:
character.dexterity += 2
character.constitution += 1
character.speed = 25
character.languages.append("Common")
character.languages.append("Grung")
pass
elif character.charRace == CharRace.half_elf:
character.charisma += 2
#TODO 2 other ability scores increased by 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Elvish")
#TODO 1 other language of your choice
pass
elif character.charRace == CharRace.halfling:
character.strength += 2
character.constitution += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Orc")
pass
elif character.charRace == CharRace.half_orc:
character.intelligence += 2
character.speed = 25
character.languages.append("Common")
character.languages.append("Gnomish")
pass
elif character.charRace == CharRace.harengon:
# TODO increase one score by 2
# TODO increase another score by
character.speed = 30
character.languages.append("Common")
# TODO 1 other language
pass
elif character.charRace == CharRace.hobgoblin:
character.strength += 1
character.constitution += 1
character.intelligence += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Goblin")
pass
elif character.charRace == CharRace.human:
character.strength += 1
character.dexterity += 1
character.constitution += 1
character.intelligence += 1
character.wisdom += 1
character.charisma += 1
character.speed = 30
character.languages.append("Common")
#TODO 1 other language of your choice
pass
elif character.charRace == CharRace.kalashtar:
character.wisdom += 1
character.charisma += 1
# TODO increase one score by 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Quori")
# TODO 1 other language
pass
elif character.charRace == CharRace.kenku:
character.dexterity += 2
character.wisdom += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Auran")
pass
elif character.charRace == CharRace.kobold:
character.dexterity += 2
character.speed = 30
character.languages.append("Common")
character.languages.append("Draconic")
pass
elif character.charRace == CharRace.leonin:
character.constitution += 2
character.strength += 1
character.speed = 35
character.languages.append("Common")
character.languages.append("Leonin")
pass
elif character.charRace == CharRace.lizardfolk:
character.constitution += 2
character.strength += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Draconic")
pass
elif character.charRace == CharRace.locathah:
character.strength += 2
character.dexterity += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Aquan")
pass
elif character.charRace == CharRace.loxodon:
character.constitution += 2
character.wisdom += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Loxodon")
pass
elif character.charRace == CharRace.minotaur:
character.strength += 2
character.constitution += 1
character.speed = 30
character.languages.append("Common")
pass
elif character.charRace == CharRace.orc:
character.strength += 2
character.speed = 30
character.languages.append("Common")
character.languages.append("Orc")
pass
elif character.charRace == CharRace.owlin:
# TODO increase one ability by 2
# TODO increase one ability by 1
character.speed = 30
character.languages.append("Common")
# TODO 1 other language
pass
elif character.charRace == CharRace.satyr:
character.charisma += 2
character.dexterity += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Sylvan")
pass
elif character.charRace == CharRace.shifter:
character.dexterity += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Quori")
# TODO 1 other language
pass
elif character.charRace == CharRace.simic_hybrid:
character.constitution += 2
# TODO increase one ability by 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Elvish")
character.languages.append("Vedalken")
pass
elif character.charRace == CharRace.tabaxi:
character.dexterity += 2
character.charisma += 1
character.speed = 30
character.languages.append("Common")
# TODO 1 other language
pass
elif character.charRace == CharRace.tiefling:
character.charisma += 2
character.intelligence += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Infernal")
pass
elif character.charRace == CharRace.tortle:
character.strength += 2
character.wisdom += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Aquan")
pass
elif character.charRace == CharRace.triton:
character.strength += 1
character.constitution += 1
character.charisma += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Aquan")
character.languages.append("Primordial")
pass
elif character.charRace == CharRace.vedalken:
character.intelligence += 2
character.wisdom += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Vedalken")
# TODO 1 other language
pass
elif character.charRace == CharRace.verdan:
character.charisma += 2
character.constitution += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Goblin")
# TODO 1 other language
pass
elif character.charRace == CharRace.warforged:
character.constitution += 2
# TODO 1 other language
character.speed = 30
character.languages.append("Common")
# TODO 1 other language
pass
elif character.charRace == CharRace.yuan_ti:
character.charisma += 2
character.intelligence += 1
character.speed = 30
character.languages.append("Common")
character.languages.append("Abyssal")
character.languages.append("Draconic")
pass
else:
raise NotImplementedError
# TODO: description should be of the character (colour/etc.)? or of the race itcharacter. Not sure. Probably of character
# character.description =
| 30.819951
| 125
| 0.561222
| 1,104
| 12,667
| 6.432065
| 0.108696
| 0.210393
| 0.280524
| 0.181665
| 0.841431
| 0.778059
| 0.763554
| 0.741304
| 0.703422
| 0.645965
| 0
| 0.022002
| 0.354149
| 12,667
| 411
| 126
| 30.819951
| 0.845985
| 0.068367
| 0
| 0.786207
| 0
| 0
| 0.043667
| 0
| 0
| 0
| 0
| 0.002433
| 0
| 1
| 0.003448
| false
| 0.148276
| 0.006897
| 0
| 0.013793
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 11
|
5e5e007baa8b98ec3da1f5d783bca6f1da6d3658
| 2,412
|
py
|
Python
|
py/lib/test/test_resnet.py
|
zjZSTU/ResNet
|
f185d1d24cdc96a533b2cf2df94f68172d820cb3
|
[
"Apache-2.0"
] | 2
|
2021-02-23T22:37:48.000Z
|
2021-09-09T02:41:45.000Z
|
py/lib/test/test_resnet.py
|
zjZSTU/ResNet
|
f185d1d24cdc96a533b2cf2df94f68172d820cb3
|
[
"Apache-2.0"
] | null | null | null |
py/lib/test/test_resnet.py
|
zjZSTU/ResNet
|
f185d1d24cdc96a533b2cf2df94f68172d820cb3
|
[
"Apache-2.0"
] | 1
|
2021-09-09T02:41:57.000Z
|
2021-09-09T02:41:57.000Z
|
# -*- coding: utf-8 -*-
"""
@date: 2020/4/28 下午9:07
@file: test_resnet.py
@author: zj
@description:
"""
import torch
from models.resnet import res_net
def test_resnet18():
N = 1
num_classes = 20
data = torch.randn((N, 3, 224, 224))
model = res_net.resnet18(num_classes=num_classes)
outputs = model(data)
assert outputs.shape == (N, num_classes)
def test_resnet34():
N = 1
num_classes = 20
data = torch.randn((N, 3, 224, 224))
model = res_net.resnet34(num_classes=num_classes)
outputs = model(data)
assert outputs.shape == (N, num_classes)
def test_resnet50():
N = 1
num_classes = 20
data = torch.randn((N, 3, 224, 224))
model = res_net.resnet50(num_classes=num_classes)
outputs = model(data)
assert outputs.shape == (N, num_classes)
def test_resnet101():
N = 1
num_classes = 20
data = torch.randn((N, 3, 224, 224))
model = res_net.resnet101(num_classes=num_classes)
outputs = model(data)
assert outputs.shape == (N, num_classes)
def test_resnet152():
N = 1
num_classes = 20
data = torch.randn((N, 3, 224, 224))
model = res_net.resnet152(num_classes=num_classes)
outputs = model(data)
assert outputs.shape == (N, num_classes)
def test_resnet18_v2():
N = 1
num_classes = 20
data = torch.randn((N, 3, 224, 224))
model = res_net.resnet18_v2(num_classes=num_classes)
outputs = model(data)
assert outputs.shape == (N, num_classes)
def test_resnet34_v2():
N = 1
num_classes = 20
data = torch.randn((N, 3, 224, 224))
model = res_net.resnet34_v2(num_classes=num_classes)
outputs = model(data)
assert outputs.shape == (N, num_classes)
def test_resnet50_v2():
N = 1
num_classes = 20
data = torch.randn((N, 3, 224, 224))
model = res_net.resnet50_v2(num_classes=num_classes)
outputs = model(data)
assert outputs.shape == (N, num_classes)
def test_resnet101_v2():
N = 1
num_classes = 20
data = torch.randn((N, 3, 224, 224))
model = res_net.resnet101_v2(num_classes=num_classes)
outputs = model(data)
assert outputs.shape == (N, num_classes)
def test_resnet152_v2():
N = 1
num_classes = 20
data = torch.randn((N, 3, 224, 224))
model = res_net.resnet152_v2(num_classes=num_classes)
outputs = model(data)
assert outputs.shape == (N, num_classes)
| 19.770492
| 57
| 0.64262
| 354
| 2,412
| 4.175141
| 0.118644
| 0.270636
| 0.03383
| 0.081191
| 0.916779
| 0.916779
| 0.916779
| 0.916779
| 0.916779
| 0.916779
| 0
| 0.090958
| 0.229685
| 2,412
| 121
| 58
| 19.933884
| 0.704521
| 0.039386
| 0
| 0.694444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 1
| 0.138889
| false
| 0
| 0.027778
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5e9e9475c46d44428b2f4d05998128ecd7cb118c
| 3,328
|
py
|
Python
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_diagnostics.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_diagnostics.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/brocade_diagnostics.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class brocade_diagnostics(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def diag_post_rbridge_id_enable(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
diag = ET.SubElement(config, "diag", xmlns="urn:brocade.com:mgmt:brocade-diagnostics")
post = ET.SubElement(diag, "post")
rbridge_id = ET.SubElement(post, "rbridge-id")
rbridge_id_key = ET.SubElement(rbridge_id, "rbridge-id")
rbridge_id_key.text = kwargs.pop('rbridge_id')
enable = ET.SubElement(rbridge_id, "enable")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def diag_post_rbridge_id_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
diag = ET.SubElement(config, "diag", xmlns="urn:brocade.com:mgmt:brocade-diagnostics")
post = ET.SubElement(diag, "post")
rbridge_id = ET.SubElement(post, "rbridge-id")
rbridge_id = ET.SubElement(rbridge_id, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def diag_post_enable(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
diag = ET.SubElement(config, "diag", xmlns="urn:brocade.com:mgmt:brocade-diagnostics")
post = ET.SubElement(diag, "post")
enable = ET.SubElement(post, "enable")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def diag_post_rbridge_id_enable(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
diag = ET.SubElement(config, "diag", xmlns="urn:brocade.com:mgmt:brocade-diagnostics")
post = ET.SubElement(diag, "post")
rbridge_id = ET.SubElement(post, "rbridge-id")
rbridge_id_key = ET.SubElement(rbridge_id, "rbridge-id")
rbridge_id_key.text = kwargs.pop('rbridge_id')
enable = ET.SubElement(rbridge_id, "enable")
callback = kwargs.pop('callback', self._callback)
return callback(config)
def diag_post_rbridge_id_rbridge_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
diag = ET.SubElement(config, "diag", xmlns="urn:brocade.com:mgmt:brocade-diagnostics")
post = ET.SubElement(diag, "post")
rbridge_id = ET.SubElement(post, "rbridge-id")
rbridge_id = ET.SubElement(rbridge_id, "rbridge-id")
rbridge_id.text = kwargs.pop('rbridge_id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def diag_post_enable(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
diag = ET.SubElement(config, "diag", xmlns="urn:brocade.com:mgmt:brocade-diagnostics")
post = ET.SubElement(diag, "post")
enable = ET.SubElement(post, "enable")
callback = kwargs.pop('callback', self._callback)
return callback(config)
| 38.252874
| 94
| 0.619291
| 384
| 3,328
| 5.210938
| 0.098958
| 0.161919
| 0.111944
| 0.125937
| 0.931534
| 0.931534
| 0.931534
| 0.931534
| 0.931534
| 0.931534
| 0
| 0
| 0.24369
| 3,328
| 87
| 95
| 38.252874
| 0.794994
| 0.066406
| 0
| 0.928571
| 1
| 0
| 0.170573
| 0.078125
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.017857
| 0
| 0.267857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5e9e9505a6c27ec381413f431bde0bbf500fba09
| 5,801
|
py
|
Python
|
spacenet/schemas/test/test_resource.py
|
space-logistics-org/spacenet
|
fd004437ed7b27dd6dc41a374e1dedfcea92e37d
|
[
"MIT"
] | 1
|
2022-02-17T18:01:41.000Z
|
2022-02-17T18:01:41.000Z
|
spacenet/schemas/test/test_resource.py
|
space-logistics-org/spacenet
|
fd004437ed7b27dd6dc41a374e1dedfcea92e37d
|
[
"MIT"
] | 2
|
2021-06-19T19:41:15.000Z
|
2021-07-21T17:07:48.000Z
|
spacenet/schemas/test/test_resource.py
|
space-logistics-org/spacenet
|
fd004437ed7b27dd6dc41a374e1dedfcea92e37d
|
[
"MIT"
] | 3
|
2021-06-16T16:31:12.000Z
|
2022-02-17T18:02:57.000Z
|
"""
This module contains tests for resources.
"""
import unittest
import pytest
from pydantic import ValidationError
from spacenet.schemas.resource import (
ClassOfSupply,
ContinuousResource,
DiscreteResource,
ResourceType,
)
pytestmark = [pytest.mark.unit, pytest.mark.resource, pytest.mark.schema]
class TestDisResource(unittest.TestCase):
def test_good_data(self):
name_ = "Fuel"
type_ = ResourceType("Discrete")
cos = ClassOfSupply(101)
units_ = "kg"
unitmass_ = 105
unitvolume_ = 150
res = DiscreteResource(
name=name_,
type=type_,
class_of_supply=cos,
units=units_,
unit_mass=unitmass_,
unit_volume=unitvolume_,
)
self.assertEqual(res.name, name_)
self.assertEqual(res.type, type_)
self.assertEqual(res.class_of_supply, cos)
self.assertEqual(res.units, units_)
self.assertEqual(res.unit_mass, unitmass_)
self.assertEqual(res.unit_volume, unitvolume_)
def test_invalidType(self):
name_ = "Fuel"
type_ = 10
cos = ClassOfSupply(101)
units_ = "kg"
unitmass_ = 105
unitvolume_ = 150
with self.assertRaises(ValidationError):
res = DiscreteResource(
name=name_,
type=type_,
class_of_supply=cos,
units=units_,
unit_mass=unitmass_,
unit_volume=unitvolume_,
)
def test_invalidCOS(self):
name_ = "Fuel"
type_ = ResourceType("Discrete")
cos = "free"
units_ = "kg"
unitmass_ = 105
unitvolume_ = 150
with self.assertRaises(ValidationError):
res = DiscreteResource(
name=name_,
type=type_,
class_of_supply=cos,
units=units_,
unit_mass=unitmass_,
unit_volume=unitvolume_,
)
def test_invalidMass(self):
name_ = "Fuel"
type_ = ResourceType("Discrete")
cos = ClassOfSupply(101)
units_ = "kg"
unitmass_ = -10
unitvolume_ = 150
with self.assertRaises(ValidationError):
res = DiscreteResource(
name=name_,
type=type_,
class_of_supply=cos,
units=units_,
unit_mass=unitmass_,
unit_volume=unitvolume_,
)
def test_invalidVolume(self):
name_ = "Fuel"
type_ = ResourceType("Discrete")
cos = ClassOfSupply(101)
units_ = "kg"
unitmass_ = 105
unitvolume_ = "vol"
with self.assertRaises(ValidationError):
res = DiscreteResource(
name=name_,
type=type_,
class_of_supply=cos,
units=units_,
unit_mass=unitmass_,
unit_volume=unitvolume_,
)
class TestConResource(unittest.TestCase):
def test_good_data(self):
name_ = "Fuel"
type_ = ResourceType("Continuous")
cos = ClassOfSupply(101)
units_ = "kg"
unitmass_ = 105.5
unitvolume_ = 150.8
res = ContinuousResource(
name=name_,
type=type_,
class_of_supply=cos,
units=units_,
unit_mass=unitmass_,
unit_volume=unitvolume_,
)
self.assertEqual(res.name, name_)
self.assertEqual(res.type, type_)
self.assertEqual(res.class_of_supply, cos)
self.assertEqual(res.units, units_)
self.assertEqual(res.unit_mass, unitmass_)
self.assertEqual(res.unit_volume, unitvolume_)
def test_invalidType(self):
name_ = "Fuel"
type_ = 10
cos = ClassOfSupply(101)
units_ = "kg"
unitmass_ = 105.5
unitvolume_ = 150.8
with self.assertRaises(ValidationError):
res = ContinuousResource(
name=name_,
type=type_,
class_of_supply=cos,
units=units_,
unit_mass=unitmass_,
unit_volume=unitvolume_,
)
def test_invalidCOS(self):
name_ = "Fuel"
type_ = ResourceType("Continuous")
cos = "free"
units_ = "kg"
unitmass_ = 105.5
unitvolume_ = 150.8
with self.assertRaises(ValidationError):
res = ContinuousResource(
name=name_,
type=type_,
class_of_supply=cos,
units=units_,
unit_mass=unitmass_,
unit_volume=unitvolume_,
)
def test_invalidMass(self):
name_ = "Fuel"
type_ = ResourceType("Continuous")
cos = ClassOfSupply(101)
units_ = "kg"
unitmass_ = -10
unitvolume_ = 150.8
with self.assertRaises(ValidationError):
res = ContinuousResource(
name=name_,
type=type_,
class_of_supply=cos,
units=units_,
unit_mass=unitmass_,
unit_volume=unitvolume_,
)
def test_invalidVolume(self):
name_ = "Fuel"
type_ = ResourceType("Continuous")
cos = ClassOfSupply(101)
units_ = "kg"
unitmass_ = 105.5
unitvolume_ = "vol"
with self.assertRaises(ValidationError):
res = ContinuousResource(
name=name_,
type=type_,
class_of_supply=cos,
units=units_,
unit_mass=unitmass_,
unit_volume=unitvolume_,
)
| 28.860697
| 73
| 0.53577
| 507
| 5,801
| 5.794872
| 0.122288
| 0.032675
| 0.053097
| 0.065351
| 0.903336
| 0.903336
| 0.900613
| 0.890402
| 0.890402
| 0.888019
| 0
| 0.024581
| 0.382865
| 5,801
| 200
| 74
| 29.005
| 0.796089
| 0.007068
| 0
| 0.879121
| 0
| 0
| 0.025382
| 0
| 0
| 0
| 0
| 0
| 0.10989
| 1
| 0.054945
| false
| 0
| 0.021978
| 0
| 0.087912
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d7326ef05aa33fdcc0e26fd331e229d82688739
| 1,450
|
py
|
Python
|
tests/rules/test_integer.py
|
bakurits/Validator
|
4e666cf3cb2805e44baa257fed77df44662e6f86
|
[
"MIT"
] | null | null | null |
tests/rules/test_integer.py
|
bakurits/Validator
|
4e666cf3cb2805e44baa257fed77df44662e6f86
|
[
"MIT"
] | null | null | null |
tests/rules/test_integer.py
|
bakurits/Validator
|
4e666cf3cb2805e44baa257fed77df44662e6f86
|
[
"MIT"
] | null | null | null |
from validator.rules import Integer
def test_integer_01():
assert Integer().check("23")
assert Integer().check("0")
assert Integer().check("99999")
assert Integer().check("85189125")
assert Integer().check("-1")
assert Integer().check("-10000")
assert Integer().check("-161651651")
def test_integer_02():
assert Integer().check(23)
assert Integer().check(0)
assert Integer().check(99999)
assert Integer().check(85189125)
assert Integer().check(-1)
assert Integer().check(-10000)
assert Integer().check(-161651651)
def test_integer_03():
assert not Integer().check("-")
assert not Integer().check("9.1")
assert not Integer().check("-0.0")
assert not Integer().check("0.0")
assert not Integer().check("10.0")
assert not Integer().check("-10000.213")
assert not Integer().check("-161651651.12312312312")
def test_integer_04():
assert not Integer().check(9.1)
assert not Integer().check(-0.0)
assert not Integer().check(0.0)
assert not Integer().check(10.0)
assert not Integer().check(-10000.213)
assert not Integer().check(-161651651.12312312312)
def test_integer_05():
assert not Integer().check([])
assert not Integer().check([0, 1, 2])
assert not Integer().check("string")
assert not Integer().check(None)
assert not Integer().check({"a": 2})
assert not Integer().check(__file__)
| 18.589744
| 56
| 0.642069
| 189
| 1,450
| 4.851852
| 0.164021
| 0.431843
| 0.331516
| 0.435115
| 0.876772
| 0.827699
| 0.827699
| 0.827699
| 0.758997
| 0.758997
| 0
| 0.127334
| 0.187586
| 1,450
| 77
| 57
| 18.831169
| 0.651104
| 0
| 0
| 0
| 0
| 0
| 0.06069
| 0.015172
| 0
| 0
| 0
| 0
| 0.846154
| 1
| 0.128205
| true
| 0
| 0.025641
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
0d8f0cf0011d129ad0fba8c0f439b9a078fb1d62
| 20,417
|
py
|
Python
|
psono/administration/tests/user.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 48
|
2018-04-19T15:50:58.000Z
|
2022-01-23T15:58:11.000Z
|
psono/administration/tests/user.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 9
|
2018-09-13T14:56:18.000Z
|
2020-01-17T16:44:33.000Z
|
psono/administration/tests/user.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 11
|
2019-09-20T11:53:47.000Z
|
2021-07-18T22:41:31.000Z
|
from django.urls import reverse
from django.conf import settings
from django.contrib.auth.hashers import make_password
from rest_framework import status
import random
import string
import binascii
import os
from mock import patch
from restapi.utils import encrypt_with_db_secret
from restapi import models
from restapi.tests.base import APITestCaseExtended
class ReadUserTests(APITestCaseExtended):
def setUp(self):
self.test_email = encrypt_with_db_secret(''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com')
self.test_email2 = encrypt_with_db_secret(''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com')
self.test_email_bcrypt = 'a'
self.test_email_bcrypt2 = 'b'
self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
self.test_user_obj = models.User.objects.create(
email=self.test_email,
email_bcrypt=self.test_email_bcrypt,
username=self.test_username,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce,
user_sauce=self.test_user_sauce,
is_email_active=True
)
self.admin = models.User.objects.create(
email=self.test_email2,
email_bcrypt=self.test_email_bcrypt2,
username=self.test_username2,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce2,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce2,
user_sauce=self.test_user_sauce,
is_email_active=True,
is_superuser=True
)
def test_read_specific_user_success(self):
"""
Tests GET method on a specific user
"""
url = reverse('admin_user', kwargs={'user_id': str(self.test_user_obj.id)})
data = {
}
self.client.force_authenticate(user=self.admin)
response = self.client.get(url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_read_specific_user_failure_not_exist(self):
"""
Tests GET method on a specific user
"""
url = reverse('admin_user', kwargs={'user_id': '6fdbe7bb-b93f-4ef5-817d-7ef9aa7dd9de'})
data = {
}
self.client.force_authenticate(user=self.admin)
response = self.client.get(url, data)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_read_specific_user_failure_no_admin_rights(self):
"""
Tests GET method on user
"""
url = reverse('admin_user', kwargs={'user_id': str(self.test_user_obj.id)})
data = {
}
self.client.force_authenticate(user=self.test_user_obj)
response = self.client.get(url, data)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_read_user_success(self):
"""
Tests GET method on user
"""
url = reverse('admin_user')
data = {
}
self.client.force_authenticate(user=self.admin)
response = self.client.get(url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['users']), 2)
def test_read_user_failure_without_admin_privileges(self):
"""
Tests GET method on user
"""
url = reverse('admin_user')
data = {
}
self.client.force_authenticate(user=self.test_user_obj)
response = self.client.get(url, data)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
class CreateUserTests(APITestCaseExtended):
def setUp(self):
self.test_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
self.test_email2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com'
self.test_email_bcrypt = 'a'
self.test_email_bcrypt2 = 'b'
self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
self.test_user_obj = models.User.objects.create(
email=self.test_email,
email_bcrypt=self.test_email_bcrypt,
username=self.test_username,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce,
user_sauce=self.test_user_sauce,
is_email_active=True
)
self.admin = models.User.objects.create(
email=self.test_email2,
email_bcrypt=self.test_email_bcrypt2,
username=self.test_username2,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce2,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce2,
user_sauce=self.test_user_sauce,
is_email_active=True,
is_superuser=True
)
def test_successful(self):
"""
Tests to create a user
"""
url = reverse('admin_user')
data = {
'username': ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw',
'email': ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com',
'password': '123456',
}
self.client.force_authenticate(user=self.admin)
response = self.client.post(url, data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertTrue(models.User.objects.filter(username=data['username']).exists())
class UpdateUserTests(APITestCaseExtended):
def setUp(self):
self.test_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
self.test_email3 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com'
self.test_email_bcrypt = 'd'
self.test_email_bcrypt3 = 'e'
self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
self.test_username3 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_private_key_nonce3 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key_nonce3 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
self.test_user_obj = models.User.objects.create(
email=self.test_email,
email_bcrypt=self.test_email_bcrypt,
username=self.test_username,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce,
user_sauce=self.test_user_sauce,
is_email_active=True
)
self.test_email2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test@example.com'
self.test_email_bcrypt2 = "b"
self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test@psono.pw'
self.test_authkey2 = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
self.test_public_key2 = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
self.test_private_key2 = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_secret_key2 = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
self.test_user_sauce2 = 'a67fef1ff29eb8f866feaccad336fc6311fa4c71bc183b14c8fceff7416add99'
self.test_user_obj2 = models.User.objects.create(
username=self.test_username2,
email=self.test_email2,
email_bcrypt=self.test_email_bcrypt2,
authkey=make_password(self.test_authkey2),
public_key=self.test_public_key2,
private_key=self.test_private_key2,
private_key_nonce=self.test_private_key_nonce2,
secret_key=self.test_secret_key2,
secret_key_nonce=self.test_secret_key_nonce2,
user_sauce=self.test_user_sauce2,
is_email_active=True
)
self.admin = models.User.objects.create(
email=self.test_email3,
email_bcrypt=self.test_email_bcrypt3,
username=self.test_username3,
authkey=make_password(self.test_authkey),
public_key=self.test_public_key,
private_key=self.test_private_key,
private_key_nonce=self.test_private_key_nonce3,
secret_key=self.test_secret_key,
secret_key_nonce=self.test_secret_key_nonce3,
user_sauce=self.test_user_sauce,
is_email_active=True,
is_superuser=True
)
def test_update_user_success(self):
    """An admin may issue a PUT for an existing user without changing any field."""
    self.client.force_authenticate(user=self.admin)
    response = self.client.put(
        reverse('admin_user'),
        {'user_id': self.test_user_obj.id},
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_update_user_email_success(self):
    """An admin may update a user's email address to a fresh random one."""
    new_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
    self.client.force_authenticate(user=self.admin)
    response = self.client.put(
        reverse('admin_user'),
        {
            'user_id': self.test_user_obj.id,
            'email': new_email,
        },
    )
    self.assertEqual(response.status_code, status.HTTP_200_OK)
@patch('administration.serializers.update_user.settings', REGISTRATION_EMAIL_FILTER=['example2.com'])
def test_update_user_email_success_with_email_registration_filter(self, patched_registration_email_filter):
    """An email update is rejected when the address does not pass the registration email filter."""
    new_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
    self.client.force_authenticate(user=self.admin)
    response = self.client.put(
        reverse('admin_user'),
        {
            'user_id': self.test_user_obj.id,
            'email': new_email,  # example.com is not in the patched filter list
        },
    )
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_update_user_email_but_it_already_exists(self):
    """Assigning one user's email to a second user is rejected with 400."""
    url = reverse('admin_user')
    shared_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
    self.client.force_authenticate(user=self.admin)

    # First assignment succeeds.
    first = self.client.put(url, {'user_id': self.test_user_obj.id, 'email': shared_email})
    self.assertEqual(first.status_code, status.HTTP_200_OK)

    # Reusing the very same address for another user must fail.
    second = self.client.put(url, {'user_id': self.test_user_obj2.id, 'email': shared_email})
    self.assertEqual(second.status_code, status.HTTP_400_BAD_REQUEST)
def test_update_user_error_user_id_does_not_exist(self):
    """Updating a user id that does not exist yields 400."""
    self.client.force_authenticate(user=self.admin)
    response = self.client.put(
        reverse('admin_user'),
        {'user_id': 'bdf36a14-052d-400a-8701-1813d542c74c'},  # valid UUID, no such user
    )
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_update_user_no_user_id_error(self):
    """Omitting the user_id entirely yields 400."""
    self.client.force_authenticate(user=self.admin)
    response = self.client.put(reverse('admin_user'), {})
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
class DeleteUserTests(APITestCaseExtended):
    """Exercises the DELETE method of the admin user endpoint."""

    def setUp(self):
        """Create one regular user and one superuser sharing most key material."""
        def rand_prefix():
            # Random 10-letter prefix so test data never collides between runs.
            return ''.join(random.choice(string.ascii_lowercase) for _ in range(10))

        def rand_hex(length):
            return binascii.hexlify(os.urandom(length)).decode()

        self.test_email = rand_prefix() + 'test1@example.com'
        self.test_email2 = rand_prefix() + 'test2@example.com'
        self.test_email_bcrypt = 'a'
        self.test_email_bcrypt2 = 'b'
        self.test_username = rand_prefix() + 'test1@psono.pw'
        self.test_username2 = rand_prefix() + 'test2@psono.pw'
        self.test_authkey = rand_hex(settings.AUTH_KEY_LENGTH_BYTES)
        self.test_public_key = rand_hex(settings.USER_PUBLIC_KEY_LENGTH_BYTES)
        self.test_private_key = rand_hex(settings.USER_PRIVATE_KEY_LENGTH_BYTES)
        self.test_private_key_nonce = rand_hex(settings.NONCE_LENGTH_BYTES)
        self.test_private_key_nonce2 = rand_hex(settings.NONCE_LENGTH_BYTES)
        self.test_secret_key = rand_hex(settings.USER_SECRET_KEY_LENGTH_BYTES)
        self.test_secret_key_nonce = rand_hex(settings.NONCE_LENGTH_BYTES)
        self.test_secret_key_nonce2 = rand_hex(settings.NONCE_LENGTH_BYTES)
        self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'

        # Fields shared by both accounts; nonces/identity fields differ below.
        shared_fields = dict(
            authkey=make_password(self.test_authkey),
            public_key=self.test_public_key,
            private_key=self.test_private_key,
            secret_key=self.test_secret_key,
            user_sauce=self.test_user_sauce,
            is_email_active=True,
        )
        self.test_user_obj = models.User.objects.create(
            email=self.test_email,
            email_bcrypt=self.test_email_bcrypt,
            username=self.test_username,
            private_key_nonce=self.test_private_key_nonce,
            secret_key_nonce=self.test_secret_key_nonce,
            **shared_fields
        )
        self.admin = models.User.objects.create(
            email=self.test_email2,
            email_bcrypt=self.test_email_bcrypt2,
            username=self.test_username2,
            private_key_nonce=self.test_private_key_nonce2,
            secret_key_nonce=self.test_secret_key_nonce2,
            is_superuser=True,
            **shared_fields
        )

    def test_delete_user_success(self):
        """An admin can delete an existing user; related Duo entries vanish too."""
        self.client.force_authenticate(user=self.admin)
        response = self.client.delete(
            reverse('admin_user'),
            {'user_id': self.test_user_obj.id},
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(models.Duo.objects.all().count(), 0)

    def test_delete_user_failure_no_admin(self):
        """A non-admin caller is rejected with 403."""
        self.client.force_authenticate(user=self.test_user_obj)
        response = self.client.delete(
            reverse('admin_user'),
            {'user_id': self.test_user_obj.id},
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_delete_user_failure_no_user_id(self):
        """Omitting user_id yields 400."""
        self.client.force_authenticate(user=self.admin)
        response = self.client.delete(reverse('admin_user'), {})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_delete_user_failure_user_id_not_exist(self):
        """A well-formed but unknown user_id yields 400."""
        self.client.force_authenticate(user=self.admin)
        response = self.client.delete(
            reverse('admin_user'),
            {'user_id': '499d3c84-e8ae-4a6b-a4c2-43c79beb069a'},
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
| 40.190945
| 138
| 0.678307
| 2,540
| 20,417
| 5.132283
| 0.069291
| 0.108622
| 0.049555
| 0.06996
| 0.896057
| 0.884167
| 0.866293
| 0.853483
| 0.852869
| 0.849033
| 0
| 0.026762
| 0.220356
| 20,417
| 507
| 139
| 40.270217
| 0.792185
| 0.026498
| 0
| 0.715942
| 0
| 0
| 0.059161
| 0.024351
| 0
| 0
| 0
| 0
| 0.057971
| 1
| 0.057971
| false
| 0.031884
| 0.034783
| 0
| 0.104348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0dba619c85084143952db642b02ef58cfc0fdfbb
| 25,029
|
py
|
Python
|
src/tests/api/test_oauth.py
|
upsidedownpancake/pretix
|
bfeeb1028c9eccab4936029db7c38edd4cd5aad5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/tests/api/test_oauth.py
|
upsidedownpancake/pretix
|
bfeeb1028c9eccab4936029db7c38edd4cd5aad5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/tests/api/test_oauth.py
|
upsidedownpancake/pretix
|
bfeeb1028c9eccab4936029db7c38edd4cd5aad5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import base64
import json
import pytest
from django.utils.http import urlquote
from django.utils.timezone import now
from pretix.api.models import (
OAuthAccessToken, OAuthApplication, OAuthGrant, OAuthRefreshToken,
)
from pretix.base.models import Event, Organizer, Team, User
@pytest.fixture
def organizer():
    """Provide a persisted dummy organizer."""
    return Organizer.objects.create(slug='dummy', name='Dummy')
@pytest.fixture
def event(organizer):
    """Provide a dummy event owned by the dummy organizer, starting now."""
    return Event.objects.create(
        organizer=organizer,
        name='Dummy',
        slug='dummy',
        date_from=now(),
    )
@pytest.fixture
def admin_team(organizer):
    """Provide an all-events team allowed to create events and change teams."""
    return Team.objects.create(
        organizer=organizer,
        name='Admin team',
        all_events=True,
        can_change_teams=True,
        can_create_events=True,
    )
@pytest.fixture
def admin_user(admin_team):
    """Provide a user who is a member of the admin team."""
    user = User.objects.create_user('dummy@dummy.dummy', 'dummy')
    admin_team.members.add(user)
    return user
@pytest.fixture
def application():
    """Provide a confidential OAuth application using the authorization-code grant."""
    return OAuthApplication.objects.create(
        client_type='confidential',
        authorization_grant_type='authorization-code',
        name="pretalx",
        redirect_uris="https://pretalx.com",
    )
@pytest.mark.django_db
def test_authorize_require_login(client, application: OAuthApplication):
    """Unauthenticated authorize requests redirect to the login page."""
    query = 'client_id=%s&redirect_uri=%s' % (
        application.client_id, urlquote('https://example.org')
    )
    resp = client.get('/api/v1/oauth/authorize?' + query)
    assert resp.status_code == 302
    assert resp['Location'].startswith('/control/login')
@pytest.mark.django_db
def test_authorize_invalid_redirect_uri(client, admin_user, application: OAuthApplication):
    """A redirect_uri not registered on the application is rejected with 400."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    query = 'client_id=%s&redirect_uri=%s' % (
        application.client_id, urlquote('https://example.org')
    )
    resp = client.get('/api/v1/oauth/authorize?' + query)
    assert resp.status_code == 400
@pytest.mark.django_db
def test_authorize_missing_response_type(client, admin_user, application: OAuthApplication):
    """Leaving out response_type redirects back with an invalid_request error."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    query = 'client_id=%s&redirect_uri=%s' % (
        application.client_id, urlquote(application.redirect_uris)
    )
    resp = client.get('/api/v1/oauth/authorize?' + query)
    assert resp.status_code == 302
    assert resp['Location'] == 'https://pretalx.com?error=invalid_request&error_description=Missing+response_type+parameter.'
@pytest.mark.django_db
def test_authorize_require_organizer(client, admin_user, organizer, application: OAuthApplication):
    """Approving without selecting an organizer re-renders the form (HTTP 200)."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    url = '/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    )
    resp = client.get(url)
    assert resp.status_code == 200
    payload = {
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    }
    # No 'organizers' key — the approval must not go through.
    resp = client.post(url, data=payload)
    assert resp.status_code == 200
@pytest.mark.django_db
def test_authorize_denied(client, admin_user, organizer, application: OAuthApplication):
    """Submitting without the 'allow' flag redirects with access_denied."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    payload = {
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
    }
    resp = client.post('/api/v1/oauth/authorize', data=payload)
    assert resp.status_code == 302
    assert resp['Location'] == 'https://pretalx.com?error=access_denied'
@pytest.mark.django_db
def test_authorize_disallow_response_token(client, admin_user, organizer, application: OAuthApplication):
    """The implicit flow (response_type=token) is not allowed for this client."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    url = '/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=token' % (
        application.client_id, urlquote(application.redirect_uris)
    )
    resp = client.get(url)
    assert resp.status_code == 302
    assert resp['Location'] == 'https://pretalx.com?error=unauthorized_client'
@pytest.mark.django_db
def test_authorize_read_scope(client, admin_user, organizer, application: OAuthApplication):
    """Approving with scope 'read' stores a grant limited to that scope and organizer."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    location = resp['Location']
    assert location.startswith('https://pretalx.com?code=')
    # The grant code is the value behind '?code='.
    grant = OAuthGrant.objects.get(code=location.split("=")[1])
    assert list(grant.organizers.all()) == [organizer]
    assert grant.scope == "read"
@pytest.mark.django_db
def test_authorize_state(client, admin_user, organizer, application: OAuthApplication):
    """The opaque state parameter is echoed back on the redirect."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code&state=asdadf' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    payload = {
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
        'state': 'asdadf',
    }
    resp = client.post('/api/v1/oauth/authorize', data=payload)
    assert resp.status_code == 302
    assert 'state=asdadf' in resp['Location']
@pytest.mark.django_db
def test_authorize_default_scope(client, admin_user, organizer, application: OAuthApplication):
    """Approving with 'read write' stores a grant carrying both scopes."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    client.logout()
    location = resp['Location']
    assert location.startswith('https://pretalx.com?code=')
    grant = OAuthGrant.objects.get(code=location.split("=")[1])
    assert list(grant.organizers.all()) == [organizer]
    assert grant.scope == "read write"
@pytest.mark.django_db
def test_token_from_code_without_auth(client, admin_user, organizer, application: OAuthApplication):
    """Exchanging a grant code without client credentials is rejected with 401."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    assert resp['Location'].startswith('https://pretalx.com?code=')
    # The authorization code is the value behind '?code=' in the redirect.
    code = resp['Location'].split("=")[1]
    client.logout()
    # Token exchange WITHOUT the HTTP Basic client credentials header.
    resp = client.post('/api/v1/oauth/token', data={
        'code': code,
        'redirect_uri': application.redirect_uris,
        'grant_type': 'authorization_code',
    })
    assert resp.status_code == 401
@pytest.mark.django_db
def test_token_from_code(client, admin_user, organizer, application: OAuthApplication):
    """A grant code plus Basic client auth yields a bearer token scoped to the organizer."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    assert resp['Location'].startswith('https://pretalx.com?code=')
    code = resp['Location'].split("=")[1]
    client.logout()
    # Token exchange authenticated via HTTP Basic (client_id:client_secret).
    resp = client.post('/api/v1/oauth/token', data={
        'code': code,
        'redirect_uri': application.redirect_uris,
        'grant_type': 'authorization_code',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    assert data['expires_in'] == 86400
    assert data['token_type'] == "Bearer"
    assert data['scope'] == "read write"
    access_token = data['access_token']
    # The stored token carries the organizer selection made during authorize.
    grant = OAuthAccessToken.objects.get(token=access_token)
    assert list(grant.organizers.all()) == [organizer]
@pytest.mark.django_db
def test_use_token_for_access_one_organizer(client, admin_user, organizer, application: OAuthApplication):
    """A token granted for one organizer only exposes that organizer's data."""
    # Second organizer the user could access, but does NOT grant the app.
    o2 = Organizer.objects.create(name='A', slug='a')
    t2 = Team.objects.create(organizer=o2, can_change_teams=True, name='Admin team', all_events=True)
    t2.members.add(admin_user)
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    assert resp['Location'].startswith('https://pretalx.com?code=')
    code = resp['Location'].split("=")[1]
    client.logout()
    resp = client.post('/api/v1/oauth/token', data={
        'code': code,
        'redirect_uri': application.redirect_uris,
        'grant_type': 'authorization_code',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    access_token = data['access_token']
    resp = client.get('/api/v1/organizers/', HTTP_AUTHORIZATION='Bearer %s' % access_token)
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    # Only the granted organizer 'dummy' is listed, not 'a'.
    assert data == {'count': 1, 'next': None, 'previous': None, 'results': [{'name': 'Dummy', 'slug': 'dummy'}]}
    resp = client.get('/api/v1/organizers/dummy/events/', HTTP_AUTHORIZATION='Bearer %s' % access_token)
    assert resp.status_code == 200
    # Accessing the un-granted organizer is forbidden.
    resp = client.get('/api/v1/organizers/a/events/', HTTP_AUTHORIZATION='Bearer %s' % access_token)
    assert resp.status_code == 403
@pytest.mark.django_db
def test_use_token_for_access_two_organizers(client, admin_user, organizer, application: OAuthApplication):
    """A token granted for two organizers exposes both organizers' data."""
    o2 = Organizer.objects.create(name='A', slug='a')
    t2 = Team.objects.create(organizer=o2, can_change_teams=True, name='Admin team', all_events=True)
    t2.members.add(admin_user)
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        # Both organizers are granted this time.
        'organizers': [str(organizer.pk), str(o2.pk)],
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    assert resp['Location'].startswith('https://pretalx.com?code=')
    code = resp['Location'].split("=")[1]
    client.logout()
    resp = client.post('/api/v1/oauth/token', data={
        'code': code,
        'redirect_uri': application.redirect_uris,
        'grant_type': 'authorization_code',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    access_token = data['access_token']
    resp = client.get('/api/v1/organizers/', HTTP_AUTHORIZATION='Bearer %s' % access_token)
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    assert data == {'count': 2, 'next': None, 'previous': None, 'results': [
        {'name': 'A', 'slug': 'a'},
        {'name': 'Dummy', 'slug': 'dummy'},
    ]}
    # Both organizers' event lists are now accessible.
    resp = client.get('/api/v1/organizers/dummy/events/', HTTP_AUTHORIZATION='Bearer %s' % access_token)
    assert resp.status_code == 200
    resp = client.get('/api/v1/organizers/a/events/', HTTP_AUTHORIZATION='Bearer %s' % access_token)
    assert resp.status_code == 200
@pytest.mark.django_db
def test_token_refresh(client, admin_user, organizer, application: OAuthApplication):
    """Refreshing revokes the old access token and preserves the organizer grant."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    assert resp['Location'].startswith('https://pretalx.com?code=')
    code = resp['Location'].split("=")[1]
    client.logout()
    resp = client.post('/api/v1/oauth/token', data={
        'code': code,
        'redirect_uri': application.redirect_uris,
        'grant_type': 'authorization_code',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    refresh_token = data['refresh_token']
    access_token = data['access_token']
    # Use the refresh token to obtain a fresh access token.
    resp = client.post('/api/v1/oauth/token', data={
        'refresh_token': refresh_token,
        'grant_type': 'refresh_token',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    assert not OAuthAccessToken.objects.filter(token=access_token).exists()  # old token revoked
    data = json.loads(resp.content.decode())
    access_token = data['access_token']
    # The new access token keeps the original organizer restriction.
    grant = OAuthAccessToken.objects.get(token=access_token)
    assert list(grant.organizers.all()) == [organizer]
@pytest.mark.django_db
def test_allow_write(client, admin_user, organizer, application: OAuthApplication):
    """With 'read write' scope, a POST passes authorization (400 = validation, not 403)."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': [str(organizer.pk)],
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    assert resp['Location'].startswith('https://pretalx.com?code=')
    code = resp['Location'].split("=")[1]
    client.logout()
    resp = client.post('/api/v1/oauth/token', data={
        'code': code,
        'redirect_uri': application.redirect_uris,
        'grant_type': 'authorization_code',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    access_token = data['access_token']
    # Empty POST body: the write is permitted but fails validation (400).
    resp = client.post('/api/v1/organizers/dummy/events/', HTTP_AUTHORIZATION='Bearer %s' % access_token)
    assert resp.status_code == 400
@pytest.mark.django_db
def test_allow_read_only(client, admin_user, organizer, application: OAuthApplication):
    """With 'read' scope only, a POST is forbidden (403)."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': [str(organizer.pk)],
        'redirect_uri': application.redirect_uris,
        'scope': 'read',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    assert resp['Location'].startswith('https://pretalx.com?code=')
    code = resp['Location'].split("=")[1]
    client.logout()
    resp = client.post('/api/v1/oauth/token', data={
        'code': code,
        'redirect_uri': application.redirect_uris,
        'grant_type': 'authorization_code',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    access_token = data['access_token']
    # A read-only token must not be allowed to create events.
    resp = client.post('/api/v1/organizers/dummy/events/', HTTP_AUTHORIZATION='Bearer %s' % access_token)
    assert resp.status_code == 403
@pytest.mark.django_db
def test_token_revoke_refresh_token(client, admin_user, organizer, application: OAuthApplication):
    """Revoking the refresh token also invalidates the access token and blocks refreshes."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    assert resp['Location'].startswith('https://pretalx.com?code=')
    code = resp['Location'].split("=")[1]
    client.logout()
    resp = client.post('/api/v1/oauth/token', data={
        'code': code,
        'redirect_uri': application.redirect_uris,
        'grant_type': 'authorization_code',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    refresh_token = data['refresh_token']
    access_token = data['access_token']
    # Revoke the REFRESH token via the RFC 7009 endpoint.
    resp = client.post('/api/v1/oauth/revoke_token', data={
        'token': refresh_token,
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    assert not OAuthAccessToken.objects.get(token=access_token).is_valid()
    assert not OAuthRefreshToken.objects.filter(token=refresh_token, revoked__isnull=True).exists()
    # Refreshing with the revoked token must now fail.
    resp = client.post('/api/v1/oauth/token', data={
        'refresh_token': refresh_token,
        'grant_type': 'refresh_token',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 401
@pytest.mark.django_db
def test_token_revoke_access_token(client, admin_user, organizer, application: OAuthApplication):
    """Revoking only the access token still allows refreshing to a new valid token."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    assert resp['Location'].startswith('https://pretalx.com?code=')
    code = resp['Location'].split("=")[1]
    client.logout()
    resp = client.post('/api/v1/oauth/token', data={
        'code': code,
        'redirect_uri': application.redirect_uris,
        'grant_type': 'authorization_code',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    refresh_token = data['refresh_token']
    access_token = data['access_token']
    # Revoke only the ACCESS token.
    resp = client.post('/api/v1/oauth/revoke_token', data={
        'token': access_token,
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    assert not OAuthAccessToken.objects.get(token=access_token).is_valid()  # old token revoked
    # The refresh token is still usable to obtain a new access token.
    resp = client.post('/api/v1/oauth/token', data={
        'refresh_token': refresh_token,
        'grant_type': 'refresh_token',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    access_token = data['access_token']
    grant = OAuthAccessToken.objects.get(token=access_token)
    assert list(grant.organizers.all()) == [organizer]
@pytest.mark.django_db
def test_user_revoke(client, admin_user, organizer, application: OAuthApplication):
    """A user revoking access via the control panel deletes the access token and revokes the refresh token."""
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.get('/api/v1/oauth/authorize?client_id=%s&redirect_uri=%s&response_type=code' % (
        application.client_id, urlquote(application.redirect_uris)
    ))
    assert resp.status_code == 200
    resp = client.post('/api/v1/oauth/authorize', data={
        'organizers': str(organizer.pk),
        'redirect_uri': application.redirect_uris,
        'scope': 'read write',
        'client_id': application.client_id,
        'response_type': 'code',
        'allow': 'Authorize',
    })
    assert resp.status_code == 302
    assert resp['Location'].startswith('https://pretalx.com?code=')
    code = resp['Location'].split("=")[1]
    client.logout()
    resp = client.post('/api/v1/oauth/token', data={
        'code': code,
        'redirect_uri': application.redirect_uris,
        'grant_type': 'authorization_code',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 200
    data = json.loads(resp.content.decode())
    refresh_token = data['refresh_token']
    access_token = data['access_token']
    at = OAuthAccessToken.objects.get(token=access_token)
    # The user revokes the authorization via the control-panel UI.
    client.login(email='dummy@dummy.dummy', password='dummy')
    resp = client.post('/control/settings/oauth/authorized/{}/revoke'.format(at.pk), data={
    })
    assert resp.status_code == 302
    client.logout()
    assert not OAuthAccessToken.objects.filter(token=access_token).exists()
    assert OAuthRefreshToken.objects.get(token=refresh_token).revoked
    # The revoked refresh token may no longer be exchanged.
    resp = client.post('/api/v1/oauth/token', data={
        'refresh_token': refresh_token,
        'grant_type': 'refresh_token',
    }, HTTP_AUTHORIZATION='Basic ' + base64.b64encode(
        ('%s:%s' % (application.client_id, application.client_secret)).encode()).decode())
    assert resp.status_code == 401
| 42.638842
| 125
| 0.675736
| 3,014
| 25,029
| 5.436961
| 0.049768
| 0.041496
| 0.057607
| 0.072008
| 0.914872
| 0.914872
| 0.906999
| 0.893513
| 0.884604
| 0.874535
| 0
| 0.015626
| 0.169004
| 25,029
| 586
| 126
| 42.711604
| 0.772249
| 0.001398
| 0
| 0.854442
| 0
| 0.00189
| 0.236285
| 0.079189
| 0
| 0
| 0
| 0
| 0.177694
| 1
| 0.045369
| false
| 0.035917
| 0.013233
| 0.005671
| 0.068053
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0dd41d69bdb9838f51c6c8f8274f6301a2557bc0
| 21,488
|
py
|
Python
|
bookclubs/tests/views/club_views/test_update_club_info.py
|
hihi-itsann/SEG_Major_Group
|
bec3d333da59361fd3036f20bb4d7aa82bc3fa5e
|
[
"MIT"
] | 6
|
2022-02-05T23:03:59.000Z
|
2022-02-08T03:08:58.000Z
|
bookclubs/tests/views/club_views/test_update_club_info.py
|
hihi-itsann/SEG_Major_Group
|
bec3d333da59361fd3036f20bb4d7aa82bc3fa5e
|
[
"MIT"
] | 2
|
2022-03-13T04:18:25.000Z
|
2022-03-16T20:49:53.000Z
|
bookclubs/tests/views/club_views/test_update_club_info.py
|
hihi-itsann/SEG_Major_Group
|
bec3d333da59361fd3036f20bb4d7aa82bc3fa5e
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.urls import reverse
from bookclubs.forms import ClubForm
from bookclubs.models import Club, User, Role
class UpdateClubViewTestCase(TestCase):
    """Tests of the update_club_info view.

    @johndoe owns all four clubs; @janedoe is moderator of club1, member of
    club2, banned from club3, and has no role in club4.  Only the owner may
    update a club's information.
    """

    fixtures = [
        'bookclubs/tests/fixtures/default_user.json',
        'bookclubs/tests/fixtures/other_users.json',
        'bookclubs/tests/fixtures/default_clubs.json',
    ]

    # Fixture values that a rejected update must leave untouched.
    FIXTURE_LOCATION = 'Bush House'
    FIXTURE_GENRE = 'Fiction'
    FIXTURE_DESCRIPTION = (
        'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do '
        'eiusmod tempor incididunt ut labore et dolore magna aliqua. Ipsum '
        'dolor sit amet consectetur adipiscing. Mauris in aliquam sem '
        'fringilla ut morbi tincidunt augue interdum.'
    )

    def setUp(self):
        self.user = User.objects.get(username='@johndoe')    # owner of every club
        self.user2 = User.objects.get(username='@janedoe')   # holds non-owner roles
        self.club1 = Club.objects.get(club_name='private_online')
        self.club2 = Club.objects.get(club_name='public_online')
        self.club3 = Club.objects.get(club_name='private_in-person')
        self.club4 = Club.objects.get(club_name='public_in-person')
        self._create_test_roles()
        self.url1 = self._update_url(self.club1)
        self.url2 = self._update_url(self.club2)
        self.url3 = self._update_url(self.club3)
        self.url4 = self._update_url(self.club4)
        self.form_input_online_public = self._make_form_input('ONL', 'PUB', 'Non-Fiction')
        self.form_input_online_private = self._make_form_input('ONL', 'PRI', 'Fiction')
        self.form_input_in_person_private = self._make_form_input('OFF', 'PRI', 'Fiction')
        self.form_input_in_person_public = self._make_form_input('OFF', 'PUB', 'Non-Fiction')

    # ------------------------------------------------------------------ #
    # Helpers
    # ------------------------------------------------------------------ #

    @staticmethod
    def _update_url(club):
        """Return the update_club_info URL for the given club."""
        return reverse('update_club_info', kwargs={'club_name': club.club_name})

    @staticmethod
    def _make_form_input(meeting_status, public_status, genre):
        """Build a valid update form payload; all inputs rename the club to 'club1'."""
        return {
            'club_name': 'club1',
            'meeting_status': meeting_status,
            'location': 'location',
            'city': 'city1',
            'country': 'country1',
            'public_status': public_status,
            'genre': genre,
            'description': 'description',
        }

    def _log_in(self, user):
        self.client.login(username=user.username, password='Password123')

    def _assert_get_renders_form(self, url, club):
        """A GET by the owner renders an unbound ClubForm bound to the club instance."""
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'update_club_info.html')
        form = response.context['form']
        self.assertIsInstance(form, ClubForm)
        self.assertEqual(form.instance, club)

    def _assert_invalid_form_response(self, response, club):
        """An invalid POST re-renders the bound form for the same club."""
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'update_club_info.html')
        form = response.context['form']
        self.assertIsInstance(form, ClubForm)
        # BUG FIX: the original used assertTrue(club_name, expected), whose
        # second argument is only the failure *message* — it asserted nothing.
        self.assertEqual(response.context['club_name'], club.club_name)
        self.assertTrue(form.is_bound)

    def _assert_club_unchanged(self, club, club_name, meeting_status, public_status):
        """Reload the club from the DB and check it still holds fixture values.

        BUG FIX: the original asserted against stale in-memory objects (only
        self.user was refreshed), so the 'unchanged' checks passed trivially.
        """
        club.refresh_from_db()
        self.assertEqual(club.club_name, club_name)
        self.assertEqual(club.meeting_status, meeting_status)
        self.assertEqual(club.location, self.FIXTURE_LOCATION)
        self.assertEqual(club.public_status, public_status)
        self.assertEqual(club.genre, self.FIXTURE_GENRE)
        self.assertEqual(club.description, self.FIXTURE_DESCRIPTION)

    def _assert_post_redirects_without_update(self, url, redirect_url, form_input):
        """A forbidden POST redirects and creates/deletes no club."""
        before_count = Club.objects.count()
        response = self.client.post(url, form_input)
        self.assertEqual(Club.objects.count(), before_count)
        self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)

    def _assert_club_updated(self, response, form_input):
        """A successful POST redirects to the renamed club's feed and persists every field."""
        response_url = reverse('club_feed', kwargs={'club_name': form_input['club_name']})
        self.assertRedirects(response, response_url, status_code=302, target_status_code=200)
        self.assertTemplateUsed(response, 'club_feed.html')
        club = Club.objects.get(club_name=form_input['club_name'])
        self.assertEqual(club.meeting_status, form_input['meeting_status'])
        self.assertEqual(club.city, form_input['city'])
        self.assertEqual(club.country, form_input['country'])
        self.assertEqual(club.location, form_input['location'])
        self.assertEqual(club.public_status, form_input['public_status'])
        self.assertEqual(club.genre, form_input['genre'])
        self.assertEqual(club.description, form_input['description'])

    # ------------------------------------------------------------------ #
    # URL resolution
    # ------------------------------------------------------------------ #

    def test_update_club_info_url(self):
        for club in (self.club1, self.club2, self.club3, self.club4):
            self.assertEqual(self._update_url(club),
                             f'/club/{club.club_name}/update_club_info/')

    # ------------------------------------------------------------------ #
    # GET as owner
    # ------------------------------------------------------------------ #

    def test_get_update_club1_url(self):
        self._log_in(self.user)
        self._assert_get_renders_form(self.url1, self.club1)

    def test_get_update_club2_url(self):
        self._log_in(self.user)
        self._assert_get_renders_form(self.url2, self.club2)

    def test_get_update_club3_url(self):
        self._log_in(self.user)
        self._assert_get_renders_form(self.url3, self.club3)

    def test_get_update_club4_url(self):
        self._log_in(self.user)
        self._assert_get_renders_form(self.url4, self.club4)

    # ------------------------------------------------------------------ #
    # Invalid form input as owner
    # ------------------------------------------------------------------ #

    def test_unsuccessful_club1_wrong_name_update(self):
        self._log_in(self.user)
        self.form_input_online_public['club_name'] = 'BAD'
        before_count = Club.objects.count()
        response = self.client.post(self.url1, self.form_input_online_public)
        self.assertEqual(Club.objects.count(), before_count)
        self._assert_invalid_form_response(response, self.club1)
        self._assert_club_unchanged(self.club1, 'private_online', 'ONL', 'PRI')

    def test_unsuccessful_club2_wrong_meeting_status_update(self):
        self._log_in(self.user)
        self.form_input_online_private['meeting_status'] = 'BAD'
        before_count = Club.objects.count()
        response = self.client.post(self.url2, self.form_input_online_private)
        self.assertEqual(Club.objects.count(), before_count)
        self._assert_invalid_form_response(response, self.club2)
        self._assert_club_unchanged(self.club2, 'public_online', 'ONL', 'PUB')

    def test_unsuccessful_club3_wrong_location_update(self):
        self._log_in(self.user)
        self.form_input_online_private['location'] = ''
        before_count = Club.objects.count()
        # BUG FIX: the original posted to self.url2 (club2) while asserting
        # on club3 — a copy-paste error; this test targets club3's URL.
        response = self.client.post(self.url3, self.form_input_online_private)
        self.assertEqual(Club.objects.count(), before_count)
        self._assert_invalid_form_response(response, self.club3)
        self._assert_club_unchanged(self.club3, 'private_in-person', 'OFF', 'PRI')

    def test_unsuccessful_club4_wrong_genre_update(self):
        self._log_in(self.user)
        self.form_input_online_private['genre'] = ''
        before_count = Club.objects.count()
        # BUG FIX: the original posted to self.url2 (club2) while asserting
        # on club4 — a copy-paste error; this test targets club4's URL.
        response = self.client.post(self.url4, self.form_input_online_private)
        self.assertEqual(Club.objects.count(), before_count)
        self._assert_invalid_form_response(response, self.club4)
        self._assert_club_unchanged(self.club4, 'public_in-person', 'OFF', 'PUB')

    # ------------------------------------------------------------------ #
    # POST as a non-owner
    # ------------------------------------------------------------------ #

    def test_moderator_cannot_update_club1(self):
        self._log_in(self.user2)
        self._assert_post_redirects_without_update(
            self.url1,
            reverse('club_feed', kwargs={'club_name': self.club1.club_name}),
            self.form_input_online_public)
        self._assert_club_unchanged(self.club1, 'private_online', 'ONL', 'PRI')

    def test_member_cannot_update_club2(self):
        self._log_in(self.user2)
        self._assert_post_redirects_without_update(
            self.url2,
            reverse('club_feed', kwargs={'club_name': self.club2.club_name}),
            self.form_input_online_public)
        self._assert_club_unchanged(self.club2, 'public_online', 'ONL', 'PUB')

    def test_banned_user_cannot_update_club3(self):
        self._log_in(self.user2)
        self._assert_post_redirects_without_update(
            self.url3, reverse('feed'), self.form_input_online_public)
        self._assert_club_unchanged(self.club3, 'private_in-person', 'OFF', 'PRI')

    def test_non_member_cannot_update_club4(self):
        self._log_in(self.user2)
        self._assert_post_redirects_without_update(
            self.url4, reverse('feed'), self.form_input_online_public)
        self._assert_club_unchanged(self.club4, 'public_in-person', 'OFF', 'PUB')

    def test_cannot_update_non_existent_club(self):
        self._log_in(self.user2)
        url = reverse('update_club_info', kwargs={'club_name': 'non_exist'})
        self._assert_post_redirects_without_update(
            url, reverse('feed'), self.form_input_online_public)

    # ------------------------------------------------------------------ #
    # Successful updates as owner
    # ------------------------------------------------------------------ #

    def test_successful_club1_update(self):
        self._log_in(self.user)
        before_count = Club.objects.count()
        response = self.client.post(self.url1, self.form_input_online_public, follow=True)
        self.assertEqual(Club.objects.count(), before_count)
        self._assert_club_updated(response, self.form_input_online_public)

    def test_successful_club2_update(self):
        self._log_in(self.user)
        before_count = Club.objects.count()
        response = self.client.post(self.url2, self.form_input_online_private, follow=True)
        self.assertEqual(Club.objects.count(), before_count)
        self._assert_club_updated(response, self.form_input_online_private)

    def test_successful_club3_update(self):
        self._log_in(self.user)
        before_count = Club.objects.count()
        response = self.client.post(self.url3, self.form_input_in_person_private, follow=True)
        self.assertEqual(Club.objects.count(), before_count)
        self._assert_club_updated(response, self.form_input_in_person_private)

    def test_successful_club4_update(self):
        # BUG FIX: the original stored the re-fetched club in self.club3 and
        # asserted on it — a copy-paste slip; the shared helper avoids that.
        self._log_in(self.user)
        before_count = Club.objects.count()
        response = self.client.post(self.url4, self.form_input_in_person_public, follow=True)
        self.assertEqual(Club.objects.count(), before_count)
        self._assert_club_updated(response, self.form_input_in_person_public)

    # ------------------------------------------------------------------ #
    # Fixture roles
    # ------------------------------------------------------------------ #

    def _create_test_roles(self):
        """user owns every club; user2 gets a distinct non-owner role per club (none in club4)."""
        for club in (self.club1, self.club2, self.club3, self.club4):
            Role.objects.create(club=club, user=self.user, club_role='OWN')
        for club, role in ((self.club1, 'MOD'), (self.club2, 'MEM'), (self.club3, 'BAN')):
            Role.objects.create(club=club, user=self.user2, club_role=role)
| 52.029056
| 263
| 0.678146
| 2,547
| 21,488
| 5.53671
| 0.05693
| 0.111686
| 0.102397
| 0.038718
| 0.932775
| 0.907176
| 0.886045
| 0.871791
| 0.856403
| 0.852574
| 0
| 0.018628
| 0.20807
| 21,488
| 412
| 264
| 52.15534
| 0.810072
| 0.01452
| 0
| 0.705094
| 0
| 0.021448
| 0.197761
| 0.022582
| 0
| 0
| 0
| 0
| 0.380697
| 1
| 0.053619
| false
| 0.045576
| 0.010724
| 0
| 0.069705
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0dec8663a4a73eae1d6cb5127d04d8f7bd41b460
| 4,074
|
py
|
Python
|
NLPFrontEnd/FrontEnd/migrations/0014_auto_20190707_2337.py
|
rodricass/cerberus.com
|
2ca644590608b8109a695a1195206c2fffa728f8
|
[
"Unlicense"
] | null | null | null |
NLPFrontEnd/FrontEnd/migrations/0014_auto_20190707_2337.py
|
rodricass/cerberus.com
|
2ca644590608b8109a695a1195206c2fffa728f8
|
[
"Unlicense"
] | null | null | null |
NLPFrontEnd/FrontEnd/migrations/0014_auto_20190707_2337.py
|
rodricass/cerberus.com
|
2ca644590608b8109a695a1195206c2fffa728f8
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-07-08 02:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add previous/next paragraph text columns to every search-result model.

    Auto-generated migration, rewritten to derive the sixteen identical
    AddField operations from the model list instead of spelling them out.
    """

    dependencies = [
        ('FrontEnd', '0013_auto_20190705_2343'),
    ]

    # Every search-result model (historical shadow tables first, matching
    # the order the original auto-generated migration used).
    _PARAGRAPH_MODELS = (
        'historicalresultadobusqgeneral',
        'historicalresultadobusqguiada',
        'historicalresultadobusqinteligente',
        'historicalresultadobusqinteligentetokens',
        'resultadobusqgeneral',
        'resultadobusqguiada',
        'resultadobusqinteligente',
        'resultadobusqinteligentetokens',
    )

    operations = [
        migrations.AddField(
            model_name=model,
            name=field_name,
            field=models.TextField(default=''),
            preserve_default=False,
        )
        for model in _PARAGRAPH_MODELS
        for field_name in ('parrafo_ant', 'parrafo_next')
    ] + [
        migrations.AlterField(
            model_name='historicalresultadobusqinteligentetokens',
            name='parrafo',
            field=models.TextField(default=''),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='resultadobusqinteligentetokens',
            name='parrafo',
            field=models.TextField(),
        ),
    ]
| 33.121951
| 66
| 0.573883
| 301
| 4,074
| 7.571429
| 0.172757
| 0.071084
| 0.157964
| 0.201404
| 0.914875
| 0.865731
| 0.865731
| 0.827556
| 0.827556
| 0.827556
| 0
| 0.012301
| 0.321551
| 4,074
| 122
| 67
| 33.393443
| 0.812229
| 0.016937
| 0
| 0.921739
| 1
| 0
| 0.187656
| 0.116692
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017391
| 0
| 0.043478
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
21fd95b459168dd1de1ddaad3c7965640af04337
| 110
|
py
|
Python
|
flowgraph/integration_tests/data/numpy_meshgrid.py
|
Bhaskers-Blu-Org1/pyflowgraph
|
0a08a978fcab124a27fdb660048d417a80fb082c
|
[
"Apache-2.0"
] | 17
|
2019-01-01T02:03:35.000Z
|
2021-11-18T15:34:50.000Z
|
flowgraph/integration_tests/data/numpy_meshgrid.py
|
Bhaskers-Blu-Org1/pyflowgraph
|
0a08a978fcab124a27fdb660048d417a80fb082c
|
[
"Apache-2.0"
] | 25
|
2018-08-11T03:49:36.000Z
|
2021-11-08T17:26:39.000Z
|
flowgraph/integration_tests/data/numpy_meshgrid.py
|
IBM/pyflowgraph
|
0a08a978fcab124a27fdb660048d417a80fb082c
|
[
"Apache-2.0"
] | 5
|
2018-08-09T21:26:37.000Z
|
2021-07-06T02:58:41.000Z
|
import numpy as np

# One shared 50-sample axis spanning [-3, 3]; meshgrid expands it into
# 2-D coordinate matrices (xx varies along columns, yy along rows).
_axis = np.linspace(-3, 3, 50)
xx, yy = np.meshgrid(_axis, _axis)
| 22
| 44
| 0.518182
| 18
| 110
| 3.166667
| 0.555556
| 0.350877
| 0.385965
| 0.421053
| 0.491228
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106667
| 0.318182
| 110
| 4
| 45
| 27.5
| 0.653333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1d085788126490ee1ae7b3818b9a1fe7de619a1e
| 64,445
|
py
|
Python
|
GAS/run_first_look.py
|
SpandanCh/GAS
|
d85961790f21662ac4fb1c9ba3edc71f92973be4
|
[
"MIT"
] | null | null | null |
GAS/run_first_look.py
|
SpandanCh/GAS
|
d85961790f21662ac4fb1c9ba3edc71f92973be4
|
[
"MIT"
] | null | null | null |
GAS/run_first_look.py
|
SpandanCh/GAS
|
d85961790f21662ac4fb1c9ba3edc71f92973be4
|
[
"MIT"
] | null | null | null |
import os
import textwrap
import warnings
from glob import glob

import numpy as np

import astropy.units as u
from astropy.io import fits
from astropy.table import Table, join
from pyspeckit.spectrum.models.ammonia_constants import voff_lines_dict
from skimage.morphology import binary_opening, disk, erosion
from spectral_cube import SpectralCube

from . import first_look
from . import gasPipeline
from . import catalogs
from . import baseline

# NOTE(review): plot_all_moments also uses `aplpy`, which is not imported in
# this header — confirm where it is expected to come from.
def FirstLook(regions=None, file_extension=None, release='all', trim_edge=True, overwrite=True):
    """
    Generate first-look products (rms, mom0 and mom1 maps) for every cube
    found in a directory tree that follows the GAS naming convention
    REGION_LINENAME_EXTENSION.fits (e.g. for NGC1333 in ammonia (1,1)
    this looks for NGC1333_NH3_11_all.fits).

    Parameters
    ----------
    regions : list
        List of region names (strings) to be included. If None, all
        regions in the Observation Log are searched for and reduced.
    file_extension : string
        Name of file extensions to be searched for. Defaults to
        '_' + release.
    release : string
        Name of data release. Must match a boolean column name in the
        Observation Log.
    trim_edge : boolean
        If True, use disk erosion to mask noisy map edges.
    overwrite : boolean
        Passed to the FITS writers; existing products are replaced when True.
    """
    if file_extension is None:
        file_extension = '_' + release
    # Build the catalog once and filter it down if a subset was requested
    # (the original called catalogs.GenerateRegions three times redundantly).
    RegionCatalog = catalogs.GenerateRegions(release=release)
    if regions is not None:
        keep = [idx for idx, row in enumerate(RegionCatalog)
                if row['Region name'] in regions]
        RegionCatalog = RegionCatalog[keep]
    for ThisRegion in RegionCatalog:
        region_name = ThisRegion['Region name']
        print("Now NH3(1,1)")
        vsys = ThisRegion['VAVG'] * u.km / u.s
        throw = 2 * u.km / u.s + ThisRegion['VRANGE'] * u.km / u.s / 2
        file_in = '{0}/{0}_NH3_11{1}.fits'.format(region_name, file_extension)
        voff11 = voff_lines_dict['oneone']
        try:
            s = SpectralCube.read(file_in)
            s = s.with_spectral_unit(u.km / u.s, velocity_convention='radio')
            if trim_edge:
                s = trim_edge_spectral_cube(s)
            # Channels further than `throw` from every NH3(1,1) hyperfine
            # component are treated as line-free and define the rms.
            # (np.bool alias was removed in numpy >= 1.24; use plain bool.)
            mask = np.ones(s.shape[0], dtype=bool)
            for deltav in voff11:
                mask *= (np.abs(s.spectral_axis - (deltav * u.km / u.s + vsys)) > throw)
            cube_rms = s.with_mask(mask[:, None, None])
            rms = cube_rms.std(axis=0)
            mom_mask = ~mask
            cube_mom = s.with_mask(mom_mask[:, None, None])
            mom_0 = cube_mom.moment(order=0)
            mom_1 = cube_mom.moment(order=1)
            rms.write(file_in.replace('.fits', '_rms.fits'), overwrite=overwrite)
            mom_0.write(file_in.replace('.fits', '_mom0.fits'), overwrite=overwrite)
            mom_1.write(file_in.replace('.fits', '_mom1.fits'), overwrite=overwrite)
        except IOError:
            warnings.warn("File not found: {0}".format(file_in))
        linelist = ['NH3_22', 'NH3_33', 'C2S', 'HC5N', 'HC7N_21_20', 'HC7N_22_21']
        for line in linelist:
            file_in = '{0}/{0}_{1}{2}.fits'.format(region_name, line, file_extension)
            try:
                s = SpectralCube.read(file_in)
                s = s.with_spectral_unit(u.km / u.s, velocity_convention='radio')
                # Single-component lines: everything within `throw` of vsys
                # is considered signal, the rest is the rms window.
                mask = (np.abs(s.spectral_axis - vsys) > throw)
                cube_rms = s.with_mask(mask[:, None, None])
                rms = cube_rms.std(axis=0)
                mom_mask = ~mask
                cube_mom = s.with_mask(mom_mask[:, None, None])
                mom_0 = cube_mom.moment(order=0)
                mom_1 = cube_mom.moment(order=1)
                rms.write(file_in.replace('.fits', '_rms.fits'), overwrite=overwrite)
                mom_0.write(file_in.replace('.fits', '_mom0.fits'), overwrite=overwrite)
                mom_1.write(file_in.replace('.fits', '_mom1.fits'), overwrite=overwrite)
            except IOError:
                warnings.warn("File not found {0}".format(file_in))
def trim_edge_spectral_cube(scube):
    """Return a copy of *scube* with pixels near the map edges blanked.

    The outermost row/column of the spatial footprint is blanked and the
    finite-pixel mask is then eroded with a radius-3 disk, so pixels within
    about 3 pixels of any edge become NaN.  This is useful to remove very
    noisy pixels due to lower coverage by KFPA.

    Updated version of an earlier function, working on a spectral cube
    instance; the result is a new masked cube with NaN fill.
    """
    finite = np.isfinite(scube)
    # Pick a single spatial plane of the mask (a view, so edits below
    # propagate back into `finite`).
    if len(scube.shape) == 2:
        plane = finite[:, :]
    else:
        plane = finite[0, :, :]
    # Blank the outermost rows and columns of the footprint.
    plane[:, 0] = plane[:, -1] = False
    plane[0, :] = plane[-1, :] = False
    # Erode with a disk and broadcast the 2-D result over all channels.
    finite &= erosion(plane, disk(3))
    return scube.with_mask(finite).with_fill_value(np.nan)
def plot_all_moments(file_extension='base_all'):
    """
    Plot integrated-intensity (mom0) maps for every region/line product found
    under the current directory (run from images/), overlaying NH3 (1,1)
    contours, and save each figure to figures/REGION_LINE_EXT_mom0_map.pdf.

    Parameters
    ----------
    file_extension : string
        Extension part of the product filenames to look for.
    """
    # Assume sub-directories correspond to regions to be imaged.
    region_list = [d.strip("/") for d in glob("*/")]
    line_list = ['NH3_11', 'NH3_22', 'NH3_33', 'C2S', 'HC5N',
                 'HC7N_21_20', 'HC7N_22_21']
    label_list = ['NH$_3$(1,1)', 'NH$_3$(2,2)', 'NH$_3$(3,3)', 'C$_2$S',
                  'HC$_5$N', 'HC$_7$N (21-20)', 'HC$_7$N (22-21)']
    extension = file_extension
    color_table = 'magma'
    text_color = 'black'
    text_size = 14
    beam_color = '#d95f02'  # previously used '#E31A1C'
    # Single set of contours for first-look images.
    w11_step = 0.3
    cont_levs = 2 ** np.arange(0, 20) * w11_step
    # Structuring element for masking of small (noisy) regions.
    selem = np.array([[0, 1, 0], [1, 1, 1], [0, 1, 0]])
    # NOTE(review): relies on `aplpy` being importable; it is not imported in
    # this module's header — confirm before running.
    for region in region_list:
        file_w11 = '{0}/{0}_NH3_11_{1}_mom0.fits'.format(region, extension)
        if not os.path.isfile(file_w11):
            print('File {0} not found'.format(file_w11))
            continue
        LowestContour = cont_levs[0] * 0.5
        w11_hdu = fits.open(file_w11)
        w11_data = w11_hdu[0].data  # renamed from `map` (shadowed a builtin)
        signal_mask = binary_opening(w11_data > LowestContour, selem)
        w11_hdu[0].data = signal_mask * w11_data
        for line_i, label_i in zip(line_list, label_list):
            file_mom0 = '{0}/{0}_{1}_{2}_mom0.fits'.format(region, line_i, extension)
            if not os.path.isfile(file_mom0):
                print('File {0} not found'.format(file_mom0))
                continue
            line_hdu = fits.open(file_mom0)
            # Percentiles give a robust initial colourscale range.
            v_min = np.nanpercentile(line_hdu[0].data, 0.1)
            v_max = np.nanpercentile(line_hdu[0].data, 99.9)
            fig = aplpy.FITSFigure(file_mom0, hdu=0)
            # BUG FIX: cbar_ticks was originally passed as an (unsupported)
            # keyword to show_colorscale and then read via ticks=cbar_ticks
            # before any assignment (NameError).  Define it first and pass it
            # only to colorbar.show().
            if line_i == 'NH3_11':
                cbar_ticks = [0, 3, 6, 12, 24, 48, 96]
                fig.show_colorscale(cmap=color_table, vmin=v_min, vmax=v_max,
                                    stretch='log',
                                    vmid=v_min - (1. * np.abs(v_min)))
            elif line_i in ('NH3_22', 'NH3_33'):
                cbar_ticks = [0, 1, 2, 3, 6, 12]
                # NOTE(review): vmid with stretch='linear' is kept from the
                # original, but aplpy may reject vmid for linear stretches —
                # confirm against the installed aplpy version.
                fig.show_colorscale(cmap=color_table, vmin=v_min, vmax=v_max,
                                    stretch='linear',
                                    vmid=v_min - (1. * np.abs(v_min)))
            else:
                cbar_ticks = None
                fig.show_colorscale(cmap=color_table, vmin=v_min, vmax=v_max)
            fig.add_colorbar()
            if cbar_ticks is not None:
                fig.colorbar.show(box_orientation='horizontal', width=0.1,
                                  pad=0.0, ticks=cbar_ticks, location='top',
                                  axis_label_text='Integrated Intensity (K km s$^{-1}$)')
            else:
                fig.colorbar.show(box_orientation='horizontal', width=0.1,
                                  pad=0.0, location='top',
                                  axis_label_text='Integrated Intensity (K km s$^{-1}$)')
            fig.colorbar.set_font(family='sans_serif', size=text_size)
            fig.colorbar.set_axis_label_font(family='sans_serif', size=text_size)
            fig.set_nan_color('0.95')
            # NH3 (1,1) contours over every line map.
            fig.show_contour(w11_hdu, colors='gray', levels=cont_levs)
            # Axis labels and ticks.
            fig.axis_labels.set_font(family='sans_serif', size=text_size)
            fig.ticks.set_color(text_color)
            fig.tick_labels.set_font(family='sans_serif', size=text_size)
            fig.tick_labels.set_style('colons')
            fig.tick_labels.set_xformat('hh:mm:ss')
            fig.tick_labels.set_yformat('dd:mm')
            # Beam indicator.
            fig.add_beam(major=0.0088441, minor=0.0088441, angle=0)
            fig.beam.set_color(beam_color)
            fig.beam.set_corner('bottom left')
            # Region / line label.
            fig.add_label(0.025, 0.1,
                          '{0}\n{1}'.format(region, label_i),
                          relative=True, color=text_color,
                          horizontalalignment='left',
                          family='sans_serif', size=text_size)
            fig.save('figures/{0}_{1}_{2}_mom0_map.pdf'.format(region, line_i, extension),
                     adjust_bbox=True)
            fig.close()
def FirstLook_OrionA(file_extension='_all'):
    """
    Function to create First Look products for OrionA. The file_extension
    parameter is used to select the proper files to be processed.

    Parameters
    ----------
    file_extension : str
        Suffix used to build the input FITS file names, e.g. '_all'
        selects 'OrionA/OrionA_NH3_11_all.fits'.
    """
    region_name='OrionA'
    # Systemic velocity of the region and the velocity 'throw' used to
    # place line-free (rms) windows on either side of the line.
    vsys = 10.*u.km/u.s
    throw = 4.0*u.km/u.s
    print("Now NH3(1,1)")
    file_in='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    s = SpectralCube.read(file_in)
    s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
    spaxis = s.spectral_axis.value
    # NH3(1,1): use the ammonia-specific windows so the hyperfine
    # satellite lines are excluded from the rms estimate.
    index_rms = baseline.ammoniaWindow(spaxis,spaxis,window=4,v0=vsys.value)
    index_peak= ~baseline.tightWindow(spaxis,spaxis,window=3,v0=vsys.value)
    first_look.peak_rms( file_in, index_rms=index_rms, index_peak=index_peak)
    linelist = ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']
    for line in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name,line,file_extension)
        s = SpectralCube.read(file_in)
        s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
        # Line-free windows bracketing the line.  NOTE(review): the
        # (start, end) pairing assumes a velocity axis that decreases
        # with channel number -- confirm for these cubes.
        a_rms = [s.closest_spectral_channel(vsys+2*throw),
                 s.closest_spectral_channel(vsys-throw)]
        b_rms = [s.closest_spectral_channel(vsys+throw),
                 s.closest_spectral_channel(vsys-2*throw)]
        # Peak window: +/- 3 km/s around vsys.  Order the arange bounds
        # explicitly so the window is never empty if the spectral axis
        # happens to be ascending in velocity (np.arange(start, stop)
        # returns an empty array when start >= stop).
        ch_a = s.closest_spectral_channel(vsys+3*u.km/u.s)
        ch_b = s.closest_spectral_channel(vsys-3*u.km/u.s)
        index_peak = np.arange(min(ch_a, ch_b), max(ch_a, ch_b))
        index_rms=first_look.create_index( a_rms, b_rms)
        first_look.peak_rms( file_in, index_rms=index_rms,
                             index_peak=index_peak)
def FirstLook_B18(file_extension='_all'):
    """
    Function to create First Look products for B18. The file_extension
    parameter is used to select the proper files to be processed.

    Parameters
    ----------
    file_extension : str
        Suffix used to build the input FITS file names, e.g. '_all'
        selects 'B18/B18_NH3_11_all.fits'.
    """
    region_name='B18'
    # Systemic velocity and the velocity 'throw' that places the
    # line-free windows on either side of the line.
    vsys = 6.*u.km/u.s
    throw = 2.0*u.km/u.s
    print("Now NH3(1,1)")
    file_in='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    s = SpectralCube.read(file_in)
    s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
    spaxis = s.spectral_axis.value
    # NH3(1,1): ammonia-specific windows exclude the hyperfine satellite
    # lines from the rms estimate.
    index_rms = baseline.ammoniaWindow(spaxis,spaxis,window=4,v0=vsys.value)
    index_peak= ~baseline.tightWindow(spaxis,spaxis,window=3,v0=vsys.value)
    first_look.peak_rms( file_in, index_rms=index_rms, index_peak=index_peak)
    linelist = ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']
    for line in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name,line,file_extension)
        s = SpectralCube.read(file_in)
        s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
        # Line-free windows bracketing the line.  NOTE(review): the
        # (start, end) pairing assumes the velocity axis decreases with
        # channel number -- confirm for these cubes.
        a_rms = [s.closest_spectral_channel(vsys+2*throw),
                 s.closest_spectral_channel(vsys-throw)]
        b_rms = [s.closest_spectral_channel(vsys+throw),
                 s.closest_spectral_channel(vsys-2*throw)]
        # Peak window: +/- 3 km/s around vsys; order the bounds so the
        # window is non-empty for either spectral-axis direction.
        ch_a = s.closest_spectral_channel(vsys+3*u.km/u.s)
        ch_b = s.closest_spectral_channel(vsys-3*u.km/u.s)
        index_peak = np.arange(min(ch_a, ch_b), max(ch_a, ch_b))
        index_rms=first_look.create_index( a_rms, b_rms)
        first_look.peak_rms( file_in, index_rms=index_rms,
                             index_peak=index_peak)
def FirstLook_L1688(file_extension='_all'):
    """
    Function to create First Look products for L1688. The file_extension
    parameter is used to select the proper files to be processed.

    Parameters
    ----------
    file_extension : str
        Suffix used to build the input FITS file names, e.g. '_all'
        selects 'L1688/L1688_NH3_11_all.fits'.
    """
    region_name='L1688'
    # Systemic velocity and velocity 'throw' for the line-free windows.
    vsys = 3.5*u.km/u.s
    throw = 5*u.km/u.s
    print("Now NH3(1,1)")
    file_in='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    s = SpectralCube.read(file_in)
    s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
    spaxis = s.spectral_axis.value
    # NH3(1,1): ammonia-specific windows exclude the hyperfine satellite
    # lines from the rms estimate.
    index_rms = baseline.ammoniaWindow(spaxis,spaxis,window=4,v0=vsys.value)
    index_peak= ~baseline.tightWindow(spaxis,spaxis,window=3,v0=vsys.value)
    first_look.peak_rms( file_in, index_rms=index_rms, index_peak=index_peak)
    linelist = ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']
    for line in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name,line,file_extension)
        s = SpectralCube.read(file_in)
        s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
        # Line-free windows bracketing the line.  NOTE(review): the
        # (start, end) pairing assumes the velocity axis decreases with
        # channel number -- confirm for these cubes.
        a_rms = [s.closest_spectral_channel(vsys+2*throw),
                 s.closest_spectral_channel(vsys-throw)]
        b_rms = [s.closest_spectral_channel(vsys+throw),
                 s.closest_spectral_channel(vsys-2*throw)]
        # Peak window: +/- 3 km/s around vsys; order the bounds so the
        # window is non-empty for either spectral-axis direction.
        ch_a = s.closest_spectral_channel(vsys+3*u.km/u.s)
        ch_b = s.closest_spectral_channel(vsys-3*u.km/u.s)
        index_peak = np.arange(min(ch_a, ch_b), max(ch_a, ch_b))
        index_rms=first_look.create_index( a_rms, b_rms)
        first_look.peak_rms( file_in, index_rms=index_rms,
                             index_peak=index_peak)
    # NOTE: HC7N (21-20) shows an absorption feature at ~91 km/s (at
    # 23.6951 GHz) from its rest frequency (23.6879 GHz was used); there
    # is no emission line.  The pipeline also did not produce the FITS
    # file for HC7N (22-21).
def FirstLook_L1689(file_extension='_all'):
    """
    Produce the First Look data products for the L1689 region.

    The file_extension argument selects which set of input FITS cubes is
    processed.
    """
    region_name='L1689'
    print("Now NH3(1,1)")
    # NH3(1,1): hard-coded start/end channels of the line-free windows
    # and the channel range searched for the line peak.
    win_start = [ 0, 150, 310, 420, 530, 690]
    win_end = [ 60, 230, 330, 440, 610, 760]
    clean_index = first_look.create_index(win_start, win_end)
    peak_index = np.arange(340,420)
    in_name='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    out_name=in_name.replace(file_extension+'.fits',
                             '_base'+file_extension+'.fits')
    first_look.baseline(in_name, out_name, index_clean=clean_index,
                        polyorder=1)
    first_look.peak_rms(out_name, index_rms=clean_index, index_peak=peak_index)
    # Remaining lines: windows derived from the systemic velocity and a
    # fixed velocity throw instead of hard-coded channel numbers.
    vsys = 3.9*u.km/u.s
    throw = 5*u.km/u.s
    for transition in ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']:
        in_name = '{0}/{0}_{1}{2}.fits'.format(region_name,transition,file_extension)
        cube = SpectralCube.read(in_name).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        win_start = [cube.closest_spectral_channel(vsys+2*throw),
                     cube.closest_spectral_channel(vsys-throw)]
        win_end = [cube.closest_spectral_channel(vsys+throw),
                   cube.closest_spectral_channel(vsys-2*throw)]
        peak_index = np.arange(cube.closest_spectral_channel(vsys+3*u.km/u.s),
                               cube.closest_spectral_channel(vsys-3*u.km/u.s))
        clean_index = first_look.create_index(win_start, win_end)
        out_name = in_name.replace(file_extension+'.fits',
                                   '_base'+file_extension+'.fits')
        first_look.baseline(in_name, out_name,
                            index_clean=clean_index, polyorder=1)
        first_look.peak_rms(out_name, index_rms=clean_index,
                            index_peak=peak_index)
def FirstLook_SerAqu(file_extension='_all'):
    """
    Produce the First Look data products for Serpens_Aquila.

    The file_extension argument selects which set of input FITS cubes is
    processed.
    """
    region_name='Serpens_Aquila'
    print("Now NH3(1,1)")
    # NH3(1,1): hard-coded start/end channels of the line-free windows
    # and the channel range searched for the line peak.
    win_start = [ 0, 150, 310, 420, 530, 690]
    win_end = [ 60, 230, 330, 440, 610, 780]
    clean_index = first_look.create_index(win_start, win_end)
    peak_index = np.arange(340,420)
    in_name='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    out_name=in_name.replace(file_extension+'.fits',
                             '_base'+file_extension+'.fits')
    first_look.baseline(in_name, out_name, index_clean=clean_index,
                        polyorder=1)
    first_look.peak_rms(out_name, index_rms=clean_index, index_peak=peak_index)
    # Remaining lines: windows derived from the systemic velocity and a
    # fixed velocity throw instead of hard-coded channel numbers.
    vsys = 6.35*u.km/u.s
    throw = 8*u.km/u.s
    for transition in ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']:
        in_name = '{0}/{0}_{1}{2}.fits'.format(region_name,transition,file_extension)
        cube = SpectralCube.read(in_name).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        win_start = [cube.closest_spectral_channel(vsys+2*throw),
                     cube.closest_spectral_channel(vsys-throw)]
        win_end = [cube.closest_spectral_channel(vsys+throw),
                   cube.closest_spectral_channel(vsys-2*throw)]
        peak_index = np.arange(cube.closest_spectral_channel(vsys+3*u.km/u.s),
                               cube.closest_spectral_channel(vsys-3*u.km/u.s))
        clean_index = first_look.create_index(win_start, win_end)
        out_name = in_name.replace(file_extension+'.fits',
                                   '_base'+file_extension+'.fits')
        first_look.baseline(in_name, out_name,
                            index_clean=clean_index, polyorder=1)
        first_look.peak_rms(out_name, index_rms=clean_index,
                            index_peak=peak_index)
def FirstLook_L1455(file_extension='_all'):
    """
    Function to create First Look products for L1455. The file_extension
    parameter is used to select the proper files to be processed.

    Each transition is baselined with a 1st-order polynomial fit to its
    line-free channels and then peak-intensity and rms maps are created.
    The per-line channel windows below were previously duplicated as
    seven near-identical code blocks; they are now data-driven.
    """
    region_name='L1455'
    # Per-line setup: (printed label, line name, line-free window start
    # channels, line-free window end channels, peak-search range).
    line_setup = [
        ('NH3(1,1)', 'NH3_11', [0, 140, 300, 410, 520, 680],
         [105, 270, 370, 480, 630, 745], (350, 430)),
        ('NH3(2,2)', 'NH3_22', [0, 340], [290, 648], (260, 400)),
        # NH3(3,3): no lines detected; windows copied from NH3(2,2).
        ('NH3(3,3)', 'NH3_33', [0, 340], [290, 648], (260, 400)),
        ('CCS', 'C2S', [0, 350], [290, 648], (309, 334)),
        ('HC5N', 'HC5N', [0, 350], [290, 648], (315, 325)),
        ('HC7N_21_20', 'HC7N_21_20', [0, 180], [130, 275], (128, 147)),
        # HC7N_22_21: no lines detected.  The original note claimed the
        # windows follow HC7N_21_20, but the values actually match the
        # NH3(2,2)-style windows; kept as-is to preserve behavior.
        ('HC7N_22_21', 'HC7N_22_21', [0, 340], [290, 648], (308, 328)),
    ]
    for label, line, a_rms, b_rms, (peak_lo, peak_hi) in line_setup:
        print('Now ' + label)
        index_rms = first_look.create_index(a_rms, b_rms)
        index_peak = np.arange(peak_lo, peak_hi)
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name, line,
                                               file_extension)
        file_out = file_in.replace(file_extension+'.fits',
                                   '_base'+file_extension+'.fits')
        first_look.baseline(file_in, file_out, index_clean=index_rms,
                            polyorder=1)
        first_look.peak_rms(file_out, index_rms=index_rms,
                            index_peak=index_peak)
def FirstLook_NGC1333(file_extension='_all'):
    """
    Build the First Look products for NGC1333.

    The file_extension argument selects which set of input FITS cubes is
    processed.
    """
    region_name='NGC1333'
    # Systemic velocity and velocity throw for the line-free windows.
    vsys = 7.9*u.km/u.s
    throw = 2.0*u.km/u.s
    print("Now NH3(1,1)")
    file_in='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    cube = SpectralCube.read(file_in).with_spectral_unit(
        u.km/u.s, velocity_convention='radio')
    velo = cube.spectral_axis.value
    # NH3(1,1) uses the ammonia-specific window functions.
    index_rms = baseline.ammoniaWindow(velo, velo, window=4, v0=vsys.value)
    index_peak = ~baseline.tightWindow(velo, velo, window=3, v0=vsys.value)
    first_look.peak_rms(file_in, index_rms=index_rms, index_peak=index_peak)
    for line in ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name,line,file_extension)
        cube = SpectralCube.read(file_in).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        a_rms = [cube.closest_spectral_channel(vsys+2*throw),
                 cube.closest_spectral_channel(vsys-throw)]
        b_rms = [cube.closest_spectral_channel(vsys+throw),
                 cube.closest_spectral_channel(vsys-2*throw)]
        index_peak = np.arange(cube.closest_spectral_channel(vsys+3*u.km/u.s),
                               cube.closest_spectral_channel(vsys-3*u.km/u.s))
        index_rms = first_look.create_index(a_rms, b_rms)
        first_look.peak_rms(file_in, index_rms=index_rms,
                            index_peak=index_peak)
def FirstLook_B1(file_extension='_all'):
    """
    Generate First Look products for the B1 region.

    The file_extension argument selects which set of input FITS cubes is
    processed.
    """
    region_name='B1'
    print("Now NH3(1,1)")
    # NH3(1,1): hard-coded line-free windows and peak-search channels.
    win_start = [ 0, 130, 290, 400, 500, 660]
    win_end = [ 70, 240, 340, 440, 620, 740]
    clean_index = first_look.create_index(win_start, win_end)
    peak_index = np.arange(340,400)
    in_name='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    out_name=in_name.replace(file_extension+'.fits',
                             '_base'+file_extension+'.fits')
    first_look.baseline(in_name, out_name, index_clean=clean_index,
                        polyorder=1)
    first_look.peak_rms(out_name, index_rms=clean_index, index_peak=peak_index)
    print("Now the rest")
    # Remaining lines: windows derived from the systemic velocity and a
    # fixed velocity throw.
    vsys = 6.6*u.km/u.s
    throw = 2.0*u.km/u.s
    for transition in ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']:
        in_name = '{0}/{0}_{1}{2}.fits'.format(region_name,transition,file_extension)
        cube = SpectralCube.read(in_name).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        win_start = [cube.closest_spectral_channel(vsys+3*throw),
                     cube.closest_spectral_channel(vsys-throw)]
        win_end = [cube.closest_spectral_channel(vsys+throw),
                   cube.closest_spectral_channel(vsys-3*throw)]
        peak_index = np.arange(cube.closest_spectral_channel(vsys+3*u.km/u.s),
                               cube.closest_spectral_channel(vsys-3*u.km/u.s))
        clean_index = first_look.create_index(win_start, win_end)
        out_name = in_name.replace(file_extension+'.fits',
                                   '_base'+file_extension+'.fits')
        first_look.baseline(in_name, out_name,
                            index_clean=clean_index, polyorder=1)
        first_look.peak_rms(out_name, index_rms=clean_index,
                            index_peak=peak_index)
def FirstLook_IC348(file_extension='_all'):
    """
    Function to create First Look products for IC348. The file_extension
    parameter is used to select the proper files to be processed.
    """
    region_name='IC348'
    print("Now NH3(1,1)")
    # Line-free window start/end channels and peak-search range for the
    # NH3(1,1) cube.
    a_rms = [ 0, 130, 290, 400, 500, 660]
    b_rms = [ 70, 240, 340, 440, 620, 740]
    index_rms=first_look.create_index( a_rms, b_rms)
    index_peak=np.arange(340,400)
    file_in='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    file_out=file_in.replace(file_extension+'.fits',
                             '_base'+file_extension+'.fits')
    first_look.baseline( file_in, file_out, index_clean=index_rms,
                         polyorder=1)
    first_look.peak_rms( file_out, index_rms=index_rms, index_peak=index_peak)
    # Fixed: the loop below processes all six remaining transitions, not
    # just NH3(2,2); use the accurate message from FirstLook_B1.
    print("Now the rest")
    linelist = ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']
    vsys = 9.0*u.km/u.s
    throw = 2.0*u.km/u.s
    for line in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name,line,file_extension)
        s = SpectralCube.read(file_in)
        s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
        # Line-free windows at +/-(1..3)*throw around vsys.
        a_rms = [s.closest_spectral_channel(vsys+3*throw),
                 s.closest_spectral_channel(vsys-throw)]
        b_rms = [s.closest_spectral_channel(vsys+throw),
                 s.closest_spectral_channel(vsys-3*throw)]
        index_peak = np.arange(s.closest_spectral_channel(vsys+3*u.km/u.s),
                               s.closest_spectral_channel(vsys-3*u.km/u.s))
        index_rms=first_look.create_index( a_rms, b_rms)
        file_out=file_in.replace(file_extension+'.fits',
                                 '_base'+file_extension+'.fits')
        first_look.baseline( file_in, file_out,
                             index_clean=index_rms, polyorder=1)
        first_look.peak_rms( file_out, index_rms=index_rms,
                             index_peak=index_peak)
def FirstLook_B59(file_extension='_all'):
    """
    Function to create First Look products for B59. The file_extension
    parameter is used to select the proper files to be processed.
    """
    region_name='B59'
    print("Now NH3(1,1)")
    # Line-free window start/end channels and peak-search range for the
    # NH3(1,1) cube.
    a_rms = [ 0, 130, 290, 400, 500, 660]
    b_rms = [ 70, 240, 340, 440, 620, 740]
    index_rms=first_look.create_index( a_rms, b_rms)
    index_peak=np.arange(340,400)
    file_in='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    file_out=file_in.replace(file_extension+'.fits',
                             '_base'+file_extension+'.fits')
    first_look.baseline( file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms( file_out, index_rms=index_rms, index_peak=index_peak)
    # Fixed: the loop below processes all six remaining transitions, not
    # just NH3(2,2); use the accurate message from FirstLook_B1.
    print("Now the rest")
    linelist = ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']
    vsys = 3.5*u.km/u.s
    throw = 2.0*u.km/u.s
    for line in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name,line,file_extension)
        s = SpectralCube.read(file_in)
        s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
        # Line-free windows at +/-(1..3)*throw around vsys.
        a_rms = [s.closest_spectral_channel(vsys+3*throw),
                 s.closest_spectral_channel(vsys-throw)]
        b_rms = [s.closest_spectral_channel(vsys+throw),
                 s.closest_spectral_channel(vsys-3*throw)]
        index_peak = np.arange(s.closest_spectral_channel(vsys+3*u.km/u.s),
                               s.closest_spectral_channel(vsys-3*u.km/u.s))
        index_rms=first_look.create_index( a_rms, b_rms)
        file_out=file_in.replace(file_extension+'.fits',
                                 '_base'+file_extension+'.fits')
        first_look.baseline( file_in, file_out,
                             index_clean=index_rms, polyorder=1)
        first_look.peak_rms( file_out, index_rms=index_rms,
                             index_peak=index_peak)
def FirstLook_Cepheus_L1228(file_extension='_all'):
    """
    Function to create First Look products for Cepheus L1228. The file_extension
    parameter is used to select the proper files to be processed.
    """
    region_name = 'Cepheus_L1228'
    print("Now NH3(1,1)")
    # Line-free window start/end channels and peak-search range for the
    # NH3(1,1) cube.
    a_rms = [ 0, 135, 290, 405, 505, 665]
    b_rms = [ 70, 245, 350, 455, 625, 740]
    index_rms=first_look.create_index( a_rms, b_rms)
    index_peak=np.arange(350,410)
    file_in='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    file_out=file_in.replace(file_extension+'.fits',
                             '_base'+file_extension+'.fits')
    first_look.baseline( file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms( file_out, index_rms=index_rms, index_peak=index_peak)
    # Fixed: the loop below processes all six remaining transitions, not
    # just NH3(2,2); use the accurate message from FirstLook_B1.
    print("Now the rest")
    linelist = ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']
    vsys = -8.0*u.km/u.s
    throw = 2.0*u.km/u.s
    for line in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name,line,file_extension)
        s = SpectralCube.read(file_in)
        s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
        # Line-free windows at +/-(1..3)*throw around vsys.
        a_rms = [s.closest_spectral_channel(vsys+3*throw),
                 s.closest_spectral_channel(vsys-throw)]
        b_rms = [s.closest_spectral_channel(vsys+throw),
                 s.closest_spectral_channel(vsys-3*throw)]
        index_peak = np.arange(s.closest_spectral_channel(vsys+3*u.km/u.s),
                               s.closest_spectral_channel(vsys-3*u.km/u.s))
        index_rms=first_look.create_index( a_rms, b_rms)
        file_out=file_in.replace(file_extension+'.fits',
                                 '_base'+file_extension+'.fits')
        first_look.baseline( file_in, file_out,
                             index_clean=index_rms, polyorder=1)
        first_look.peak_rms( file_out, index_rms=index_rms,
                             index_peak=index_peak)
def FirstLook_Cepheus_L1251(file_extension='_all'):
    """
    Function to create First Look products for Cepheus_L1251. The file_extension
    parameter is used to select the proper files to be processed.
    """
    region_name = 'Cepheus_L1251'
    print("Now NH3(1,1)")
    # Line-free window start/end channels and peak-search range for the
    # NH3(1,1) cube.
    a_rms = [ 0, 135, 290, 405, 505, 665]
    b_rms = [ 70, 245, 350, 455, 625, 740]
    index_rms=first_look.create_index( a_rms, b_rms)
    index_peak=np.arange(350,410)
    file_in='{0}/{0}_NH3_11{1}.fits'.format(region_name,file_extension)
    file_out=file_in.replace(file_extension+'.fits',
                             '_base'+file_extension+'.fits')
    first_look.baseline( file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms( file_out, index_rms=index_rms, index_peak=index_peak)
    # Fixed: the loop below processes all six remaining transitions, not
    # just NH3(2,2); use the accurate message from FirstLook_B1.
    print("Now the rest")
    linelist = ['NH3_22','NH3_33','C2S','HC5N','HC7N_21_20','HC7N_22_21']
    vsys = -3.8*u.km/u.s
    throw = 2.0*u.km/u.s
    for line in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name,line,file_extension)
        s = SpectralCube.read(file_in)
        s = s.with_spectral_unit(u.km/u.s,velocity_convention='radio')
        # Line-free windows at +/-(1..3)*throw around vsys.
        a_rms = [s.closest_spectral_channel(vsys+3*throw),
                 s.closest_spectral_channel(vsys-throw)]
        b_rms = [s.closest_spectral_channel(vsys+throw),
                 s.closest_spectral_channel(vsys-3*throw)]
        index_peak = np.arange(s.closest_spectral_channel(vsys+3*u.km/u.s),
                               s.closest_spectral_channel(vsys-3*u.km/u.s))
        index_rms=first_look.create_index( a_rms, b_rms)
        file_out=file_in.replace(file_extension+'.fits',
                                 '_base'+file_extension+'.fits')
        first_look.baseline( file_in, file_out,
                             index_clean=index_rms, polyorder=1)
        first_look.peak_rms( file_out, index_rms=index_rms,
                             index_peak=index_peak)
def FirstLook_B1E(file_extension='_all'):
    """
    Create First Look products for B1E.

    Parameters
    ----------
    file_extension : str
        Suffix selecting which files to process (default '_all').
    """
    region_name = 'B1E'
    print("Now NH3(1,1)")
    # Line-free channel windows (start/end pairs) used for the baseline fit.
    a_rms = [0, 135, 290, 405, 505, 665]
    b_rms = [70, 245, 350, 455, 625, 740]
    index_rms = first_look.create_index(a_rms, b_rms)
    # Channels expected to contain the NH3(1,1) emission.
    index_peak = np.arange(350, 410)
    file_in = '{0}/{0}_NH3_11{1}.fits'.format(region_name, file_extension)
    file_out = file_in.replace(file_extension + '.fits',
                               '_base' + file_extension + '.fits')
    first_look.baseline(file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms(file_out, index_rms=index_rms, index_peak=index_peak)
    print("Now NH3(2,2)")
    linelist = ['NH3_22', 'NH3_33', 'C2S', 'HC5N', 'HC7N_21_20', 'HC7N_22_21']
    vsys = 7.3*u.km/u.s
    throw = 2.0*u.km/u.s
    for transition in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name, transition,
                                               file_extension)
        cube = SpectralCube.read(file_in).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        # Emission-free windows bracketing the line for the rms estimate.
        a_rms = [cube.closest_spectral_channel(vsys + 3*throw),
                 cube.closest_spectral_channel(vsys - throw)]
        b_rms = [cube.closest_spectral_channel(vsys + throw),
                 cube.closest_spectral_channel(vsys - 3*throw)]
        index_rms = first_look.create_index(a_rms, b_rms)
        index_peak = np.arange(cube.closest_spectral_channel(vsys + 3*u.km/u.s),
                               cube.closest_spectral_channel(vsys - 3*u.km/u.s))
        file_out = file_in.replace(file_extension + '.fits',
                                   '_base' + file_extension + '.fits')
        first_look.baseline(file_in, file_out,
                            index_clean=index_rms, polyorder=1)
        first_look.peak_rms(file_out, index_rms=index_rms,
                            index_peak=index_peak)
def FirstLook_HC2(file_extension='_all'):
    """
    Create First Look products for Heiles cloud2 (HC2).

    Parameters
    ----------
    file_extension : str
        Suffix selecting which files to process (default '_all').
    """
    region_name = 'HC2'
    print("Now NH3(1,1)")
    # Line-free channel windows (start/end pairs) used for the baseline fit.
    a_rms = [0, 135, 290, 405, 505, 665]
    b_rms = [70, 245, 350, 455, 625, 740]
    index_rms = first_look.create_index(a_rms, b_rms)
    # Channels expected to contain the NH3(1,1) emission.
    index_peak = np.arange(350, 410)
    file_in = '{0}/{0}_NH3_11{1}.fits'.format(region_name, file_extension)
    file_out = file_in.replace(file_extension + '.fits',
                               '_base' + file_extension + '.fits')
    first_look.baseline(file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms(file_out, index_rms=index_rms, index_peak=index_peak)
    print("Now NH3(2,2)")
    linelist = ['NH3_22', 'NH3_33', 'C2S', 'HC5N', 'HC7N_21_20', 'HC7N_22_21']
    vsys = 5.3*u.km/u.s
    throw = 2.0*u.km/u.s
    for transition in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name, transition,
                                               file_extension)
        cube = SpectralCube.read(file_in).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        # Emission-free windows bracketing the line for the rms estimate.
        a_rms = [cube.closest_spectral_channel(vsys + 3*throw),
                 cube.closest_spectral_channel(vsys - throw)]
        b_rms = [cube.closest_spectral_channel(vsys + throw),
                 cube.closest_spectral_channel(vsys - 3*throw)]
        index_rms = first_look.create_index(a_rms, b_rms)
        index_peak = np.arange(cube.closest_spectral_channel(vsys + 3*u.km/u.s),
                               cube.closest_spectral_channel(vsys - 3*u.km/u.s))
        file_out = file_in.replace(file_extension + '.fits',
                                   '_base' + file_extension + '.fits')
        first_look.baseline(file_in, file_out,
                            index_clean=index_rms, polyorder=1)
        first_look.peak_rms(file_out, index_rms=index_rms,
                            index_peak=index_peak)
def FirstLook_OrionB_NGC2023_2024(file_extension='_all'):
    """
    Create First Look products for OrionB NGC2023-2024.

    Parameters
    ----------
    file_extension : str
        Suffix selecting which files to process (default '_all').
    """
    region_name = 'OrionB_NGC2023-2024'
    print("Now NH3(1,1)")
    # Line-free channel windows (start/end pairs) used for the baseline fit.
    a_rms = [0, 150, 310, 420, 520, 680]
    b_rms = [70, 225, 325, 435, 600, 740]
    index_rms = first_look.create_index(a_rms, b_rms)
    # Channels expected to contain the NH3(1,1) emission.
    index_peak = np.arange(350, 410)
    file_in = '{0}/{0}_NH3_11{1}.fits'.format(region_name, file_extension)
    file_out = file_in.replace(file_extension + '.fits',
                               '_base' + file_extension + '.fits')
    first_look.baseline(file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms(file_out, index_rms=index_rms, index_peak=index_peak)
    print("Now NH3(2,2)")
    linelist = ['NH3_22', 'NH3_33', 'C2S', 'HC5N', 'HC7N_21_20', 'HC7N_22_21']
    vsys = 10.2*u.km/u.s
    throw = 2.0*u.km/u.s
    for transition in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name, transition,
                                               file_extension)
        cube = SpectralCube.read(file_in).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        # Emission-free windows bracketing the line for the rms estimate.
        a_rms = [cube.closest_spectral_channel(vsys + 3*throw),
                 cube.closest_spectral_channel(vsys - throw)]
        b_rms = [cube.closest_spectral_channel(vsys + throw),
                 cube.closest_spectral_channel(vsys - 3*throw)]
        index_rms = first_look.create_index(a_rms, b_rms)
        index_peak = np.arange(cube.closest_spectral_channel(vsys + 3*u.km/u.s),
                               cube.closest_spectral_channel(vsys - 3*u.km/u.s))
        file_out = file_in.replace(file_extension + '.fits',
                                   '_base' + file_extension + '.fits')
        first_look.baseline(file_in, file_out,
                            index_clean=index_rms, polyorder=1)
        first_look.peak_rms(file_out, index_rms=index_rms,
                            index_peak=index_peak)
def FirstLook_OrionB_NGC2068_2071(file_extension='_all'):
    """
    Create First Look products for OrionB_NGC2068_2071.

    Parameters
    ----------
    file_extension : str
        Suffix selecting which files to process (default '_all').
    """
    region_name = 'OrionB_NGC2068-2071'
    print("Now NH3(1,1)")
    # Line-free channel windows (start/end pairs) used for the baseline fit.
    a_rms = [0, 120, 270, 390, 480, 640]
    b_rms = [60, 230, 330, 440, 600, 740]
    index_rms = first_look.create_index(a_rms, b_rms)
    # Channels expected to contain the NH3(1,1) emission.
    index_peak = np.arange(330, 390)
    file_in = '{0}/{0}_NH3_11{1}.fits'.format(region_name, file_extension)
    file_out = file_in.replace(file_extension + '.fits',
                               '_base' + file_extension + '.fits')
    first_look.baseline(file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms(file_out, index_rms=index_rms, index_peak=index_peak)
    print("Now NH3(2,2)")
    linelist = ['NH3_22', 'NH3_33', 'C2S', 'HC5N', 'HC7N_21_20', 'HC7N_22_21']
    vsys = 10.0*u.km/u.s
    throw = 2.0*u.km/u.s
    for transition in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name, transition,
                                               file_extension)
        cube = SpectralCube.read(file_in).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        # Emission-free windows bracketing the line for the rms estimate.
        a_rms = [cube.closest_spectral_channel(vsys + 3*throw),
                 cube.closest_spectral_channel(vsys - throw)]
        b_rms = [cube.closest_spectral_channel(vsys + throw),
                 cube.closest_spectral_channel(vsys - 3*throw)]
        index_rms = first_look.create_index(a_rms, b_rms)
        index_peak = np.arange(cube.closest_spectral_channel(vsys + 3*u.km/u.s),
                               cube.closest_spectral_channel(vsys - 3*u.km/u.s))
        file_out = file_in.replace(file_extension + '.fits',
                                   '_base' + file_extension + '.fits')
        first_look.baseline(file_in, file_out,
                            index_clean=index_rms, polyorder=1)
        first_look.peak_rms(file_out, index_rms=index_rms,
                            index_peak=index_peak)
def FirstLook_L1451(file_extension='_all'):
    """
    Create First Look products for L1451.

    Parameters
    ----------
    file_extension : str
        Suffix selecting which files to process (default '_all').
    """
    region_name = 'L1451'
    print("Now NH3(1,1)")
    # Line-free channel windows (start/end pairs) used for the baseline fit.
    a_rms = [0, 155, 310, 420, 525, 680]
    b_rms = [70, 245, 350, 460, 625, 740]
    index_rms = first_look.create_index(a_rms, b_rms)
    # Channels expected to contain the NH3(1,1) emission.
    index_peak = np.arange(350, 415)
    file_in = '{0}/{0}_NH3_11{1}.fits'.format(region_name, file_extension)
    file_out = file_in.replace(file_extension + '.fits',
                               '_base' + file_extension + '.fits')
    first_look.baseline(file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms(file_out, index_rms=index_rms, index_peak=index_peak)
    print("Now NH3(2,2)")
    linelist = ['NH3_22', 'NH3_33', 'C2S', 'HC5N', 'HC7N_21_20', 'HC7N_22_21']
    vsys = 4.3*u.km/u.s
    throw = 2.0*u.km/u.s
    for transition in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name, transition,
                                               file_extension)
        cube = SpectralCube.read(file_in).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        # Emission-free windows bracketing the line for the rms estimate.
        a_rms = [cube.closest_spectral_channel(vsys + 3*throw),
                 cube.closest_spectral_channel(vsys - throw)]
        b_rms = [cube.closest_spectral_channel(vsys + throw),
                 cube.closest_spectral_channel(vsys - 3*throw)]
        index_rms = first_look.create_index(a_rms, b_rms)
        index_peak = np.arange(cube.closest_spectral_channel(vsys + 3*u.km/u.s),
                               cube.closest_spectral_channel(vsys - 3*u.km/u.s))
        file_out = file_in.replace(file_extension + '.fits',
                                   '_base' + file_extension + '.fits')
        first_look.baseline(file_in, file_out,
                            index_clean=index_rms, polyorder=1)
        first_look.peak_rms(file_out, index_rms=index_rms,
                            index_peak=index_peak)
def FirstLook_IC5146(file_extension='_all'):
    """
    Create First Look products for IC5146.

    Parameters
    ----------
    file_extension : str
        Suffix selecting which files to process (default '_all').
    """
    region_name = 'IC5146'
    print("Now NH3(1,1)")
    # Line-free channel windows (start/end pairs) used for the baseline fit.
    a_rms = [0, 135, 290, 405, 505, 660]
    b_rms = [70, 235, 340, 445, 615, 740]
    index_rms = first_look.create_index(a_rms, b_rms)
    # Channels expected to contain the NH3(1,1) emission.
    index_peak = np.arange(350, 410)
    file_in = '{0}/{0}_NH3_11{1}.fits'.format(region_name, file_extension)
    file_out = file_in.replace(file_extension + '.fits',
                               '_base' + file_extension + '.fits')
    first_look.baseline(file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms(file_out, index_rms=index_rms, index_peak=index_peak)
    print("Now NH3(2,2)")
    linelist = ['NH3_22', 'NH3_33', 'C2S', 'HC5N', 'HC7N_21_20', 'HC7N_22_21']
    vsys = 4.0*u.km/u.s
    throw = 2.0*u.km/u.s
    for transition in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name, transition,
                                               file_extension)
        cube = SpectralCube.read(file_in).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        # Emission-free windows bracketing the line for the rms estimate.
        a_rms = [cube.closest_spectral_channel(vsys + 3*throw),
                 cube.closest_spectral_channel(vsys - throw)]
        b_rms = [cube.closest_spectral_channel(vsys + throw),
                 cube.closest_spectral_channel(vsys - 3*throw)]
        index_rms = first_look.create_index(a_rms, b_rms)
        index_peak = np.arange(cube.closest_spectral_channel(vsys + 3*u.km/u.s),
                               cube.closest_spectral_channel(vsys - 3*u.km/u.s))
        file_out = file_in.replace(file_extension + '.fits',
                                   '_base' + file_extension + '.fits')
        first_look.baseline(file_in, file_out,
                            index_clean=index_rms, polyorder=1)
        first_look.peak_rms(file_out, index_rms=index_rms,
                            index_peak=index_peak)
def FirstLook_template(file_extension='_all'):
    """
    Template for creating First Look products for a new region; copy and
    replace 'TEMPLATE' plus the channel windows and vsys.

    Parameters
    ----------
    file_extension : str
        Suffix selecting which files to process (default '_all').
    """
    region_name = 'TEMPLATE'
    print("Now NH3(1,1)")
    # Line-free channel windows (start/end pairs) used for the baseline fit.
    a_rms = [0, 135, 290, 405, 505, 665]
    b_rms = [70, 245, 350, 455, 625, 740]
    index_rms = first_look.create_index(a_rms, b_rms)
    # Channels expected to contain the NH3(1,1) emission.
    index_peak = np.arange(350, 410)
    file_in = '{0}/{0}_NH3_11{1}.fits'.format(region_name, file_extension)
    file_out = file_in.replace(file_extension + '.fits',
                               '_base' + file_extension + '.fits')
    first_look.baseline(file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms(file_out, index_rms=index_rms, index_peak=index_peak)
    print("Now NH3(2,2)")
    linelist = ['NH3_22', 'NH3_33', 'C2S', 'HC5N', 'HC7N_21_20', 'HC7N_22_21']
    vsys = 7.3*u.km/u.s
    throw = 2.0*u.km/u.s
    for transition in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name, transition,
                                               file_extension)
        cube = SpectralCube.read(file_in).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        # Emission-free windows bracketing the line for the rms estimate.
        a_rms = [cube.closest_spectral_channel(vsys + 3*throw),
                 cube.closest_spectral_channel(vsys - throw)]
        b_rms = [cube.closest_spectral_channel(vsys + throw),
                 cube.closest_spectral_channel(vsys - 3*throw)]
        index_rms = first_look.create_index(a_rms, b_rms)
        index_peak = np.arange(cube.closest_spectral_channel(vsys + 3*u.km/u.s),
                               cube.closest_spectral_channel(vsys - 3*u.km/u.s))
        file_out = file_in.replace(file_extension + '.fits',
                                   '_base' + file_extension + '.fits')
        first_look.baseline(file_in, file_out,
                            index_clean=index_rms, polyorder=1)
        first_look.peak_rms(file_out, index_rms=index_rms,
                            index_peak=index_peak)
def FirstLook_SerMWC(file_extension='_all'):
    """
    Create First Look products for Serpens_MWC297.

    Parameters
    ----------
    file_extension : str
        Suffix selecting which files to process (default '_all').
    """
    region_name = 'Serpens_MWC297'
    print("Now NH3(1,1)")
    # Line-free channel windows (start/end pairs) used for the baseline fit.
    a_rms = [0, 150, 310, 420, 530, 690]
    b_rms = [60, 230, 330, 440, 610, 780]
    index_rms = first_look.create_index(a_rms, b_rms)
    # Channels expected to contain the NH3(1,1) emission.
    index_peak = np.arange(340, 420)
    file_in = '{0}/{0}_NH3_11{1}.fits'.format(region_name, file_extension)
    file_out = file_in.replace(file_extension + '.fits',
                               '_base' + file_extension + '.fits')
    first_look.baseline(file_in, file_out, index_clean=index_rms, polyorder=1)
    first_look.peak_rms(file_out, index_rms=index_rms, index_peak=index_peak)
    #
    linelist = ['NH3_22', 'NH3_33', 'C2S', 'HC5N', 'HC7N_21_20', 'HC7N_22_21']
    vsys = 6.35*u.km/u.s
    throw = 8*u.km/u.s
    for transition in linelist:
        file_in = '{0}/{0}_{1}{2}.fits'.format(region_name, transition,
                                               file_extension)
        cube = SpectralCube.read(file_in).with_spectral_unit(
            u.km/u.s, velocity_convention='radio')
        # rms windows at +/- (1-2)x throw (wider throw than other regions).
        a_rms = [cube.closest_spectral_channel(vsys + 2*throw),
                 cube.closest_spectral_channel(vsys - throw)]
        b_rms = [cube.closest_spectral_channel(vsys + throw),
                 cube.closest_spectral_channel(vsys - 2*throw)]
        index_rms = first_look.create_index(a_rms, b_rms)
        index_peak = np.arange(cube.closest_spectral_channel(vsys + 3*u.km/u.s),
                               cube.closest_spectral_channel(vsys - 3*u.km/u.s))
        file_out = file_in.replace(file_extension + '.fits',
                                   '_base' + file_extension + '.fits')
        first_look.baseline(file_in, file_out,
                            index_clean=index_rms, polyorder=1)
        first_look.peak_rms(file_out, index_rms=index_rms,
                            index_peak=index_peak)
| 47.525811
| 143
| 0.622934
| 9,442
| 64,445
| 3.959966
| 0.06058
| 0.056486
| 0.047285
| 0.075047
| 0.874753
| 0.865686
| 0.860845
| 0.859829
| 0.852715
| 0.842792
| 0
| 0.056796
| 0.245124
| 64,445
| 1,355
| 144
| 47.560886
| 0.711787
| 0.234138
| 0
| 0.768953
| 0
| 0
| 0.083738
| 0.011799
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027678
| false
| 0
| 0.015644
| 0
| 0.044525
| 0.049338
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
df8554c81bd0eb980b3145b5ad27fbba7ea10b66
| 5,242
|
py
|
Python
|
DatagramIterator.py
|
ethanlindley/datagram-python
|
09deaee853cfb084a7e619a12afe8b0221e6edae
|
[
"MIT"
] | 1
|
2018-05-17T07:04:09.000Z
|
2018-05-17T07:04:09.000Z
|
DatagramIterator.py
|
ethanlindley/datagram-python
|
09deaee853cfb084a7e619a12afe8b0221e6edae
|
[
"MIT"
] | null | null | null |
DatagramIterator.py
|
ethanlindley/datagram-python
|
09deaee853cfb084a7e619a12afe8b0221e6edae
|
[
"MIT"
] | null | null | null |
import struct
from .Datagram import Datagram
class DatagramIterator:
    """Sequential reader over the byte payload of a Datagram.

    A cursor (``self.index``) tracks the current byte position in
    ``dg``'s data; every successful numeric read advances it by the
    number of bytes consumed.  Reads that would run past the end of the
    datagram return ``None`` (preserving the original guard style).

    Fixes over the previous revision: every numeric getter used a
    4-byte struct format ('<i'/'<I'/'<f') applied to a single indexed
    element (which raises TypeError on bytes payloads in Python 3),
    int16/int64 reused the int32 format, float64 reused the float32
    format, and the cursor always advanced by 1 regardless of value
    size.  All getters now use the correct format code and advance the
    cursor by the value's actual byte size.
    """

    def __init__(self, dg, index=0):
        self.dg = dg
        # let's keep track of our position in the datagram
        self.index = index

    def _read(self, fmt):
        """Unpack one value of struct format *fmt* at the cursor.

        Returns the value and advances ``self.index`` by
        ``struct.calcsize(fmt)``, or returns ``None`` when not enough
        bytes remain.
        """
        size = struct.calcsize(fmt)
        if self.index + size <= self.dg.get_length():
            value = struct.unpack_from(fmt, self.dg.get_data(), self.index)[0]
            self.index += size
            return value

    def get_bool(self):
        return self.get_uint8() != 0

    def get_string(self):
        # NOTE(review): returns a single element of the underlying data
        # (an int for a bytes payload) — looks like a placeholder;
        # confirm the intended string framing with the Datagram writer.
        if self.index < self.dg.get_length():
            data = self.dg.get_data()
            temp = data[self.index]
            self.index += 1
            return temp

    def get_string32(self):
        # Same placeholder behaviour as get_string(); a 32-bit
        # length-prefixed read is presumably intended — TODO confirm.
        if self.index < self.dg.get_length():
            data = self.dg.get_data()
            temp = data[self.index]
            self.index += 1
            return temp

    def get_z_string(self):
        # TODO: read a NUL-terminated string
        pass

    def get_fixed_string(self, size):
        # TODO: read exactly *size* bytes as a string
        pass

    def get_wstring(self):
        # TODO: read a wide (length-prefixed UTF-16?) string
        pass

    # little endian numeric unpacking
    def get_int8(self):
        return self._read('<b')

    def get_int16(self):
        return self._read('<h')

    def get_int32(self):
        return self._read('<i')

    def get_int64(self):
        return self._read('<q')

    def get_uint8(self):
        return self._read('<B')

    def get_uint16(self):
        return self._read('<H')

    def get_uint32(self):
        return self._read('<I')

    def get_uint64(self):
        return self._read('<Q')

    def get_float32(self):
        return self._read('<f')

    def get_float64(self):
        return self._read('<d')

    # big endian numeric unpacking
    def get_be_int16(self):
        return self._read('>h')

    def get_be_int32(self):
        return self._read('>i')

    def get_be_int64(self):
        return self._read('>q')

    def get_be_uint16(self):
        return self._read('>H')

    def get_be_uint32(self):
        return self._read('>I')

    def get_be_uint64(self):
        return self._read('>Q')

    def get_be_float32(self):
        return self._read('>f')

    def get_be_float64(self):
        return self._read('>d')

    def get_remaining_size(self):
        """Return the unread tail of the datagram as bytes.

        The cursor is NOT advanced; returns None when nothing remains
        (preserving the original guard behaviour).
        """
        if self.index < self.dg.get_length():
            return bytes(self.dg.get_data()[self.index:])
| 29.615819
| 59
| 0.512018
| 680
| 5,242
| 3.823529
| 0.086765
| 0.218077
| 0.148846
| 0.126923
| 0.850385
| 0.843077
| 0.843077
| 0.843077
| 0.831538
| 0.831538
| 0
| 0.023221
| 0.359214
| 5,242
| 176
| 60
| 29.784091
| 0.750819
| 0.017932
| 0
| 0.727273
| 0
| 0
| 0.007001
| 0
| 0
| 0
| 0
| 0.005682
| 0
| 1
| 0.181818
| false
| 0.020979
| 0.013986
| 0.006993
| 0.356643
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
10ca690c28d72a3054fba8a3ad52d990e62cf508
| 3,430
|
py
|
Python
|
script_create_train_test.py
|
kunovg/EcoBici
|
f0f22ccff46320e5f9ef238424dc83ca9ddbf50d
|
[
"MIT"
] | null | null | null |
script_create_train_test.py
|
kunovg/EcoBici
|
f0f22ccff46320e5f9ef238424dc83ca9ddbf50d
|
[
"MIT"
] | null | null | null |
script_create_train_test.py
|
kunovg/EcoBici
|
f0f22ccff46320e5f9ef238424dc83ca9ddbf50d
|
[
"MIT"
] | null | null | null |
from datetime import datetime as dt, timedelta as td
import models as m
from sqlalchemy import and_, func, or_, extract
from tqdm import tqdm
# --- Trip-count time-series extraction --------------------------------------
# For each `minutes_delta`-minute slot on weekdays (skipping the dead window
# between 00:30 and 05:00) count the trips departing in that slot and write
# one count per line.  The sweep runs once globally (train/test) and once per
# station cluster.  The four previous copy-pasted loops are factored into a
# single helper.

def _write_slot_counts(path, start, end, minutes_delta=15, ids=None,
                       progress_total=None, echo=True):
    """Write one trip count per time slot of [start, end) to *path*.

    Parameters
    ----------
    path : str
        Output CSV path (one count per line).
    start, end : datetime
        Half-open interval to sweep.
    minutes_delta : int
        Slot length in minutes.
    ids : list or None
        When given, only trips departing from or arriving at these
        stations are counted.
    progress_total : int or None
        When given, show a tqdm progress bar with this total instead of
        printing each slot (matches the original clustered-train loop).
    echo : bool
        Print each processed slot's ISO timestamp (original behaviour of
        the other three loops).
    """
    with open(path, 'w') as file:
        pbar = tqdm(total=progress_total) if progress_total is not None else None
        while start < end:
            # Weekdays only; jump a whole day otherwise.
            if start.isoweekday() not in range(1, 6):
                start += td(days=1)
                continue
            # Skip the low-traffic window between 00:30 and 05:00.
            if (start.hour == 0 and start.minute > 30) or start.hour < 5:
                start += td(minutes=minutes_delta)
                continue
            if echo:
                print(start.isoformat())
            filters = [
                m.Trip.departure_time >= start.isoformat(),
                m.Trip.departure_time <= (start + td(minutes=minutes_delta)).isoformat(),
            ]
            if ids is not None:
                # Restrict to trips touching the cluster's stations.
                filters.insert(0, or_(m.Trip.departure_station.in_(ids),
                                      m.Trip.arrival_station.in_(ids)))
            file.write("%s\n" % m.s.query(m.Trip).filter(and_(*filters)).count())
            if pbar is not None:
                pbar.update()
            start += td(minutes=minutes_delta)
        if pbar is not None:
            pbar.close()

# Global train (2015-2016) and test (2017) series.
_write_slot_counts('trips_train_3.csv', dt(2015, 1, 1), dt(2017, 1, 1))
_write_slot_counts('trips_test_2.csv', dt(2017, 1, 1), dt(2018, 1, 1))

# Train para los clusters
import json
cluster_list = json.load(open("clusters_list.json"))
for cluster, ids in cluster_list.items():
    _write_slot_counts('cluster_data/trips_train_cluster%s.csv' % cluster,
                       dt(2015, 1, 1), dt(2017, 1, 1), ids=ids,
                       progress_total=39673, echo=False)
for cluster, ids in cluster_list.items():
    _write_slot_counts('cluster_data/trips_test_cluster%s.csv' % cluster,
                       dt(2017, 1, 1), dt(2018, 1, 1), ids=ids)
| 38.111111
| 89
| 0.567638
| 459
| 3,430
| 4.124183
| 0.167756
| 0.101426
| 0.088748
| 0.133122
| 0.874274
| 0.860539
| 0.860539
| 0.837824
| 0.832541
| 0.799789
| 0
| 0.037634
| 0.295044
| 3,430
| 89
| 90
| 38.539326
| 0.745244
| 0.006706
| 0
| 0.8375
| 0
| 0
| 0.042878
| 0.022026
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.0625
| 0
| 0.0625
| 0.0375
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
10e0d15d363ecc58f79a6d23bfea548a338b48ac
| 14,999
|
py
|
Python
|
psono/administration/tests/session.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 48
|
2018-04-19T15:50:58.000Z
|
2022-01-23T15:58:11.000Z
|
psono/administration/tests/session.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 9
|
2018-09-13T14:56:18.000Z
|
2020-01-17T16:44:33.000Z
|
psono/administration/tests/session.py
|
dirigeant/psono-server
|
a18c5b3c4d8bbbe4ecf1615b210d99fb77752205
|
[
"Apache-2.0",
"CC0-1.0"
] | 11
|
2019-09-20T11:53:47.000Z
|
2021-07-18T22:41:31.000Z
|
from django.urls import reverse
from django.conf import settings
from django.contrib.auth.hashers import make_password
from django.utils import timezone
from rest_framework import status
from datetime import timedelta
import random
import string
import binascii
import os
from restapi import models
from restapi.tests.base import APITestCaseExtended
from restapi.utils import encrypt_with_db_secret
class ReadSessionTests(APITestCaseExtended):
    """Tests for GET on the admin session endpoint."""

    def setUp(self):
        # Randomised emails/usernames so repeated runs cannot collide on
        # unique columns.
        self.test_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
        self.test_email2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com'
        self.test_email_bcrypt = 'a'
        self.test_email_bcrypt2 = 'b'
        self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
        self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
        # Random hex key material with the byte lengths the server settings demand.
        self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
        self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
        self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
        self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
        self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
        # Regular (non-admin) user.
        self.test_user_obj = models.User.objects.create(
            email=self.test_email,
            email_bcrypt=self.test_email_bcrypt,
            username=self.test_username,
            authkey=make_password(self.test_authkey),
            public_key=self.test_public_key,
            private_key=self.test_private_key,
            private_key_nonce=self.test_private_key_nonce,
            secret_key=self.test_secret_key,
            secret_key_nonce=self.test_secret_key_nonce,
            user_sauce=self.test_user_sauce,
            is_email_active=True
        )
        self.test_group_obj = models.Group.objects.create(
            name = 'Test Group',
            public_key = 'a123',
        )
        # NOTE(review): "ob2" looks like a typo for "obj2" — left as-is
        # in case other code references this attribute name.
        self.test_group_ob2 = models.Group.objects.create(
            name = 'Test Group',
            public_key = 'a123',
        )
        # Superuser allowed to call the admin endpoints.
        self.admin = models.User.objects.create(
            email=self.test_email2,
            email_bcrypt=self.test_email_bcrypt2,
            username=self.test_username2,
            authkey=make_password(self.test_authkey),
            public_key=self.test_public_key,
            private_key=self.test_private_key,
            private_key_nonce=self.test_private_key_nonce2,
            secret_key=self.test_secret_key,
            secret_key_nonce=self.test_secret_key_nonce2,
            user_sauce=self.test_user_sauce,
            is_email_active=True,
            is_superuser=True
        )

    def test_read_session_success(self):
        """
        Tests GET method on session
        """
        url = reverse('admin_session')

        data = {
        }

        self.client.force_authenticate(user=self.admin)
        response = self.client.get(url, data)

        # No tokens were created in setUp, so the session list is empty.
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data['sessions']), 0)
class CreateSessionTests(APITestCaseExtended):
    """Tests that PUT (create) on the admin session endpoint is rejected."""

    def setUp(self):
        # Randomised emails/usernames so repeated runs cannot collide on
        # unique columns.
        self.test_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
        self.test_email2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com'
        self.test_email_bcrypt = 'a'
        self.test_email_bcrypt2 = 'b'
        self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
        self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
        # Random hex key material with the byte lengths the server settings demand.
        self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
        self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
        self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
        self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
        self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
        # Regular (non-admin) user.
        self.test_user_obj = models.User.objects.create(
            email=self.test_email,
            email_bcrypt=self.test_email_bcrypt,
            username=self.test_username,
            authkey=make_password(self.test_authkey),
            public_key=self.test_public_key,
            private_key=self.test_private_key,
            private_key_nonce=self.test_private_key_nonce,
            secret_key=self.test_secret_key,
            secret_key_nonce=self.test_secret_key_nonce,
            user_sauce=self.test_user_sauce,
            is_email_active=True
        )
        # Superuser allowed to call the admin endpoints.
        self.admin = models.User.objects.create(
            email=self.test_email2,
            email_bcrypt=self.test_email_bcrypt2,
            username=self.test_username2,
            authkey=make_password(self.test_authkey),
            public_key=self.test_public_key,
            private_key=self.test_private_key,
            private_key_nonce=self.test_private_key_nonce2,
            secret_key=self.test_secret_key,
            secret_key_nonce=self.test_secret_key_nonce2,
            user_sauce=self.test_user_sauce,
            is_email_active=True,
            is_superuser=True
        )
        # Session token belonging to the regular user.
        self.token = models.Token.objects.create(
            key=''.join(random.choice(string.ascii_lowercase) for _ in range(64)),
            user=self.test_user_obj
        )

    def test_create_session(self):
        """
        Tests PUT method on session
        """
        url = reverse('admin_session')

        data = {
        }

        self.client.force_authenticate(user=self.admin)
        response = self.client.put(url, data)

        # Sessions cannot be created through this endpoint.
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class UpdateSessionTests(APITestCaseExtended):
    """Tests that POST (update) on the admin session endpoint is rejected."""

    def setUp(self):
        # Randomised emails/usernames so repeated runs cannot collide on
        # unique columns.
        self.test_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
        self.test_email2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com'
        self.test_email_bcrypt = 'a'
        self.test_email_bcrypt2 = 'b'
        self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
        self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
        # Random hex key material with the byte lengths the server settings demand.
        self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
        self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
        self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
        self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
        self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
        # Regular (non-admin) user.
        self.test_user_obj = models.User.objects.create(
            email=self.test_email,
            email_bcrypt=self.test_email_bcrypt,
            username=self.test_username,
            authkey=make_password(self.test_authkey),
            public_key=self.test_public_key,
            private_key=self.test_private_key,
            private_key_nonce=self.test_private_key_nonce,
            secret_key=self.test_secret_key,
            secret_key_nonce=self.test_secret_key_nonce,
            user_sauce=self.test_user_sauce,
            is_email_active=True
        )
        # Superuser allowed to call the admin endpoints.
        self.admin = models.User.objects.create(
            email=self.test_email2,
            email_bcrypt=self.test_email_bcrypt2,
            username=self.test_username2,
            authkey=make_password(self.test_authkey),
            public_key=self.test_public_key,
            private_key=self.test_private_key,
            private_key_nonce=self.test_private_key_nonce2,
            secret_key=self.test_secret_key,
            secret_key_nonce=self.test_secret_key_nonce2,
            user_sauce=self.test_user_sauce,
            is_email_active=True,
            is_superuser=True
        )
        # Session token belonging to the regular user.
        self.token = models.Token.objects.create(
            key=''.join(random.choice(string.ascii_lowercase) for _ in range(64)),
            user=self.test_user_obj
        )

    def test_update_session(self):
        """
        Tests POST method on session
        """
        url = reverse('admin_session')

        data = {
        }

        self.client.force_authenticate(user=self.admin)
        response = self.client.post(url, data)

        # Sessions cannot be updated through this endpoint.
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class DeleteSessionTests(APITestCaseExtended):
    """Tests for the DELETE method on the admin session endpoint."""

    def setUp(self):
        # Randomized identities so repeated runs never collide on unique fields.
        self.test_email = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@example.com'
        self.test_email2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@example.com'
        self.test_email_bcrypt = 'a'
        self.test_email_bcrypt2 = 'b'
        self.test_username = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test1@psono.pw'
        self.test_username2 = ''.join(random.choice(string.ascii_lowercase) for _ in range(10)) + 'test2@psono.pw'
        self.test_authkey = binascii.hexlify(os.urandom(settings.AUTH_KEY_LENGTH_BYTES)).decode()
        self.test_public_key = binascii.hexlify(os.urandom(settings.USER_PUBLIC_KEY_LENGTH_BYTES)).decode()
        self.test_private_key = binascii.hexlify(os.urandom(settings.USER_PRIVATE_KEY_LENGTH_BYTES)).decode()
        self.test_private_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_private_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_secret_key = binascii.hexlify(os.urandom(settings.USER_SECRET_KEY_LENGTH_BYTES)).decode()
        self.test_secret_key_nonce = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_secret_key_nonce2 = binascii.hexlify(os.urandom(settings.NONCE_LENGTH_BYTES)).decode()
        self.test_user_sauce = '6df1f310730e5464ce23e05fa4eca0de3fe30805fc8cc1d6b37389262e4bd9c3'
        # Regular user whose session (token) will be deleted.
        self.test_user_obj = models.User.objects.create(
            email=self.test_email,
            email_bcrypt=self.test_email_bcrypt,
            username=self.test_username,
            authkey=make_password(self.test_authkey),
            public_key=self.test_public_key,
            private_key=self.test_private_key,
            private_key_nonce=self.test_private_key_nonce,
            secret_key=self.test_secret_key,
            secret_key_nonce=self.test_secret_key_nonce,
            user_sauce=self.test_user_sauce,
            is_email_active=True
        )
        # Superuser performing the admin calls.
        self.admin = models.User.objects.create(
            email=self.test_email2,
            email_bcrypt=self.test_email_bcrypt2,
            username=self.test_username2,
            authkey=make_password(self.test_authkey),
            public_key=self.test_public_key,
            private_key=self.test_private_key,
            private_key_nonce=self.test_private_key_nonce2,
            secret_key=self.test_secret_key,
            secret_key_nonce=self.test_secret_key_nonce2,
            user_sauce=self.test_user_sauce,
            is_email_active=True,
            is_superuser=True
        )
        # The session under test.
        self.token = models.Token.objects.create(
            key=''.join(random.choice(string.ascii_lowercase) for _ in range(64)),
            user=self.test_user_obj
        )

    def test_delete_session_success(self):
        """
        Tests DELETE method on session
        """

        url = reverse('admin_session')

        data = {
            'session_id': self.token.id
        }

        self.client.force_authenticate(user=self.admin)
        response = self.client.delete(url, data)

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Fixed: the original asserted on models.Duo (copy-paste from the Duo
        # tests) which is vacuously 0 here; a successful session delete must
        # remove the Token itself.
        self.assertEqual(models.Token.objects.filter(id=self.token.id).count(), 0)

    def test_delete_session_failure_no_admin(self):
        """
        Tests DELETE method on session without being an admin
        """

        url = reverse('admin_session')

        data = {
            'session_id': self.token.id
        }

        self.client.force_authenticate(user=self.test_user_obj)
        response = self.client.delete(url, data)

        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_delete_session_failure_no_session_id(self):
        """
        Tests DELETE method on session without a session id
        """

        url = reverse('admin_session')

        data = {
        }

        self.client.force_authenticate(user=self.admin)
        response = self.client.delete(url, data)

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_delete_session_failure_session_id_not_exist(self):
        """
        Tests DELETE method on session with a session id that does not exist
        """

        url = reverse('admin_session')

        data = {
            'session_id': '499d3c84-e8ae-4a6b-a4c2-43c79beb069a'
        }

        self.client.force_authenticate(user=self.admin)
        response = self.client.delete(url, data)

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
| 43.100575
| 114
| 0.684112
| 1,857
| 14,999
| 5.204093
| 0.07539
| 0.124172
| 0.056291
| 0.07947
| 0.923531
| 0.919599
| 0.90894
| 0.897661
| 0.896109
| 0.896109
| 0
| 0.025075
| 0.218281
| 14,999
| 347
| 115
| 43.224784
| 0.799147
| 0.019335
| 0
| 0.794677
| 0
| 0
| 0.048467
| 0.020074
| 0
| 0
| 0
| 0
| 0.034221
| 1
| 0.041825
| false
| 0.034221
| 0.04943
| 0
| 0.106464
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
80027d271b2b832a4038db11c12c18abf63fb514
| 1,751
|
py
|
Python
|
app/core/models.py
|
danish21752/audio_apis
|
932a74c51e67394b1c3cba0bcd22a636ea11aeb1
|
[
"MIT"
] | null | null | null |
app/core/models.py
|
danish21752/audio_apis
|
932a74c51e67394b1c3cba0bcd22a636ea11aeb1
|
[
"MIT"
] | null | null | null |
app/core/models.py
|
danish21752/audio_apis
|
932a74c51e67394b1c3cba0bcd22a636ea11aeb1
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.postgres.fields import ArrayField
from core import services
class Song(models.Model):
    """A single audio track."""

    # UUID primary key generated by the shared service helper.
    id = models.UUIDField(primary_key=True, editable=False, db_index=True,
                          default=services.CommonService.generate_id)
    name = models.CharField(null=False, blank=False, max_length=100)
    duration = models.IntegerField(null=False, blank=False, default=0)
    uploaded_time = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        """Human-readable representation used by the admin."""
        return self.name
class Podcast(models.Model):
    """An episodic audio recording with a host and up to ten participants."""

    # UUID primary key generated by the shared service helper.
    id = models.UUIDField(primary_key=True, editable=False, db_index=True,
                          default=services.CommonService.generate_id)
    name = models.CharField(null=False, blank=False, max_length=100)
    duration = models.IntegerField(null=False, blank=False, default=0)
    uploaded_time = models.DateTimeField(auto_now_add=True)
    host = models.CharField(null=False, blank=False, max_length=100)
    # Postgres-backed list of participant names, capped at ten entries.
    participants = ArrayField(
        models.CharField(blank=True, null=True, max_length=100),
        size=10,
    )

    def __str__(self):
        """Human-readable representation used by the admin."""
        return self.name
class Audiobook(models.Model):
    """A narrated book recording."""

    # UUID primary key generated by the shared service helper.
    id = models.UUIDField(primary_key=True, editable=False, db_index=True,
                          default=services.CommonService.generate_id)
    title = models.CharField(null=False, blank=False, max_length=100)
    author = models.CharField(null=False, blank=False, max_length=100)
    narrator = models.CharField(null=False, blank=False, max_length=100)
    duration = models.IntegerField(null=False, blank=False, default=0)
    uploaded_time = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        """Human-readable representation used by the admin."""
        return self.title
| 31.267857
| 72
| 0.698458
| 218
| 1,751
| 5.440367
| 0.247706
| 0.068297
| 0.106239
| 0.144182
| 0.838954
| 0.816189
| 0.816189
| 0.784148
| 0.784148
| 0.667791
| 0
| 0.018545
| 0.199315
| 1,751
| 55
| 73
| 31.836364
| 0.827389
| 0
| 0
| 0.622222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.066667
| 0.066667
| 0.622222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
338a165dcffb3030d04f841ad00dfd870bc1e703
| 72
|
py
|
Python
|
stan/plotter/__init__.py
|
itsens/stan
|
1632a55bf752bba09f3459e333f2a6970e1148c4
|
[
"MIT"
] | null | null | null |
stan/plotter/__init__.py
|
itsens/stan
|
1632a55bf752bba09f3459e333f2a6970e1148c4
|
[
"MIT"
] | null | null | null |
stan/plotter/__init__.py
|
itsens/stan
|
1632a55bf752bba09f3459e333f2a6970e1148c4
|
[
"MIT"
] | null | null | null |
from .plotly_graph import PlotlyGraph
from .plotly_graph import SarGraph
| 36
| 37
| 0.875
| 10
| 72
| 6.1
| 0.6
| 0.327869
| 0.491803
| 0.688525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097222
| 72
| 2
| 38
| 36
| 0.938462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
33a67873dc20f21cfeb6d0b297a2566c006bf90d
| 196
|
py
|
Python
|
mdutils/__init__.py
|
Alexmod/mdutils
|
97d28346fabb851e068963da6dabfbcaa145c4e0
|
[
"MIT"
] | 110
|
2019-09-10T13:28:40.000Z
|
2022-03-27T23:38:41.000Z
|
mdutils/__init__.py
|
Alexmod/mdutils
|
97d28346fabb851e068963da6dabfbcaa145c4e0
|
[
"MIT"
] | 43
|
2018-05-05T00:56:48.000Z
|
2022-03-27T16:06:37.000Z
|
mdutils/__init__.py
|
Alexmod/mdutils
|
97d28346fabb851e068963da6dabfbcaa145c4e0
|
[
"MIT"
] | 39
|
2019-12-19T03:54:54.000Z
|
2022-03-28T22:08:01.000Z
|
from mdutils.fileutils import fileutils
from mdutils.mdutils import MdUtils
from mdutils.tools import Header, Link, Image, TextUtils, Table, TableOfContents
from mdutils.tools import Html, MDList
| 39.2
| 80
| 0.836735
| 26
| 196
| 6.307692
| 0.5
| 0.268293
| 0.195122
| 0.268293
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112245
| 196
| 4
| 81
| 49
| 0.942529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
1d6eb29111ad1ce0c30eef535770cf4ac323a3ad
| 169
|
py
|
Python
|
klar_eda/visualize/__init__.py
|
Sibasish-Padhy/klar-EDA
|
6186981fda87e367a0013d4568e7f2fd094e56ed
|
[
"MIT"
] | 17
|
2020-09-27T12:18:32.000Z
|
2022-02-27T13:54:12.000Z
|
klar_eda/visualize/__init__.py
|
Sibasish-Padhy/klar-EDA
|
6186981fda87e367a0013d4568e7f2fd094e56ed
|
[
"MIT"
] | 32
|
2020-09-27T18:48:23.000Z
|
2022-03-01T04:48:57.000Z
|
klar_eda/visualize/__init__.py
|
Sibasish-Padhy/klar-EDA
|
6186981fda87e367a0013d4568e7f2fd094e56ed
|
[
"MIT"
] | 27
|
2020-09-27T12:30:38.000Z
|
2022-02-24T18:48:30.000Z
|
from . import constants
from . import csv_visualize
from . import image_visualize
from . import visualize
import pkg_resources

# Register this package as a pkg_resources-style namespace package.
# NOTE(review): pkg_resources namespace support is deprecated in favor of
# native (PEP 420) namespace packages -- confirm before relying on it.
pkg_resources.declare_namespace(__name__)
| 24.142857
| 41
| 0.846154
| 22
| 169
| 6.090909
| 0.5
| 0.298507
| 0.283582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112426
| 169
| 6
| 42
| 28.166667
| 0.893333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.833333
| 0
| 0.833333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1da94c6022cfed4414dc46a5f967908db9100af6
| 4,408
|
py
|
Python
|
tests/test_cache_decorator.py
|
sslivkoff/toolcache
|
62fb3441adb03fdee4fdbca14605f0ecec2ad44c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_cache_decorator.py
|
sslivkoff/toolcache
|
62fb3441adb03fdee4fdbca14605f0ecec2ad44c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_cache_decorator.py
|
sslivkoff/toolcache
|
62fb3441adb03fdee4fdbca14605f0ecec2ad44c
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import toolcache
cachetypes = ['memory', 'disk']
@pytest.mark.parametrize('cachetype', cachetypes)
def test_cachetype_memory(cachetype):
    """Repeating an identical call executes the wrapped function only once."""
    call_tally = {}

    @toolcache.cache(cachetype=cachetype)
    def test_function(a, b, c):
        call_tally[test_function] = call_tally.get(test_function, 0) + 1
        return a * b * c

    first = test_function(1, 2, 3)
    assert call_tally[test_function] == 1
    second = test_function(1, 2, 3)
    assert call_tally[test_function] == 1
    assert first == second
@pytest.mark.parametrize('cachetype', cachetypes)
def test_cache_varkw(cachetype):
    """A **kwargs-only signature caches per distinct keyword set."""
    call_tally = {}

    @toolcache.cache(cachetype=cachetype)
    def test_function(**kwargs):
        call_tally[test_function] = call_tally.get(test_function, 0) + 1
        return sum(kwargs.values())

    first = test_function(a=1, b=2, c=3)
    assert call_tally[test_function] == 1
    second = test_function(a=1, b=2, c=3)
    assert call_tally[test_function] == 1
    assert first == second
    # A changed keyword value is a fresh cache entry...
    third = test_function(a=1, b=2, c=4)
    assert call_tally[test_function] == 2
    assert third == 7
    # ...and so is a different set of keywords.
    fourth = test_function(a=1, b=2)
    assert call_tally[test_function] == 3
    assert fourth == 3
@pytest.mark.parametrize('cachetype', cachetypes)
def test_cache_complex_signature(cachetype):
    """With normalize_hash_inputs, positional and keyword spellings of the
    same call share one cache entry."""
    call_tally = {}

    @toolcache.cache(cachetype=cachetype, normalize_hash_inputs=True)
    def f(a, b=88, *, c, d=99, **extra_kwargs):
        call_tally[f] = call_tally.get(f, 0) + 1
        return (a, b, c, d, extra_kwargs)

    f(0, c=2)
    assert call_tally[f] == 1
    # Same logical call spelled with a keyword argument: must hit the cache.
    f(a=0, c=2)
    assert call_tally[f] == 1
@pytest.mark.parametrize('cachetype', cachetypes)
def test_cache_unhashable_input(cachetype):
    """An unhashable argument (a dict) still yields a stable cache hit."""
    call_tally = {}

    @toolcache.cache(cachetype=cachetype)
    def f(a, b):
        call_tally[f] = call_tally.get(f, 0) + 1
        return (a, b)

    for _ in range(2):
        f(0, b={'a': 4})
        assert call_tally[f] == 1
@pytest.mark.parametrize('cachetype', cachetypes)
def test_cache_hashable_object(cachetype):
    """Passing the same hashable custom object twice hits the cache."""
    call_tally = {}

    @toolcache.cache(cachetype=cachetype)
    def f(a, b):
        call_tally[f] = call_tally.get(f, 0) + 1
        return a

    class InputObject:
        pass

    obj = InputObject()
    for _ in range(2):
        f(0, b=obj)
        assert call_tally[f] == 1
#
# # test particular parameters given to toolcache.cache()
#
@pytest.mark.parametrize('cachetype', cachetypes)
def test_parameter_cache_hash_include_args(cachetype):
    """Only arguments named in hash_include_args contribute to the key."""

    @toolcache.cache(cachetype=cachetype, hash_include_args=['a', 'b'])
    def test_function(a, b, c):
        return a * b * c

    cache_obj = test_function.cache
    assert cache_obj.get_cache_size() == 0
    # (call args, expected cache size afterward): c never affects the key.
    expectations = [
        ((1, 2, 3), 1),
        ((1, 2, 3), 1),
        ((1, 2, 4), 1),
        ((1, 2, 5), 1),
        ((2, 2, 4), 2),
        ((2, 2, 4), 2),
        ((2, 3, 4), 3),
        ((2, 3, 4), 3),
    ]
    for args, expected_size in expectations:
        test_function(*args)
        assert cache_obj.get_cache_size() == expected_size
@pytest.mark.parametrize('cachetype', cachetypes)
def test_parameter_cache_hash_exclude_args(cachetype):
    """Arguments named in hash_exclude_args never contribute to the key."""

    @toolcache.cache(cachetype=cachetype, hash_exclude_args=['a', 'b'])
    def test_function(a, b, c):
        return a * b * c

    cache_obj = test_function.cache
    assert cache_obj.get_cache_size() == 0
    # (call args, expected cache size afterward): only c affects the key.
    expectations = [
        ((1, 2, 3), 1),
        ((1, 2, 3), 1),
        ((1, 2, 4), 2),
        ((1, 2, 5), 3),
        ((2, 2, 4), 3),
    ]
    for args, expected_size in expectations:
        test_function(*args)
        assert cache_obj.get_cache_size() == expected_size
| 28.43871
| 71
| 0.682396
| 625
| 4,408
| 4.5456
| 0.1056
| 0.147835
| 0.161563
| 0.116156
| 0.879268
| 0.873284
| 0.841957
| 0.784583
| 0.726857
| 0.715593
| 0
| 0.03421
| 0.197595
| 4,408
| 154
| 72
| 28.623377
| 0.769013
| 0.012024
| 0
| 0.701754
| 0
| 0
| 0.018169
| 0
| 0
| 0
| 0
| 0
| 0.27193
| 1
| 0.122807
| false
| 0.008772
| 0.017544
| 0.017544
| 0.210526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d51a5b596605b16d5f23adc88a20b79f2e54b532
| 11,495
|
py
|
Python
|
scripts/xdcAddBlocks.py
|
FoRTE-Research/RingRAM
|
091cbc43b2c4088a3564c31f4d0a53731e2b570f
|
[
"MIT"
] | 2
|
2021-12-29T11:54:51.000Z
|
2022-03-01T21:16:29.000Z
|
scripts/xdcAddBlocks.py
|
FoRTE-Research/RingRAM
|
091cbc43b2c4088a3564c31f4d0a53731e2b570f
|
[
"MIT"
] | null | null | null |
scripts/xdcAddBlocks.py
|
FoRTE-Research/RingRAM
|
091cbc43b2c4088a3564c31f4d0a53731e2b570f
|
[
"MIT"
] | null | null | null |
DEFAULT_FPGA = 'VC709'


def addClock(file, FPGA=DEFAULT_FPGA):
    """Write the clock-source pin constraints for the selected board to *file*."""
    file.write("#Clock Source\r\n")
    if FPGA == 'VC709':
        # Differential system clock pair on the VC709.
        for constraint in (
            "set_property IOSTANDARD DIFF_SSTL15 [get_ports clk_p]\r\n",
            "set_property PACKAGE_PIN H19 [get_ports clk_p]\r\n",
            "set_property PACKAGE_PIN G18 [get_ports clk_n]\r\n",
            "set_property IOSTANDARD DIFF_SSTL15 [get_ports clk_n]\r\n",
        ):
            file.write(constraint)
    elif FPGA in ('A735', 'A7100'):
        # Both Arty boards use the same single-ended 100 MHz clock input.
        file.write("set_property -dict {PACKAGE_PIN E3 IOSTANDARD LVCMOS33} [get_ports CLK]\r\n")
        file.write("create_clock -period 10.000 -name sys_clk_pin -waveform {0.000 5.000} -add [get_ports CLK]\r\n")
    file.write("\r\n")
def addLED(file, FPGA=DEFAULT_FPGA):
    """Write LED pin constraints for the selected board to *file*."""
    # (package pin, IOSTANDARD) per LED index for each supported board.
    led_pins = {
        'VC709': [('AM39', 'LVCMOS18'), ('AN39', 'LVCMOS18'), ('AR37', 'LVCMOS18'),
                  ('AT37', 'LVCMOS18'), ('AR35', 'LVCMOS18'), ('AP41', 'LVCMOS18'),
                  ('AP42', 'LVCMOS18'), ('AU39', 'LVCMOS18')],
        'A735': [('H5', 'LVCMOS33'), ('J5', 'LVCMOS33'), ('T9', 'LVCMOS33'), ('T10', 'LVCMOS33')],
        'A7100': [('H5', 'LVCMOS33'), ('J5', 'LVCMOS33'), ('T9', 'LVCMOS33'), ('T10', 'LVCMOS33')],
    }
    file.write("#LEDs\r\n")
    for index, (pin, iostd) in enumerate(led_pins.get(FPGA, [])):
        file.write("set_property -dict { PACKAGE_PIN %s IOSTANDARD %s } [get_ports { LED[%d] }];\r\n"
                   % (pin, iostd, index))
    file.write("\r\n")
def addSwitches(file, FPGA=DEFAULT_FPGA):
    """Write DIP-switch pin constraints for the selected board to *file*."""
    # (package pin, IOSTANDARD) per switch index for each supported board.
    # NOTE(review): on VC709, SW[5] uses LVCMOS33 while every other switch
    # uses LVCMOS18 -- looks like a typo in the original, preserved as-is;
    # confirm against the board's master XDC.
    switch_pins = {
        'VC709': [('AV30', 'LVCMOS18'), ('AY33', 'LVCMOS18'), ('BA31', 'LVCMOS18'),
                  ('BA32', 'LVCMOS18'), ('AW30', 'LVCMOS18'), ('AY30', 'LVCMOS33'),
                  ('BA30', 'LVCMOS18'), ('BB31', 'LVCMOS18')],
        'A735': [('A8', 'LVCMOS33'), ('C11', 'LVCMOS33'), ('C10', 'LVCMOS33'), ('A10', 'LVCMOS33')],
        'A7100': [('A8', 'LVCMOS33'), ('C11', 'LVCMOS33'), ('C10', 'LVCMOS33'), ('A10', 'LVCMOS33')],
    }
    file.write("#Switches\r\n")
    for index, (pin, iostd) in enumerate(switch_pins.get(FPGA, [])):
        file.write("set_property -dict { PACKAGE_PIN %s IOSTANDARD %s } [get_ports { SW[%d] }];\r\n"
                   % (pin, iostd, index))
    file.write("\r\n")
def addUART(file, FPGA=DEFAULT_FPGA):
    """Write UART pin constraints for the selected board to *file*."""
    file.write("#Uart\r\n")
    if FPGA == 'VC709':
        file.write("set_property -dict { PACKAGE_PIN AU36 IOSTANDARD LVCMOS18 } [get_ports { TX }];\r\n")
    elif FPGA in ('A735', 'A7100'):
        # The RX constraint is deliberately emitted commented out, exactly as
        # in the original.
        file.write("set_property -dict { PACKAGE_PIN D10 IOSTANDARD LVCMOS33 } [get_ports { TX }];\r\n")
        file.write("#set_property -dict {PACKAGE_PIN A9 IOSTANDARD LVCMOS33 } [get_ports RX];\r\n")
    file.write("\r\n")
def addButtons(file, FPGA=DEFAULT_FPGA):
    """Write push-button pin constraints for the selected board to *file*."""
    # (package pin, IOSTANDARD) per button index for each supported board.
    button_pins = {
        'VC709': [('AV39', 'LVCMOS18'), ('AW40', 'LVCMOS18'), ('AP40', 'LVCMOS18'),
                  ('AU38', 'LVCMOS18'), ('AR40', 'LVCMOS18')],
        'A735': [('D9', 'LVCMOS33'), ('C9', 'LVCMOS33'), ('B9', 'LVCMOS33'), ('B8', 'LVCMOS33')],
        'A7100': [('D9', 'LVCMOS33'), ('C9', 'LVCMOS33'), ('B9', 'LVCMOS33'), ('B8', 'LVCMOS33')],
    }
    file.write("#Buttons\r\n")
    for index, (pin, iostd) in enumerate(button_pins.get(FPGA, [])):
        file.write("set_property -dict { PACKAGE_PIN %s IOSTANDARD %s } [get_ports { BUTTON[%d] }];\r\n"
                   % (pin, iostd, index))
    file.write("\r\n")
def addClockBlock(file, FPGA=DEFAULT_FPGA):
    """Emit a placement pblock for the clk_div module.

    Creates pblock_clk_div, assigns the clk_div cell to it, and sizes it
    with board-specific SLICE/RAMB ranges. On the Artix boards the clock
    MMCM primitive is additionally pinned to a fixed site.
    """
    file.write("create_pblock pblock_clk_div\r\n")
    file.write("add_cells_to_pblock [get_pblocks pblock_clk_div] [get_cells -quiet [list clk_div]]\r\n")
    if(FPGA=='VC709'):
        file.write("resize_pblock [get_pblocks pblock_clk_div] -add {SLICE_X196Y475:SLICE_X221Y499}\r\n")
        file.write("resize_pblock [get_pblocks pblock_clk_div] -add {RAMB18_X13Y190:RAMB18_X14Y199}\r\n")
        file.write("resize_pblock [get_pblocks pblock_clk_div] -add {RAMB36_X13Y95:RAMB36_X14Y99}\r\n")
    elif(FPGA=='A735'):
        file.write("resize_pblock [get_pblocks pblock_clk_div] -add {SLICE_X36Y134:SLICE_X57Y149}\r\n")
        file.write("resize_pblock [get_pblocks pblock_clk_div] -add {RAMB18_X1Y54:RAMB18_X1Y59}\r\n")
        file.write("resize_pblock [get_pblocks pblock_clk_div] -add {RAMB36_X1Y27:RAMB36_X1Y29}\r\n")
        # Pin the clock-divider MMCM to a fixed site on the Artix-7 35T.
        file.write("set_property BEL MMCME2_ADV [get_cells clk_div/MMCME2_ADV_inst]\r\n")
        file.write("set_property LOC MMCME2_ADV_X1Y1 [get_cells clk_div/MMCME2_ADV_inst]\r\n")
    elif(FPGA=='A7100'):
        file.write("resize_pblock [get_pblocks pblock_clk_div] -add {SLICE_X66Y134:SLICE_X79Y149}\r\n")
        file.write("resize_pblock [get_pblocks pblock_clk_div] -add {RAMB18_X2Y54:RAMB18_X2Y59}\r\n")
        file.write("resize_pblock [get_pblocks pblock_clk_div] -add {RAMB36_X2Y27:RAMB36_X2Y29}\r\n")
        # Same MMCM pinning as the A735 build.
        file.write("set_property BEL MMCME2_ADV [get_cells clk_div/MMCME2_ADV_inst]\r\n")
        file.write("set_property LOC MMCME2_ADV_X1Y1 [get_cells clk_div/MMCME2_ADV_inst]\r\n")
    # SNAPPING_MODE is emitted unconditionally, even for unknown FPGA values.
    file.write("set_property SNAPPING_MODE ON [get_pblocks pblock_clk_div]\r\n")
    file.write("\r\n")
def addUARTCtrlBlock(file, FPGA=DEFAULT_FPGA):
    """Emit a placement pblock for the uart_ctrl module.

    Creates pblock_uart_ctrl, assigns the uart_ctrl cell to it, and sizes
    it with board-specific SLICE/RAMB ranges.
    """
    file.write("create_pblock pblock_uart_ctrl\r\n")
    file.write("add_cells_to_pblock [get_pblocks pblock_uart_ctrl] [get_cells -quiet [list uart_ctrl]]\r\n")
    if(FPGA=='VC709'):
        file.write("resize_pblock [get_pblocks pblock_uart_ctrl] -add {SLICE_X170Y475:SLICE_X195Y499}\r\n")
        file.write("resize_pblock [get_pblocks pblock_uart_ctrl] -add {RAMB18_X11Y190:RAMB18_X12Y199}\r\n")
        file.write("resize_pblock [get_pblocks pblock_uart_ctrl] -add {RAMB36_X11Y95:RAMB36_X12Y99}\r\n")
    elif(FPGA=='A735'):
        file.write("resize_pblock [get_pblocks pblock_uart_ctrl] -add {SLICE_X36Y118:SLICE_X57Y133}\r\n")
        file.write("resize_pblock [get_pblocks pblock_uart_ctrl] -add {RAMB18_X1Y50:RAMB18_X1Y51}\r\n")
        file.write("resize_pblock [get_pblocks pblock_uart_ctrl] -add {RAMB36_X1Y25:RAMB36_X1Y25}\r\n")
    elif(FPGA=='A7100'):
        file.write("resize_pblock [get_pblocks pblock_uart_ctrl] -add {SLICE_X52Y134:SLICE_X65Y149}\r\n")
        file.write("resize_pblock [get_pblocks pblock_uart_ctrl] -add {RAMB18_X1Y54:RAMB18_X1Y59}\r\n")
        file.write("resize_pblock [get_pblocks pblock_uart_ctrl] -add {RAMB36_X1Y27:RAMB36_X1Y29}\r\n")
    # SNAPPING_MODE is emitted unconditionally, even for unknown FPGA values.
    file.write("set_property SNAPPING_MODE ON [get_pblocks pblock_uart_ctrl]\r\n")
    file.write("\r\n")
def addRRAMCtrlBlock(file, FPGA=DEFAULT_FPGA):
    """Emit a placement pblock for the rram_ctrl module.

    Creates pblock_rram_ctrl, assigns the rram_ctrl and instdb cells to it,
    and sizes it with board-specific SLICE/RAMB ranges.
    """
    file.write("create_pblock pblock_rram_ctrl\r\n")
    # Note: this pblock holds two cells (rram_ctrl and instdb).
    file.write("add_cells_to_pblock [get_pblocks pblock_rram_ctrl] [get_cells -quiet [list rram_ctrl instdb]]\r\n")
    if(FPGA=='VC709'):
        file.write("resize_pblock [get_pblocks pblock_rram_ctrl] -add {SLICE_X170Y0:SLICE_X221Y474}\r\n")
        file.write("resize_pblock [get_pblocks pblock_rram_ctrl] -add {RAMB18_X11Y0:RAMB18_X14Y189}\r\n")
        file.write("resize_pblock [get_pblocks pblock_rram_ctrl] -add {RAMB36_X11Y0:RAMB36_X14Y94}\r\n")
    elif(FPGA=='A735'):
        file.write("resize_pblock [get_pblocks pblock_rram_ctrl] -add {SLICE_X36Y0:SLICE_X65Y99}\r\n")
        file.write("resize_pblock [get_pblocks pblock_rram_ctrl] -add {RAMB18_X1Y0:RAMB18_X2Y39}\r\n")
        file.write("resize_pblock [get_pblocks pblock_rram_ctrl] -add {RAMB36_X1Y0:RAMB36_X2Y19}\r\n")
    elif(FPGA=='A7100'):
        file.write("resize_pblock [get_pblocks pblock_rram_ctrl] -add {SLICE_X52Y0:SLICE_X79Y133}\r\n")
        file.write("resize_pblock [get_pblocks pblock_rram_ctrl] -add {RAMB18_X1Y0:RAMB18_X2Y51}\r\n")
        file.write("resize_pblock [get_pblocks pblock_rram_ctrl] -add {RAMB36_X1Y0:RAMB36_X2Y25}\r\n")
    # SNAPPING_MODE is emitted unconditionally, even for unknown FPGA values.
    file.write("set_property SNAPPING_MODE ON [get_pblocks pblock_rram_ctrl]\r\n")
    file.write("\r\n")
| 70.95679
| 116
| 0.682645
| 1,727
| 11,495
| 4.297626
| 0.097279
| 0.1346
| 0.063864
| 0.117084
| 0.910671
| 0.840609
| 0.816761
| 0.802075
| 0.7917
| 0.771355
| 0
| 0.067092
| 0.167551
| 11,495
| 161
| 117
| 71.397516
| 0.708538
| 0
| 0
| 0.472222
| 0
| 0.013889
| 0.712894
| 0.088655
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d539eb66b0fa379721ef3cfb091a9bcebe71a54c
| 92
|
py
|
Python
|
rick_db/version.py
|
oddbit-project/rick_db
|
02910c071f3ad58fdd88b2a27bfdd2bc61497d42
|
[
"MIT"
] | null | null | null |
rick_db/version.py
|
oddbit-project/rick_db
|
02910c071f3ad58fdd88b2a27bfdd2bc61497d42
|
[
"MIT"
] | null | null | null |
rick_db/version.py
|
oddbit-project/rick_db
|
02910c071f3ad58fdd88b2a27bfdd2bc61497d42
|
[
"MIT"
] | null | null | null |
# Version components. A tuple, so the module-level constant is immutable
# (the original list could be mutated by any importer).
RICK_DB_VERSION = ("0", "9", "4")


def get_version():
    """Return the package version as a dotted string, e.g. "0.9.4"."""
    return ".".join(RICK_DB_VERSION)
| 15.333333
| 36
| 0.619565
| 14
| 92
| 3.714286
| 0.714286
| 0.230769
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038961
| 0.163043
| 92
| 5
| 37
| 18.4
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d55347b62cc63c59ce8fb37dbbab895c5123dc8c
| 110
|
py
|
Python
|
math-question-generator.py
|
happyhipster/math-question-generator
|
e626946611df6c5bda5130dcbd2c9dc43f4dbcd9
|
[
"Apache-2.0"
] | null | null | null |
math-question-generator.py
|
happyhipster/math-question-generator
|
e626946611df6c5bda5130dcbd2c9dc43f4dbcd9
|
[
"Apache-2.0"
] | null | null | null |
math-question-generator.py
|
happyhipster/math-question-generator
|
e626946611df6c5bda5130dcbd2c9dc43f4dbcd9
|
[
"Apache-2.0"
] | 1
|
2021-07-25T04:02:27.000Z
|
2021-07-25T04:02:27.000Z
|
# Draw one random integer in the inclusive range [1, 10] and print it.
from random import randint

number = randint(1, 10)
print(number)
| 22
| 29
| 0.727273
| 17
| 110
| 4.705882
| 0.647059
| 0.3
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05618
| 0.190909
| 110
| 5
| 30
| 22
| 0.842697
| 0.472727
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
63a06ffa224beda238cb88a758bd1342688e0dd0
| 242
|
py
|
Python
|
src/experiment_manager/StorageMan.py
|
gentaman/experiment_manager
|
ca57ec1340c6fd04031328204556ac414fa09f50
|
[
"MIT"
] | null | null | null |
src/experiment_manager/StorageMan.py
|
gentaman/experiment_manager
|
ca57ec1340c6fd04031328204556ac414fa09f50
|
[
"MIT"
] | null | null | null |
src/experiment_manager/StorageMan.py
|
gentaman/experiment_manager
|
ca57ec1340c6fd04031328204556ac414fa09f50
|
[
"MIT"
] | null | null | null |
# TODO: Implement
class StorageMan():
    """
    Storage Manager

    Placeholder for experiment-artifact storage; every method is a stub.
    """

    def __init__(self):
        pass

    def store_info(self):
        """Store experiment info (stub, not implemented yet)."""
        pass

    # Backward-compatible alias: the original public name was misspelled
    # ("stroe_info"); keep it so existing callers don't break.
    stroe_info = store_info

    def store_result(self):
        """Store experiment results (stub, not implemented yet)."""
        pass

    def _get_file_path(self):
        """Compute the backing file path (stub, not implemented yet)."""
        pass
| 13.444444
| 29
| 0.53719
| 26
| 242
| 4.653846
| 0.653846
| 0.264463
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.367769
| 242
| 17
| 30
| 14.235294
| 0.79085
| 0.132231
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 1
| 0.444444
| false
| 0.444444
| 0
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
63cee594e5866a28bb6504e4983404377bc5a320
| 8,242
|
py
|
Python
|
accelbyte_py_sdk/api/ugc/wrappers/_public_group.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/ugc/wrappers/_public_group.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
accelbyte_py_sdk/api/ugc/wrappers/_public_group.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import HeaderStr
from ....core import get_namespace as get_services_namespace
from ....core import run_request
from ....core import run_request_async
from ....core import same_doc_as
from ..models import ModelsCreateGroupRequest
from ..models import ModelsCreateGroupResponse
from ..models import ModelsPaginatedContentDownloadResponse
from ..models import ModelsPaginatedGroupResponse
from ..models import ResponseError
from ..operations.public_group import CreateGroup
from ..operations.public_group import DeleteGroup
from ..operations.public_group import GetGroup
from ..operations.public_group import GetGroupContent
from ..operations.public_group import GetGroups
from ..operations.public_group import UpdateGroup
@same_doc_as(CreateGroup)
def create_group(body: ModelsCreateGroupRequest, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    # NOTE(review): generated wrapper ("Code generated. DO NOT EDIT!") --
    # behavioral changes belong in the codegen template, not here.
    # Builds a CreateGroup operation and runs it synchronously; returns the
    # (result, error) pair produced by run_request.
    if namespace is None:
        # Fall back to the SDK-wide configured namespace.
        namespace, error = get_services_namespace()
        if error:
            return None, error
    request = CreateGroup.create(
        body=body,
        user_id=user_id,
        namespace=namespace,
    )
    return run_request(request, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(CreateGroup)
async def create_group_async(body: ModelsCreateGroupRequest, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant of :func:`create_group` (CreateGroup operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = CreateGroup.create(body=body, user_id=user_id, namespace=namespace)
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(DeleteGroup)
def delete_group(group_id: str, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Delete the UGC group *group_id* owned by *user_id* (DeleteGroup operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = DeleteGroup.create(group_id=group_id, user_id=user_id, namespace=namespace)
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(DeleteGroup)
async def delete_group_async(group_id: str, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant of :func:`delete_group` (DeleteGroup operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = DeleteGroup.create(group_id=group_id, user_id=user_id, namespace=namespace)
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetGroup)
def get_group(group_id: str, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Fetch the UGC group *group_id* owned by *user_id* (GetGroup operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = GetGroup.create(group_id=group_id, user_id=user_id, namespace=namespace)
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetGroup)
async def get_group_async(group_id: str, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant of :func:`get_group` (GetGroup operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = GetGroup.create(group_id=group_id, user_id=user_id, namespace=namespace)
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetGroupContent)
def get_group_content(group_id: str, user_id: str, limit: Optional[int] = None, offset: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """List contents of group *group_id* for *user_id*, with optional paging
    (GetGroupContent operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = GetGroupContent.create(
        group_id=group_id,
        user_id=user_id,
        limit=limit,
        offset=offset,
        namespace=namespace,
    )
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetGroupContent)
async def get_group_content_async(group_id: str, user_id: str, limit: Optional[int] = None, offset: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant of :func:`get_group_content` (GetGroupContent operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = GetGroupContent.create(
        group_id=group_id,
        user_id=user_id,
        limit=limit,
        offset=offset,
        namespace=namespace,
    )
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetGroups)
def get_groups(user_id: str, limit: Optional[int] = None, offset: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """List the UGC groups owned by *user_id*, with optional paging
    (GetGroups operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = GetGroups.create(
        user_id=user_id,
        limit=limit,
        offset=offset,
        namespace=namespace,
    )
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(GetGroups)
async def get_groups_async(user_id: str, limit: Optional[int] = None, offset: Optional[int] = None, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant of :func:`get_groups` (GetGroups operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = GetGroups.create(
        user_id=user_id,
        limit=limit,
        offset=offset,
        namespace=namespace,
    )
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdateGroup)
def update_group(body: ModelsCreateGroupRequest, group_id: str, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Replace the UGC group *group_id* owned by *user_id* with *body*
    (UpdateGroup operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = UpdateGroup.create(
        body=body,
        group_id=group_id,
        user_id=user_id,
        namespace=namespace,
    )
    return run_request(operation, additional_headers=x_additional_headers, **kwargs)
@same_doc_as(UpdateGroup)
async def update_group_async(body: ModelsCreateGroupRequest, group_id: str, user_id: str, namespace: Optional[str] = None, x_additional_headers: Optional[Dict[str, str]] = None, **kwargs):
    """Async variant of :func:`update_group` (UpdateGroup operation).

    If *namespace* is omitted it is resolved from the SDK configuration;
    a resolution failure is propagated as ``(None, error)``.
    """
    if namespace is None:
        namespace, resolve_error = get_services_namespace()
        if resolve_error:
            return None, resolve_error
    operation = UpdateGroup.create(
        body=body,
        group_id=group_id,
        user_id=user_id,
        namespace=namespace,
    )
    return await run_request_async(operation, additional_headers=x_additional_headers, **kwargs)
| 37.463636
| 220
| 0.710871
| 1,021
| 8,242
| 5.516161
| 0.107738
| 0.038352
| 0.076705
| 0.051136
| 0.791193
| 0.749645
| 0.749645
| 0.748757
| 0.732067
| 0.732067
| 0
| 0.000602
| 0.193885
| 8,242
| 219
| 221
| 37.634703
| 0.84708
| 0.092939
| 0
| 0.757396
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035503
| false
| 0
| 0.100592
| 0
| 0.278107
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
893f431e1ff4eefd3b98f63e66f686e6bc88fa1a
| 30,025
|
py
|
Python
|
pdc/apps/componentbranch/tests.py
|
hluk/product-definition-center
|
af79f73c30fa5f5709ba03d584b7a49b83166b81
|
[
"MIT"
] | 18
|
2015-12-15T17:56:18.000Z
|
2021-04-10T13:49:48.000Z
|
pdc/apps/componentbranch/tests.py
|
hluk/product-definition-center
|
af79f73c30fa5f5709ba03d584b7a49b83166b81
|
[
"MIT"
] | 303
|
2015-11-18T07:37:06.000Z
|
2021-05-26T12:34:01.000Z
|
pdc/apps/componentbranch/tests.py
|
hluk/product-definition-center
|
af79f73c30fa5f5709ba03d584b7a49b83166b81
|
[
"MIT"
] | 27
|
2015-11-19T20:33:54.000Z
|
2021-03-25T08:15:28.000Z
|
#
# Copyright (c) 2017 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from datetime import datetime, timedelta
from django.core.urlresolvers import reverse
from rest_framework.test import APITestCase
from rest_framework import status
from pdc.apps.component.models import GlobalComponent, ReleaseComponentType
from pdc.apps.componentbranch.models import (
ComponentBranch, SLAToComponentBranch, SLA)
class SLAAPITestCase(APITestCase):
    """CRUD tests for the SLA REST endpoint.

    The fixture provides two SLA rows (ids 1 'security_fixes' and
    2 'bug_fixes'), which the id/count assertions below rely on.
    """
    fixtures = ['pdc/apps/componentbranch/fixtures/tests/sla.json']

    def test_create_sla(self):
        """POST creates a new SLA; ids 1-2 exist, so the new row gets id 3."""
        url = reverse('sla-list')
        data = {
            'name': 'features',
            'description': 'A wonderful description'
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        expected_rv = {
            'name': 'features',
            'description': 'A wonderful description',
            'id': 3
        }
        self.assertEqual(response.data, expected_rv)

    def test_get_sla(self):
        """GET lists both fixture SLAs; ordering puts id 2 first."""
        url = reverse('sla-list')
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 2)
        self.assertEqual(response.data['results'][0]['id'], 2)
        self.assertEqual(response.data['results'][0]['name'], 'bug_fixes')
        self.assertEqual(response.data['results'][0]['description'],
                         'Bug fixes are applied')

    def test_patch_sla(self):
        """PATCH may change the description while the name stays intact."""
        url = reverse('sla-detail', args=[1])
        data = {
            'description': 'A new description'
        }
        response = self.client.patch(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['id'], 1)
        self.assertEqual(response.data['name'], 'security_fixes')
        self.assertEqual(response.data['description'], 'A new description')

    def test_patch_sla_change_name_error(self):
        """PATCH that tries to rename the SLA is rejected with HTTP 400."""
        url = reverse('sla-detail', args=[1])
        data = {
            'name': 'some_new_name'
        }
        response = self.client.patch(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        error = {'name': ["You may not modify the SLA's name due to policy"]}
        self.assertEqual(response.data, error)

    def test_put_sla(self):
        """PUT with an unchanged name succeeds and updates the description."""
        url = reverse('sla-detail', args=[1])
        data = {
            'name': 'security_fixes',
            'description': 'A new description'
        }
        response = self.client.put(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['id'], 1)
        self.assertEqual(response.data['name'], 'security_fixes')
        self.assertEqual(response.data['description'], 'A new description')

    def test_put_sla_change_name_error(self):
        """PUT that tries to rename the SLA is rejected with HTTP 400."""
        url = reverse('sla-detail', args=[1])
        data = {
            'name': 'some_new_name',
            'description': 'A new description'
        }
        response = self.client.put(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        error = {'name': ["You may not modify the SLA's name due to policy"]}
        self.assertEqual(response.data, error)

    def test_delete_sla(self):
        """DELETE of an SLA succeeds with HTTP 204."""
        url = reverse('sla-detail', args=[1])
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
class ComponentBranchAPITestCase(APITestCase):
    """CRUD and filtering tests for the ComponentBranch REST endpoint.

    Fixtures provide global components and two branches ('2.7' id 1 and
    '2.6' id 2 of component 'python', type 'rpm'); the id/count assertions
    below rely on that layout.
    """
    fixtures = ['pdc/apps/componentbranch/fixtures/tests/global_component.json',
                'pdc/apps/componentbranch/fixtures/tests/sla.json',
                'pdc/apps/componentbranch/fixtures/tests/componentbranch.json']

    def test_create_branch(self):
        """POST creates a branch; fixture ids 1-2 exist, so the new row is id 3."""
        url = reverse('componentbranch-list')
        data = {
            'name': '3.6',
            'global_component': 'python',
            'type': 'rpm',
            'critical_path': True
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        expected_rv = {
            'name': '3.6',
            'slas': [],
            'global_component': 'python',
            'active': False,
            'critical_path': True,
            'type': 'rpm',
            'id': 3
        }
        self.assertEqual(response.data, expected_rv)

    def test_create_branch_critical_path_default(self):
        """critical_path defaults to False when omitted from the POST body."""
        url = reverse('componentbranch-list')
        data = {
            'name': '3.6',
            'global_component': 'python',
            'type': 'rpm'
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        # NOTE(review): this checks fixture branch id 1, not the branch just
        # created — presumably intentional as a default-value check, but the
        # newly created row would be id 3; confirm against the serializer.
        branch = ComponentBranch.objects.filter(id=1).first()
        self.assertEqual(branch.critical_path, False)

    def test_create_branch_bad_name(self):
        """Branch names matching the configured deny regex are rejected."""
        url = reverse('componentbranch-list')
        data = {
            'name': 'epel7',
            'global_component': 'python',
            'type': 'rpm',
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        expected_rv = {'name': ['The branch name is not allowed based on the regex "^epel\\d+$"']}
        self.assertEqual(response.data, expected_rv)

    def test_get_branch(self):
        """GET lists both fixture branches; ordering puts id 2 ('2.6') first."""
        url = reverse('componentbranch-list')
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 2)
        self.assertEqual(response.data['results'][0]['id'], 2)
        self.assertEqual(response.data['results'][0]['name'], '2.6')
        self.assertEqual(response.data['results'][0]['global_component'],
                         'python')
        self.assertEqual(response.data['results'][0]['type'], 'rpm')
        self.assertFalse(response.data['results'][0]['active'])
        self.assertFalse(response.data['results'][0]['critical_path'])

    def test_get_branch_filter(self):
        """Filtering by component, type and name narrows to a single branch."""
        url = reverse('componentbranch-list')
        url = '{0}?global_component=python&type=rpm&name=2.7'.format(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)
        self.assertEqual(response.data['results'][0]['id'], 1)
        self.assertEqual(response.data['results'][0]['name'], '2.7')
        self.assertEqual(response.data['results'][0]['global_component'],
                         'python')
        self.assertEqual(response.data['results'][0]['type'], 'rpm')
        self.assertFalse(response.data['results'][0]['active'])
        self.assertFalse(response.data['results'][0]['critical_path'])

    def test_get_branch_filter_case_sensitive(self):
        """The global_component filter distinguishes 'python' from 'Python'."""
        gc2 = GlobalComponent(name='Python')
        gc2.save()
        rpm_ct = ReleaseComponentType.objects.get(name='rpm')
        cb2 = ComponentBranch(name='some_branch', global_component=gc2,
                              type=rpm_ct)
        cb2.save()
        # Lowercase filter matches only the two fixture branches.
        url = reverse('componentbranch-list')
        url = '{0}?global_component=python'.format(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 2)
        self.assertEqual(response.data['results'][0]['global_component'],
                         'python')
        self.assertEqual(response.data['results'][1]['global_component'],
                         'python')
        # Capitalised filter matches only the branch created above (id 3).
        url2 = reverse('componentbranch-list')
        url2 = '{0}?global_component=Python'.format(url2)
        response2 = self.client.get(url2)
        self.assertEqual(response2.status_code, status.HTTP_200_OK)
        self.assertEqual(response2.data['count'], 1)
        self.assertEqual(response2.data['results'][0]['id'], 3)
        self.assertEqual(response2.data['results'][0]['name'], 'some_branch')
        self.assertEqual(response2.data['results'][0]['global_component'],
                         'Python')
        self.assertEqual(response2.data['results'][0]['type'], 'rpm')
        self.assertFalse(response2.data['results'][0]['active'])
        self.assertFalse(response2.data['results'][0]['critical_path'])

    def test_patch_branch(self):
        """PATCH may re-point a branch at a different global component."""
        gc2 = GlobalComponent(name='pythonx')
        gc2.save()
        url = reverse('componentbranch-detail', args=[2])
        data = {
            'global_component': 'pythonx'
        }
        response = self.client.patch(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['id'], 2)
        self.assertEqual(response.data['name'], '2.6')
        self.assertEqual(response.data['global_component'],
                         'pythonx')
        self.assertEqual(response.data['type'], 'rpm')
        self.assertFalse(response.data['active'])
        self.assertFalse(response.data['critical_path'])

    def test_patch_branch_change_name_error(self):
        """PATCH that tries to rename the branch is rejected with HTTP 400."""
        url = reverse('componentbranch-detail', args=[1])
        data = {
            'name': '3.6'
        }
        response = self.client.patch(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        error_msg = {
            'name': ["You may not modify the branch's name due to policy"]}
        self.assertEqual(response.data, error_msg)

    def test_put_branch(self):
        """PUT with an unchanged name succeeds and updates other fields."""
        gc2 = GlobalComponent(name='pythonx')
        gc2.save()
        url = reverse('componentbranch-detail', args=[2])
        data = {
            'name': '2.6',
            'global_component': 'pythonx',
            'type': 'rpm',
            'critical_path': False
        }
        response = self.client.put(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['id'], 2)
        self.assertEqual(response.data['name'], '2.6')
        self.assertEqual(response.data['global_component'],
                         'pythonx')
        self.assertEqual(response.data['type'], 'rpm')
        self.assertFalse(response.data['active'])
        self.assertFalse(response.data['critical_path'])

    def test_put_branch_change_name_error(self):
        """PUT that tries to rename the branch is rejected with HTTP 400."""
        url = reverse('componentbranch-detail', args=[1])
        data = {
            'name': '3.6',
            'global_component': 'python',
            'type': 'rpm',
            'critical_path': False
        }
        response = self.client.put(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        error_msg = {
            'name': ["You may not modify the branch's name due to policy"]}
        self.assertEqual(response.data, error_msg)

    def test_delete_branch(self):
        """DELETE of a branch succeeds with HTTP 204."""
        url = reverse('componentbranch-detail', args=[1])
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_delete_branch_with_slas(self):
        """DELETE succeeds even when the branch has an SLA relationship."""
        branch = ComponentBranch.objects.get(id=1)
        sla = SLA.objects.get(name='bug_fixes')
        sla_entry = SLAToComponentBranch(
            sla=sla, branch=branch, eol='2222-01-01')
        sla_entry.save()
        url = reverse('componentbranch-detail', args=[1])
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
class SLAToBranchAPITestCase(APITestCase):
    """Tests for the SLAToComponentBranch REST endpoint, including the
    branch 'active' computation (a branch is active while it has an SLA
    entry whose EOL is today or later).

    Fixtures provide two branches ('2.7' id 1, '2.6' id 2) and one
    existing SLA-to-branch row (id 1: security_fixes on branch '2.7',
    EOL 2222-01-01); the id/count assertions rely on that layout.
    """
    fixtures = ['pdc/apps/componentbranch/fixtures/tests/global_component.json',
                'pdc/apps/componentbranch/fixtures/tests/sla.json',
                'pdc/apps/componentbranch/fixtures/tests/componentbranch.json',
                'pdc/apps/componentbranch/fixtures/tests/slatocomponentbranch.json']

    def test_create_sla_to_branch_branch_exists(self):
        """POST reuses the already-existing branch '2.7' (id 1) instead of
        creating a new one; the new SLA entry gets id 2."""
        url = reverse('slatocomponentbranch-list')
        data = {
            'sla': 'bug_fixes',
            'eol': '2222-01-01',
            'branch': {
                'name': '2.7',
                'global_component': 'python',
                'type': 'rpm',
                'critical_path': False
            }
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        expected_rv = {
            'sla': 'bug_fixes',
            'eol': '2222-01-01',
            'branch': {
                'name': '2.7',
                'global_component': 'python',
                'type': 'rpm',
                'active': True,
                'critical_path': False,
                'id': 1
            },
            'id': 2
        }
        self.assertEqual(response.data, expected_rv)

    def test_create_sla_to_branch_branch_exists_critical_path_wrong(self):
        """POST fails when the embedded branch's critical_path contradicts
        the stored branch."""
        url = reverse('slatocomponentbranch-list')
        data = {
            'sla': 'bug_fixes',
            'eol': '2222-01-01',
            'branch': {
                'name': '2.7',
                'global_component': 'python',
                'type': 'rpm',
                'critical_path': True
            }
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        error = ('The found branch\'s critical_path field did not match the '
                 'supplied value')
        error_msg = {'branch.critical_path': [error]}
        self.assertEqual(response.data, error_msg)

    def test_create_sla_to_branch(self):
        # NOTE(review): duplicate of test_create_sla_to_branch_branch_exists;
        # kept as-is since removing a test changes what the suite runs.
        url = reverse('slatocomponentbranch-list')
        data = {
            'sla': 'bug_fixes',
            'eol': '2222-01-01',
            'branch': {
                'name': '2.7',
                'global_component': 'python',
                'type': 'rpm',
                'critical_path': False
            }
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        expected_rv = {
            'sla': 'bug_fixes',
            'eol': '2222-01-01',
            'branch': {
                'name': '2.7',
                'global_component': 'python',
                'type': 'rpm',
                'active': True,
                'critical_path': False,
                'id': 1
            },
            'id': 2
        }
        self.assertEqual(response.data, expected_rv)

    def test_create_sla_to_branch_branch(self):
        """POST creates the embedded branch when it does not exist yet
        (new branch id 3)."""
        url = reverse('slatocomponentbranch-list')
        data = {
            'sla': 'security_fixes',
            'eol': '2222-01-01',
            'branch': {
                'name': '3.6',
                'global_component': 'python',
                'type': 'rpm',
                'critical_path': True
            }
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        expected_rv = {
            'sla': 'security_fixes',
            'eol': '2222-01-01',
            'branch': {
                'name': '3.6',
                'global_component': 'python',
                'type': 'rpm',
                'active': True,
                'critical_path': True,
                'id': 3
            },
            'id': 2
        }
        self.assertEqual(response.data, expected_rv)

    def test_create_sla_to_branch_branch_critical_path_default(self):
        """A newly created embedded branch defaults critical_path to False."""
        url = reverse('slatocomponentbranch-list')
        data = {
            'sla': 'security_fixes',
            'eol': '2222-01-01',
            'branch': {
                'name': '3.6',
                'global_component': 'python',
                'type': 'rpm',
            }
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        expected_rv = {
            'sla': 'security_fixes',
            'eol': '2222-01-01',
            'branch': {
                'name': '3.6',
                'global_component': 'python',
                'type': 'rpm',
                'active': True,
                'critical_path': False,
                'id': 3
            },
            'id': 2
        }
        self.assertEqual(response.data, expected_rv)

    def test_create_sla_to_branch_bad_branch_name(self):
        """An embedded branch whose name matches the deny regex is rejected."""
        url = reverse('slatocomponentbranch-list')
        data = {
            'sla': 'security_fixes',
            'eol': '2222-01-01',
            'branch': {
                'name': 'epel7',
                'global_component': 'python',
                'type': 'rpm',
                'critical_path': False
            }
        }
        response = self.client.post(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        expected_rv = {'branch': {'name': ['The branch name is not allowed based on the regex "^epel\\d+$"']}}
        self.assertEqual(response.data, expected_rv)

    def test_get_sla_to_branch(self):
        """GET lists the single fixture SLA-to-branch row with nested branch."""
        url = reverse('slatocomponentbranch-list')
        response = self.client.get(url)
        expected_branch = {
            'name': '2.7',
            'global_component': 'python',
            'type': 'rpm',
            'active': True,
            'critical_path': False,
            'id': 1
        }
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)
        self.assertEqual(response.data['results'][0]['id'], 1)
        self.assertEqual(response.data['results'][0]['sla'], 'security_fixes')
        self.assertEqual(response.data['results'][0]['eol'], '2222-01-01')
        self.assertEqual(response.data['results'][0]['branch'], expected_branch)

    def test_get_sla_to_branch_filtering(self):
        """Filtering by branch name, component and branch type still finds
        the fixture row."""
        url = reverse('slatocomponentbranch-list')
        url = '{0}?branch=2.7&global_component=python&branch_type=rpm'.format(url)
        response = self.client.get(url)
        expected_branch = {
            'name': '2.7',
            'global_component': 'python',
            'type': 'rpm',
            'active': True,
            'critical_path': False,
            'id': 1
        }
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)
        self.assertEqual(response.data['results'][0]['id'], 1)
        self.assertEqual(response.data['results'][0]['sla'], 'security_fixes')
        self.assertEqual(response.data['results'][0]['eol'], '2222-01-01')
        self.assertEqual(response.data['results'][0]['branch'], expected_branch)

    def test_patch_sla_to_branch(self):
        """PATCH may change the EOL date of an SLA entry."""
        url = reverse('slatocomponentbranch-detail', args=[1])
        data = {
            'eol': '2222-03-01'
        }
        response = self.client.patch(url, data, format='json')
        expected_branch = {
            'name': '2.7',
            'global_component': 'python',
            'type': 'rpm',
            'active': True,
            'critical_path': False,
            'id': 1
        }
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['id'], 1)
        self.assertEqual(response.data['sla'], 'security_fixes')
        self.assertEqual(response.data['eol'], '2222-03-01')
        self.assertEqual(response.data['branch'], expected_branch)

    def test_patch_sla_to_branch_change_branch_error(self):
        """PATCH that tries to swap the branch is rejected with HTTP 400."""
        url = reverse('slatocomponentbranch-detail', args=[1])
        data = {
            'branch': {
                'name': '3.6',
                'global_component': 'python',
                'type': 'rpm',
                'critical_path': False,
            }
        }
        response = self.client.patch(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        error_msg = {'branch': ['The branch cannot be modified using this API']}
        self.assertEqual(response.data, error_msg)

    def test_put_sla_to_branch(self):
        """PUT with the same branch succeeds and updates the EOL."""
        url = reverse('slatocomponentbranch-detail', args=[1])
        branch = {
            'name': '2.7',
            'global_component': 'python',
            'type': 'rpm',
            'critical_path': False
        }
        data = {
            'sla': 'security_fixes',
            'eol': '2222-03-01',
            'branch': branch,
        }
        response = self.client.put(url, data, format='json')
        # The response echoes the branch with its server-side fields filled in.
        branch['id'] = 1
        branch['active'] = True
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['id'], 1)
        self.assertEqual(response.data['sla'], 'security_fixes')
        self.assertEqual(response.data['eol'], '2222-03-01')
        self.assertEqual(response.data['branch'], branch)

    def test_put_sla_to_branch_no_critical_path(self):
        """PUT works without critical_path in the embedded branch; the
        response carries the stored value (False)."""
        url = reverse('slatocomponentbranch-detail', args=[1])
        branch = {
            'name': '2.7',
            'global_component': 'python',
            'type': 'rpm'
        }
        data = {
            'sla': 'security_fixes',
            'eol': '2222-03-01',
            'branch': branch,
        }
        response = self.client.put(url, data, format='json')
        branch['id'] = 1
        branch['active'] = True
        branch['critical_path'] = False
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['id'], 1)
        self.assertEqual(response.data['sla'], 'security_fixes')
        self.assertEqual(response.data['eol'], '2222-03-01')
        self.assertEqual(response.data['branch'], branch)

    def test_put_sla_to_branch_change_branch_error(self):
        """PUT that tries to swap the branch is rejected with HTTP 400."""
        url = reverse('slatocomponentbranch-detail', args=[1])
        branch = {
            'name': '3.5',
            'global_component': 'python',
            'type': 'rpm',
            'critical_path': False
        }
        data = {
            'sla': 'security_fixes',
            'eol': '2222-03-01',
            'branch': branch,
        }
        response = self.client.put(url, data, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        error = {'branch': ['The branch cannot be modified using this API']}
        self.assertEqual(response.data, error)

    def test_delete_sla_to_branch(self):
        """DELETE of an SLA-to-branch row succeeds with HTTP 204."""
        url = reverse('slatocomponentbranch-detail', args=[1])
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_delete_sla_with_sla_to_branch_relationships(self):
        """DELETE of an SLA succeeds even when branch relationships exist."""
        branch = ComponentBranch.objects.get(id=1)
        sla = SLA.objects.get(name='bug_fixes')
        sla_entry = SLAToComponentBranch(
            sla=sla, branch=branch, eol='2222-01-01')
        sla_entry.save()
        url = reverse('sla-detail', args=[2])
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    def test_active_componentbranch_false(self):
        """active=false matches the one fixture branch without a future SLA."""
        url = reverse('componentbranch-list')
        url = '{0}?active=false'.format(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_active_with_sla_of_today(self):
        """An SLA whose EOL is today keeps the branch active."""
        today = str(datetime.utcnow().date())
        branch = ComponentBranch.objects.get(id=2)
        sla = SLA.objects.get(name='bug_fixes')
        sla_entry = SLAToComponentBranch(
            sla=sla, branch=branch, eol=today)
        sla_entry.save()
        url = reverse('componentbranch-detail', args=[2])
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertTrue(response.data['active'])

    def test_active_with_sla_of_yesterday(self):
        """An SLA whose EOL has passed leaves the branch inactive."""
        yesterday = str(datetime.utcnow().date() - timedelta(days=1))
        branch = ComponentBranch.objects.get(id=2)
        sla = SLA.objects.get(name='bug_fixes')
        sla_entry = SLAToComponentBranch(
            sla=sla, branch=branch, eol=yesterday)
        sla_entry.save()
        url = reverse('componentbranch-detail', args=[2])
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertFalse(response.data['active'])

    def test_active_with_valid_and_invalid_sla(self):
        """One unexpired SLA is enough to keep a branch active."""
        yesterday = str(datetime.utcnow().date() - timedelta(days=1))
        tomorrow = str(datetime.utcnow().date() + timedelta(days=1))
        branch = ComponentBranch.objects.get(id=2)
        sla_bug_fixes = SLA.objects.get(name='bug_fixes')
        sla_entry_one = SLAToComponentBranch(
            sla=sla_bug_fixes, branch=branch, eol=yesterday)
        sla_entry_one.save()
        sla_security_fixes = SLA.objects.get(name='security_fixes')
        sla_entry_two = SLAToComponentBranch(
            sla=sla_security_fixes, branch=branch, eol=tomorrow)
        sla_entry_two.save()
        url = reverse('componentbranch-detail', args=[2])
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertTrue(response.data['active'])

    def test_active_filter_sla(self):
        """branch_active=true excludes entries on a branch with only an
        expired SLA; only the fixture row remains."""
        yesterday = str(datetime.utcnow().date() - timedelta(days=1))
        branch = ComponentBranch.objects.get(id=2)
        sla_bug_fixes = SLA.objects.get(name='bug_fixes')
        sla_entry_one = SLAToComponentBranch(
            sla=sla_bug_fixes, branch=branch, eol=yesterday)
        sla_entry_one.save()
        url = reverse('slatocomponentbranch-list')
        url = '{0}?branch_active=true'.format(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)

    def test_active_filter_false(self):
        """branch_active=false matches both entries on a branch whose SLAs
        have all expired."""
        yesterday = str(datetime.utcnow().date() - timedelta(days=1))
        branch = ComponentBranch.objects.get(id=2)
        sla_bug_fixes = SLA.objects.get(name='bug_fixes')
        sla_entry_one = SLAToComponentBranch(
            sla=sla_bug_fixes, branch=branch, eol=yesterday)
        sla_entry_one.save()
        sla_security_fixes = SLA.objects.get(name='security_fixes')
        sla_entry_two = SLAToComponentBranch(
            sla=sla_security_fixes, branch=branch, eol=yesterday)
        sla_entry_two.save()
        url = reverse('slatocomponentbranch-list')
        url = '{0}?branch_active=false'.format(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 2)

    def test_active_filter_false_one_valid_one_invalid(self):
        """branch_active=false matches nothing when the branch still has
        one unexpired SLA (the branch counts as active)."""
        tomorrow = str(datetime.utcnow().date() + timedelta(days=1))
        yesterday = str(datetime.utcnow().date() - timedelta(days=1))
        branch = ComponentBranch.objects.get(id=2)
        sla_bug_fixes = SLA.objects.get(name='bug_fixes')
        sla_entry_one = SLAToComponentBranch(
            sla=sla_bug_fixes, branch=branch, eol=yesterday)
        sla_entry_one.save()
        sla_security_fixes = SLA.objects.get(name='security_fixes')
        sla_entry_two = SLAToComponentBranch(
            sla=sla_security_fixes, branch=branch, eol=tomorrow)
        sla_entry_two.save()
        url = reverse('slatocomponentbranch-list')
        url = '{0}?branch_active=false'.format(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 0)

    def test_active_filter_sla_valid_and_invalid(self):
        """branch_active=true matches all three entries (fixture row plus
        both added here) because their branches are active."""
        yesterday = str(datetime.utcnow().date() - timedelta(days=1))
        tomorrow = str(datetime.utcnow().date() + timedelta(days=1))
        branch = ComponentBranch.objects.get(id=2)
        sla_bug_fixes = SLA.objects.get(name='bug_fixes')
        sla_entry_one = SLAToComponentBranch(
            sla=sla_bug_fixes, branch=branch, eol=yesterday)
        sla_entry_one.save()
        sla_security_fixes = SLA.objects.get(name='security_fixes')
        sla_entry_two = SLAToComponentBranch(
            sla=sla_security_fixes, branch=branch, eol=tomorrow)
        sla_entry_two.save()
        url = reverse('slatocomponentbranch-list')
        url = '{0}?branch_active=true'.format(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 3)

    def test_get_branch_filter_case_sensitive(self):
        # NOTE(review): method name appears copy-pasted from
        # ComponentBranchAPITestCase; it actually tests case-sensitive
        # global_component filtering on the SLA-to-branch list endpoint.
        gc2 = GlobalComponent(name='Python')
        gc2.save()
        rpm_ct = ReleaseComponentType.objects.get(name='rpm')
        cb2 = ComponentBranch(name='some_branch', global_component=gc2,
                              type=rpm_ct)
        cb2.save()
        tomorrow = str(datetime.utcnow().date() + timedelta(days=1))
        sla_bug_fixes = SLA.objects.get(name='bug_fixes')
        sla_entry = SLAToComponentBranch(
            sla=sla_bug_fixes, branch=cb2, eol=tomorrow)
        sla_entry.save()
        url = reverse('slatocomponentbranch-list')
        url = '{0}?global_component=python'.format(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data['count'], 1)
        self.assertEqual(response.data['results'][0]['branch']['global_component'], 'python')
        url2 = reverse('slatocomponentbranch-list')
        url2 = '{0}?global_component=Python'.format(url2)
        response2 = self.client.get(url2)
        self.assertEqual(response2.status_code, status.HTTP_200_OK)
        self.assertEqual(response2.data['count'], 1)
        self.assertEqual(response2.data['results'][0]['branch']['global_component'], 'Python')
| 41.470994
| 110
| 0.593705
| 3,302
| 30,025
| 5.22229
| 0.052998
| 0.111343
| 0.157388
| 0.117432
| 0.930527
| 0.91916
| 0.896022
| 0.86575
| 0.845685
| 0.831884
| 0
| 0.024066
| 0.269276
| 30,025
| 723
| 111
| 41.528354
| 0.761896
| 0.003264
| 0
| 0.77027
| 0
| 0
| 0.182982
| 0.048728
| 0
| 0
| 0
| 0
| 0.211712
| 1
| 0.064565
| false
| 0
| 0.009009
| 0
| 0.082583
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
894e03c5c8057cd8419510e31d8fb093a2bafe52
| 32,025
|
py
|
Python
|
tests/unit_tests/controllers_tests/test_ucs_controller.py
|
HQebupt/ucs-service
|
217c37ca28398f779b737afe9578bb81d0e01423
|
[
"Apache-2.0"
] | null | null | null |
tests/unit_tests/controllers_tests/test_ucs_controller.py
|
HQebupt/ucs-service
|
217c37ca28398f779b737afe9578bb81d0e01423
|
[
"Apache-2.0"
] | 44
|
2017-01-25T20:09:37.000Z
|
2021-06-01T21:51:59.000Z
|
tests/unit_tests/controllers_tests/test_ucs_controller.py
|
HQebupt/ucs-service
|
217c37ca28398f779b737afe9578bb81d0e01423
|
[
"Apache-2.0"
] | 14
|
2017-01-25T16:07:59.000Z
|
2018-03-07T04:08:59.000Z
|
# Copyright 2017, Dell EMC, Inc.
import unittest
import mock
import controllers.ucs_controller as controler
from ucsmsdk.ucsexception import UcsException
# Shared fixtures for every test below: fake credentials, request headers,
# and UCS distinguished names (dn) mimicking the managed-object hierarchy.
EXPECTED_COOKIE = 'chocolate chip'
HOST = 'hostname'
USER = 'somebody'
PASS = 'secret'
# Headers the controller reads UCS connection parameters from.
MOCK_HEADER = {
    'ucs-host': HOST,
    'ucs-password': PASS,
    'ucs-user': USER
}
MOCK_DATA = 'data'
MOCK_ID = 'sys/rack1'
MOCK_ID_RACKMOUNT = "sys/rack-unit-7"
MOCK_ID_CHASSIS = "sys/chassis-6"
MOCK_ID_MACS = "PciEquipSlot"
MOCK_MAC = "00:11:22:33:44:55"
MOCK_MAC_2 = "11:22:33:44:55:66"
MOCK_ID_COMPUTEBLADE = "sys/chassis-6/blade-1"
MOCK_ID_COMPUTEBLADE_2 = "sys/chassis-6/blade-2"
MOCK_ID_ADAPTORUNIT = "sys/chassis-6/blade-1/adaptor-1"
MOCK_ID_ADAPTORUNIT_2 = "sys/chassis-6/blade-2/adaptor-1"
MOCK_ID_SERVICEPROFILE = "root"
MOCK_ID_LOGICALSERVER = "org-root/ls-Profile3"
# One logical-server member record as returned by query_dn.
MOCK_ID_LOGICALSERVER_MEMBER = {
    'assoc_state': 'associated',
    'dn': 'org-root/ls-Profile3',
    'pn_dn': 'sys/rack-unit-3',
    'rn': 'org-root/ls-Profile3'
}
# Poller class ids and the data each mocked query returns, index-aligned.
MOCK_CLASS_IDS = ['processorEnvStats', 'memoryUnitEnvStats']
MOCK_CLASS_ID_DATA = ['CPU', 'Mem']
class test_default_controller(unittest.TestCase):
    """Unit tests for controllers.ucs_controller (login, inventory, power)."""

    def setUp(self):
        # print() call form instead of the original Python-2-only print
        # statement; output is identical and the file now parses on Python 3.
        print("running controller tests setup")

    def tearDown(self):
        print("running controller tests tear down")
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('service.ucs.UcsHandle')
    def testLoginSuccess(self, mock_ucs, mock_request):
        # A successful UcsHandle.login() must surface the session cookie.
        mock_ucs.return_value.login.return_value = True
        mock_ucs.return_value.cookie = EXPECTED_COOKIE
        mock_request.headers = MOCK_HEADER
        cookie = controler.login_get()
        # Handle is built from the header credentials, non-secure.
        mock_ucs.assert_called_with(HOST, USER, PASS, secure=False)
        mock_ucs.return_value.login.assert_called_once()
        self.assertEqual(cookie, EXPECTED_COOKIE, 'returned cookie %s does not equal %s' % (cookie, EXPECTED_COOKIE))

    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('service.ucs.UcsHandle')
    def testLoginFailure(self, mock_ucs, mock_request):
        # When login() reports False, login_get() must return None.
        mock_ucs.return_value.login.return_value = False
        mock_ucs.return_value.cookie = EXPECTED_COOKIE
        mock_request.headers = MOCK_HEADER
        cookie = controler.login_get()
        mock_ucs.assert_called_with(HOST, USER, PASS, secure=False)
        mock_ucs.return_value.login.assert_called_once()
        self.assertIsNone(cookie, 'returned cookie does not equal None')
    # --- lightweight stand-ins for ucsmsdk managed objects -----------------
    class mockCatalogClass:
        # Catalog MO stub: one public field plus an underscore-prefixed one
        # (the controller is expected to strip private members from output).
        def __init__(self, data):
            self._privData = "private 1"
            self.data = data

    class mockRackmountClass:
        # computeRackUnit stub (dn/rn/serial).
        def __init__(self, data):
            self.dn = data
            self.rn = data
            self.serial = "RK38"

    class mockPciEquipSlot:
        # PciEquipSlot stub exposing the left/right port MAC addresses.
        def __init__(self, l, r):
            self.mac_left = l
            self.mac_right = r

    class mockChassisClass:
        # EquipmentChassis stub (dn only).
        def __init__(self, data):
            self.dn = data

    class mockComputeBlade:
        # computeBlade stub (dn/rn).
        def __init__(self, data):
            self.dn = data
            self.rn = data

    class mockAaptorUnit:
        # AdaptorUnit stub carrying its base MAC address.
        def __init__(self, id, mac):
            self.dn = id
            self.rn = id
            self.base_mac = mac

    class mockServiceProfileClass:
        # Service-profile org stub ("level" attribute only).
        def __init__(self, data):
            self.level = data

    class mocklsServer:
        # LsServer stub (dn only).
        def __init__(self, data):
            self.dn = data

    class mocklsServerMembers:
        # LsServer member stub built from a MOCK_ID_LOGICALSERVER_MEMBER dict.
        def __init__(self, data):
            self.dn = data['dn']
            self.rn = data['rn']
            self.pn_dn = data['pn_dn']
            self.assoc_state = data['assoc_state']

    class mockBlade:
        # Blade stub with a class id and the dn of its assigned profile.
        def __init__(self, class_id, dn, assigned_to_dn):
            self._class_id = class_id
            self.dn = dn
            self.assigned_to_dn = assigned_to_dn

    class mockServiceProfile:
        # Service-profile stub; `state` is accepted but not stored.
        def __init__(self, dn, class_id, state=None):
            self.dn = dn
            self._class_id = class_id

    class mockManagedObject:
        # Generic MO stub whose admin_power field the controller may mutate.
        def __init__(self, dn, class_id, admin_power='policy'):
            self.dn = dn
            self._class_id = class_id
            self.admin_power = admin_power

    class mockCurrentApp:
        # Flask current_app stub exposing a config dict with a "handlers" key.
        config = None

        def __init__(self, data):
            self.config = {"handlers": data}
    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testGetRackmountSuccess(self, mock_ucs, mock_request, mock_current_app):
        left_mac = "00:00:FF:38:64:00"
        right_mac = "00:00:FF:38:64:01"
        # setup UCS mocks: first query_children call yields the rack unit,
        # the second yields its PCI slot (source of the two MACs)
        mock_ucs.return_value.query_children.side_effect = \
            [[self.mockRackmountClass(data=MOCK_ID_RACKMOUNT)], [self.mockPciEquipSlot(left_mac, right_mac)]]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything")
        # call getRackmount
        result = controler.getRackmount()
        # verify UCS mocks were called with the header credentials
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        calls = [mock.call(class_id='computeRackUnit', in_dn='sys'),
                 mock.call(class_id=MOCK_ID_MACS, in_dn=MOCK_ID_RACKMOUNT)]
        mock_ucs.return_value.query_children.assert_has_calls(calls)
        # verify return data: name/path come from the dn, macs from the slot
        di = {
            'macs': [left_mac, right_mac],
            'name': MOCK_ID_RACKMOUNT,
            'path': MOCK_ID_RACKMOUNT
        }
        self.assertEqual(di, result[0][0], 'result does not contain member "data"')

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testGetRackmounForbidden(self, mock_ucs, mock_request, mock_current_app):
        """Invoke a 403 http error"""
        # setup UCS mocks: a None handler means authentication failed
        mock_ucs.return_value = None
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything")
        # call getRackmount
        result = controler.getRackmount()
        # verify UCS mocks were not called past the handler lookup
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        self.assertEqual(result, ({'status': 403, 'message': 'Forbidden', 'stack': ''}, 403))

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testGetRackmounInternalServerError(self, mock_ucs, mock_request, mock_current_app):
        """Invoke a 500 http error"""
        # a non-list query_children result makes the fetch fail
        mock_ucs.return_value.logout.return_value = True
        mock_ucs.return_value.query_children.side_effect = [""]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything")
        # call getRackmount
        result = controler.getRackmount()
        # verify return data
        self.assertEqual(result, ({'message': "Couldn't fetch computeRackUnits", 'stack': '', 'status': 500}, 500))
@mock.patch('controllers.ucs_controller.current_app')
@mock.patch('controllers.ucs_controller.request')
@mock.patch('controllers.ucs_controller.Ucs._getHandler')
def testGetCatalogSuccess(self, mock_ucs, mock_request, mock_current_app):
# setup UCS mocks
mock_ucs.return_value.query_children.side_effect = [[self.mockCatalogClass(data=MOCK_DATA)]]
mock_request.headers = MOCK_HEADER
mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
# call getCaltalog
result = controler.getCatalog(identifier=MOCK_ID)
# verify UCS Mocks were called
headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
calls = [mock.call(in_dn=MOCK_ID)]
mock_ucs.return_value.query_children.assert_has_calls(calls)
# verify return data
self.assertIn({'data': 'data'}, result[0], 'result does not contain member "data"')
self.assertEqual(MOCK_DATA, result[0][0]['data'], 'result["data"] does not equal "{}"'.format(MOCK_DATA))
self.assertNotIn('_privData', result[0], 'result contains private member "_privData"')\
@mock.patch('controllers.ucs_controller.current_app')
@mock.patch('controllers.ucs_controller.request')
@mock.patch('controllers.ucs_controller.Ucs._getHandler')
def testGetCatalogForbiden(self, mock_ucs, mock_request, mock_current_app):
"""Invoke a 403 http error"""
# setup UCS mocks
mock_ucs.return_value = None
mock_request.headers = MOCK_HEADER
mock_current_app.config.get.return_value = self.mockCurrentApp("Anything")
# call getCaltalog
result = controler.getCatalog(identifier=MOCK_ID)
# verify UCS Mocks were not called
headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
self.assertEqual(result, ({'status': 403, 'message': 'Forbidden', 'stack': ''}, 403))
@mock.patch('controllers.ucs_controller.current_app')
@mock.patch('controllers.ucs_controller.request')
@mock.patch('service.ucs.UcsHandle')
def testGetCatalogInternalServerError(self, mock_ucs, mock_request, mock_current_app):
"""Invoke a 500 http error"""
# setup UCS mocks
mock_ucs.return_value.login.return_value = True
mock_ucs.return_value.logout.return_value = True
mock_ucs.return_value.query_children.side_effect = [[]]
mock_ucs.return_value.query_dn.side_effect = UcsException(500, "Mock Server Error")
mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
mock_request.headers = MOCK_HEADER
# call getCaltalog
result = controler.getCatalog(identifier=MOCK_ID)
# verify UCS Mocks were called
mock_ucs.assert_called_with(HOST, USER, PASS, secure=False)
mock_ucs.return_value.login.assert_called_once()
calls = [mock.call(in_dn=MOCK_ID)]
mock_ucs.return_value.query_children.assert_has_calls(calls)
calls = [mock.call(dn=MOCK_ID)]
mock_ucs.return_value.query_dn.assert_has_calls(calls)
# verify return data
self.assertEqual(result, ({'message': "Internal Server Error", 'stack': 'Mock Server Error', 'status': 500}, 500))
    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testGetChassisSuccess(self, mock_ucs, mock_request, mock_current_app):
        # setup UCS mocks: one chassis, two blades, one adaptor per blade
        # (side_effect order matches the controller's query sequence)
        mock_ucs.return_value.login.return_value = True
        mock_ucs.return_value.logout.return_value = True
        mock_ucs.return_value.query_children.side_effect = \
            [[self.mockChassisClass(data=MOCK_ID_CHASSIS)],
             [self.mockComputeBlade(data=MOCK_ID_COMPUTEBLADE), self.mockComputeBlade(data=MOCK_ID_COMPUTEBLADE_2)],
             [self.mockAaptorUnit(MOCK_ID_ADAPTORUNIT, MOCK_MAC)],
             [self.mockAaptorUnit(MOCK_ID_ADAPTORUNIT_2, MOCK_MAC_2)]]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call getChassis
        result = controler.getChassis()
        # verify UCS mocks were called with the header credentials
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        # verify return data: each blade member carries its adaptor MAC
        di = \
            [
                {
                    'macs': [MOCK_MAC],
                    'name': MOCK_ID_COMPUTEBLADE,
                    'path': MOCK_ID_COMPUTEBLADE
                },
                {
                    'macs': [MOCK_MAC_2],
                    'name': MOCK_ID_COMPUTEBLADE_2,
                    'path': MOCK_ID_COMPUTEBLADE_2
                }
            ]
        self.assertEqual(1, len(result[0]), "expected 1 chassis, got {}".format(len(result)))
        self.assertEqual(di, result[0][0]["members"], "Unexpected Chassis Data")

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testGetChassisForbiden(self, mock_ucs, mock_request, mock_current_app):
        """Invoke a 403 http error"""
        # setup UCS mocks: a None handler means authentication failed
        mock_ucs.return_value = None
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call getChassis
        result = controler.getChassis()
        # verify UCS mocks were not called past the handler lookup
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        self.assertEqual(result, ({'status': 403, 'message': 'Forbidden', 'stack': ''}, 403))

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testGetChassisInternalServerError(self, mock_ucs, mock_request, mock_current_app):
        """Invoke a 500 http error"""
        # setup UCS mocks: non-list query results make the fetch fail
        mock_ucs.return_value.logout.return_value = True
        mock_ucs.return_value.query_children.side_effect = ["", "", ""]
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        mock_request.headers = MOCK_HEADER
        # call getChassis
        result = controler.getChassis()
        # verify UCS mocks were called
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        # verify return data
        self.assertEqual(result, ({'status': 500, 'message': "Couldn't fetch EquipmentChassis", 'stack': ''}, 500))
    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testGetServiceProfileSuccess(self, mock_ucs, mock_request, mock_current_app):
        # expected values come from MOCK_ID_LOGICALSERVER_MEMBER
        serverData = "sys/rack-unit-3"
        state = "associated"
        # setup UCS mocks: org query, then its logical servers, then the
        # member record for each logical server
        mock_ucs.return_value.login.return_value = True
        mock_ucs.return_value.logout.return_value = True
        mock_ucs.return_value.query_classid.side_effect = \
            [[self.mockServiceProfileClass(data=MOCK_ID_SERVICEPROFILE)]
             ]
        mock_ucs.return_value.query_children.side_effect = \
            [[self.mocklsServer(data=MOCK_ID_LOGICALSERVER)]
             ]
        mock_ucs.return_value.query_dn.side_effect = \
            [self.mocklsServerMembers(data=MOCK_ID_LOGICALSERVER_MEMBER)]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call getServiceProfile
        result = controler.getServiceProfile()
        # verify return data
        di = {
            'name': MOCK_ID_LOGICALSERVER,
            'path': MOCK_ID_LOGICALSERVER,
            'associatedServer': serverData,
            'assoc_state': state
        }
        self.assertEqual(di, result[0]['ServiceProfile']['members'][0], "Unexpected Chassis Data")

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testGetServiceProfileForbiden(self, mock_ucs, mock_request, mock_current_app):
        """Invoke a 403 http error"""
        # setup UCS mocks: a None handler means authentication failed
        mock_ucs.return_value = None
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call getServiceProfile
        result = controler.getServiceProfile()
        # verify UCS mocks were not called past the handler lookup
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        self.assertEqual(result, ({'status': 403, 'message': 'Forbidden', 'stack': ''}, 403))

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testGetServiceProfileInternalServerError(self, mock_ucs, mock_request, mock_current_app):
        """Invoke a 500 http error"""
        # setup UCS mocks: a non-list query result makes the fetch fail
        mock_ucs.return_value.query_children.side_effect = [""]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call getServiceProfile
        result = controler.getServiceProfile()
        # verify UCS mocks were called
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        # verify return data
        self.assertEqual(result, ({'status': 500, 'message': "Couldn't fetch ServiceProfile", 'stack': ''}, 500))
    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('service.ucs.LsPower')
    @mock.patch('service.ucs.LsPowerConsts')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testPowerSuccess(self, mock_ucs, mock_request, mock_LsPower_Consts, mock_LsPower, mock_current_app):
        # setup UCS mocks: blade lookup resolves to an associated service
        # profile, which is then queried by its dn
        mock_LsPower_Consts.return_value.STATE_DOWN.return_value = True
        mock_LsPower.return_value.return_value = True
        mock_ucs.return_value.query_dn.side_effect = \
            [self.mockBlade("Non_LsServer", "sys/chassis-3/blade-3", "org-root/ls-ps1"),
             self.mockServiceProfile("org-root/ls-ps1", "LsServer"), ""]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call powerMgmt
        controler.powerMgmt("sys/chassis-3/blade-3", "off")
        # verify UCS mocks were called: blade dn, then profile dn
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        assert mock_ucs.return_value.query_dn.call_count == 2
        # assert that the appropriate service profile constant has been set
        assert mock_LsPower_Consts.STATE_DOWN is not None

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testPowerInternalServerError_1(self, mock_ucs, mock_request, mock_current_app):
        """Invoke a 500 http error by sending an invalid power operation 'off3'"""
        mock_ucs.return_value.logout.return_value = True
        mock_ucs.return_value.query_dn.side_effect = \
            [self.mockBlade("Non_LsServer", "sys/chassis-3/blade-3", "org-root/ls-ps1"),
             self.mockServiceProfile("org-root/ls-ps1", "LsServer"), ""]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call powerMgmt with an action outside the supported set
        result = controler.powerMgmt("sys/chassis-3/blade-3", "off3")
        # verify UCS mocks were called and the session was closed
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        mock_ucs.return_value.logout.assert_called_once()
        assert mock_ucs.return_value.query_dn.call_count == 2
        # expected stack text must match the controller's message verbatim
        di = ({'status': 500, 'message': 'Internal Server Error',
               'stack': "action 'off3' is not valid. Choose one of the following: "
                        "'on', 'off', 'cycle-wait','cycle-immediate', 'bmc-reset-immediate', "
                        "'ipmi-reset', 'hard-reset-immediate', 'soft-shut-down' "}, 500)
        self.assertEqual(di, result, "Unexpected exception Data")

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testPowerInternalServerError_2(self, mock_ucs, mock_request, mock_current_app):
        """Invoke a 500 http error by sending an invalid dn"""
        # query_dn returning None means the server dn does not exist
        mock_ucs.return_value.logout.return_value = True
        mock_ucs.return_value.query_dn.side_effect = \
            [None,
             "", ""]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call powerMgmt
        result = controler.powerMgmt("sys/chassis-3/blade-3", "off")
        # verify UCS mocks were called; lookup stops after the first miss
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        mock_ucs.return_value.logout.assert_called_once()
        assert mock_ucs.return_value.query_dn.call_count == 1
        # NOTE: "sever" typo mirrors the controller's actual message text
        di = ({'status': 500, 'message': 'Internal Server Error', 'stack': 'sever sys/chassis-3/blade-3 does not exist'}, 500)
        self.assertEqual(di, result, "Unexpected exception Data")

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testPowerInternalServerError_3(self, mock_ucs, mock_request, mock_current_app):
        """Invoke a 500 http error by not associating a server to a service profile"""
        # blade exists but its assigned_to_dn is empty -> not associated
        mock_ucs.return_value.logout.return_value = True
        mock_ucs.return_value.query_dn.side_effect = \
            [self.mockBlade("Non_LsServer", "sys/chassis-3/blade-3", ""),
             self.mockServiceProfile("org-root/ls-ps1", "LsServer"), ""]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call powerMgmt
        result = controler.powerMgmt("sys/chassis-3/blade-3", "off")
        # verify UCS mocks were called
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        mock_ucs.return_value.logout.assert_called_once()
        # assert mock_ucs.return_value.query_dn.call_count == 2
        di = ({'status': 500, 'message': 'Internal Server Error',
               'stack': 'sever sys/chassis-3/blade-3 is not associated to a service profile'}, 500)
        self.assertEqual(di, result, "Unexpected exception Data")

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testPowerMgmtForbiden(self, mock_ucs, mock_request, mock_current_app):
        """Invoke a 403 http error"""
        # setup UCS mocks: a None handler means authentication failed
        mock_ucs.return_value = None
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call powerMgmt
        result = controler.powerMgmt(identifier=MOCK_ID)
        # verify UCS mocks were not called past the handler lookup
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        self.assertEqual(result, ({'status': 403, 'message': 'Forbidden', 'stack': ''}, 403))
@mock.patch('controllers.ucs_controller.current_app')
@mock.patch('service.ucs.LsPower')
@mock.patch('service.ucs.LsPowerConsts')
@mock.patch('controllers.ucs_controller.request')
@mock.patch('controllers.ucs_controller.Ucs._getHandler')
def testPowerStatusSuccess(self, mock_ucs, mock_request, mock_LsPower_Consts, mock_LsPower, mock_current_app):
# setup UCS mocks
mock_LsPower_Consts.return_value.state.return_value = True
mock_LsPower.return_value.return_value = "off"
mock_ucs.return_value.query_dn.side_effect = \
[self.mockBlade("Non_LsServer", "sys/chassis-3/blade-3", "org-root/ls-ps1"),
self.mockServiceProfile("org-root/ls-ps1", "LsServer"), ""]
mock_request.headers = MOCK_HEADER
mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
# call powerMgmt
controler.powerStatus("sys/chassis-3/blade-3")
# verify UCS Mocks were called
headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
assert mock_ucs.return_value.query_dn.call_count == 2
# assert that the appropriate service profile constant has been set
assert mock_LsPower_Consts.state.value is not "off"
@mock.patch('controllers.ucs_controller.current_app')
@mock.patch('controllers.ucs_controller.request')
@mock.patch('controllers.ucs_controller.Ucs._getHandler')
def testPowerStatusForbiden(self, mock_ucs, mock_request, mock_current_app):
"""Invoke a 403 http error"""
# setup UCS mocks
mock_ucs.return_value = None
mock_request.headers = MOCK_HEADER
# call powerMgmt
result = controler.powerStatus(identifier=MOCK_ID)
# verify UCS Mocks were not called
headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
self.assertEqual(result, ({'status': 403, 'message': 'Forbidden', 'stack': ''}, 403))
    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testPowerPhysicalSuccess(self, mock_ucs, mock_request, mock_current_app):
        # setup UCS mocks: dn is an LsServer profile pointing at a physical
        # blade; physical=True routes through the blade's managed object
        mockBlade = self.mockBlade("LsServer", "ls-Chassis3Blade3", "sys/chassis-3/blade-3")
        mockMo = self.mockManagedObject("org-root/ls-ps1", "LsServer")
        mock_ucs.return_value.query_dn.side_effect = [mockBlade, mockMo, ""]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call powerMgmt with physical=True
        controler.powerMgmt("ls-Chassis3Blade3", "off", True)
        # verify UCS mocks were called for both dns
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        self.assertEqual(mock_ucs.return_value.query_dn.call_count, 2)
        calls = [mock.call("ls-Chassis3Blade3"), mock.call("sys/chassis-3/blade-3")]
        mock_ucs.return_value.query_dn.assert_has_calls(calls)
        # the managed object is staged and committed with admin_power updated
        calls = [mock.call(mockMo)]
        mock_ucs.return_value.set_mo.assert_has_calls(calls)
        self.assertEqual(mockMo.admin_power, "admin-down")
        mock_ucs.return_value.commit.assert_called_once()

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testPowerPhysicalSuccess2(self, mock_ucs, mock_request, mock_current_app):
        # setup UCS mocks: dn is already the physical blade, so only one
        # query_dn lookup is needed
        mockMo = self.mockManagedObject("sys/chassis-3/blade-3", "compuetBlade")
        mock_ucs.return_value.query_dn.side_effect = [mockMo, ""]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call powerMgmt with physical=True
        controler.powerMgmt("sys/chassis-3/blade-3", "off", True)
        # verify UCS mocks were called
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        self.assertEqual(mock_ucs.return_value.query_dn.call_count, 1)
        calls = [mock.call("sys/chassis-3/blade-3")]
        mock_ucs.return_value.query_dn.assert_has_calls(calls)
        calls = [mock.call(mockMo)]
        mock_ucs.return_value.set_mo.assert_has_calls(calls)
        self.assertEqual(mockMo.admin_power, "admin-down")
        mock_ucs.return_value.commit.assert_called_once()

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.request')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    def testPowerPhysicalFailure(self, mock_ucs, mock_request, mock_current_app):
        # setup UCS mocks: "foo" is not a supported physical power action
        mock_ucs.return_value.login.return_value = True
        mock_ucs.return_value.logout.return_value = True
        mockMo = self.mockManagedObject("sys/chassis-3/blade-3", "compuetBlade")
        mock_ucs.return_value.query_dn.side_effect = [mockMo, ""]
        mock_request.headers = MOCK_HEADER
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        # call powerMgmt with an invalid action
        result = controler.powerMgmt("sys/chassis-3/blade-3", "foo", True)
        # verify UCS mocks were called and the session was closed
        headers = {'ucs-user': USER, 'ucs-host': HOST, 'ucs-password': PASS}
        mock_ucs.assert_called_with(headers, mock_current_app.config.get("handlers"))
        mock_ucs.return_value.logout.assert_called_once()
        self.assertEqual(mock_ucs.return_value.query_dn.call_count, 1)
        calls = [mock.call("sys/chassis-3/blade-3")]
        mock_ucs.return_value.query_dn.assert_has_calls(calls)
        self.assertEqual(result[0]['status'], 500, "expected status 500")
    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    @mock.patch('controllers.ucs_controller.request')
    def testGetPollersSuccess(self, mock_request, mock_getHandler, mock_current_app):
        """Get Pollers Successfully"""
        # one query_classid result per requested class id, index-aligned
        # with MOCK_CLASS_ID_DATA
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything").config
        mock_getHandler.return_value.query_classid.side_effect = [
            [self.mockCatalogClass(data=MOCK_CLASS_ID_DATA[0])],
            [self.mockCatalogClass(data=MOCK_CLASS_ID_DATA[1])]
        ]
        mock_request.headers = MOCK_HEADER
        result = controler.getPollers(identifier=MOCK_ID, classIds=MOCK_CLASS_IDS)
        calls = []
        for i in range(2):
            # each class id must have been queried with a dn-regex filter
            mocked_class_id = MOCK_CLASS_IDS[i]
            mocked_filter_str = '(dn, "{}.*", type="re")'.format(MOCK_ID)
            mocked_call = mock.call(class_id=mocked_class_id, filter_str=mocked_filter_str)
            calls.append(mocked_call)
            self.assertEqual(MOCK_CLASS_ID_DATA[i], result[0][mocked_class_id][0]['data'],
                             'CPU data in result does not equal "{}"'.format(MOCK_CLASS_ID_DATA[i]))
        mock_getHandler.return_value.query_classid.assert_has_calls(calls)

    @mock.patch('controllers.ucs_controller.current_app')
    @mock.patch('controllers.ucs_controller.Ucs._getHandler')
    @mock.patch('controllers.ucs_controller.request')
    def testGetPollersForbiden(self, mock_request, mock_getHandler, mock_current_app):
        """Invoke a 403 http error"""
        # a None handler means authentication failed
        mock_current_app.config.get.return_value = self.mockCurrentApp("Anything")
        mock_getHandler.return_value = None
        mock_request.headers = MOCK_HEADER
        result = controler.getPollers(identifier=MOCK_ID, classIds=MOCK_CLASS_IDS)
        self.assertEqual(result, ({'status': 403, 'message': 'Forbidden', 'stack': ''}, 403))
| 49.960998
| 126
| 0.680656
| 3,909
| 32,025
| 5.32617
| 0.073932
| 0.061816
| 0.085303
| 0.080644
| 0.823775
| 0.799856
| 0.78122
| 0.768924
| 0.748271
| 0.737272
| 0
| 0.011675
| 0.200281
| 32,025
| 640
| 127
| 50.039063
| 0.801257
| 0.05121
| 0
| 0.596386
| 0
| 0.002008
| 0.217092
| 0.116074
| 0
| 0
| 0
| 0
| 0.154618
| 0
| null | null | 0.048193
| 0.008032
| null | null | 0.004016
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
89772f8e510ea964f9fbf102e637a819b60ea5ff
| 1,808
|
py
|
Python
|
diff_register/link.py
|
dash2927/diff_register
|
6c5fa73ad6689e7d465f0a6676329fec61ed0685
|
[
"MIT"
] | null | null | null |
diff_register/link.py
|
dash2927/diff_register
|
6c5fa73ad6689e7d465f0a6676329fec61ed0685
|
[
"MIT"
] | null | null | null |
diff_register/link.py
|
dash2927/diff_register
|
6c5fa73ad6689e7d465f0a6676329fec61ed0685
|
[
"MIT"
] | 4
|
2018-05-13T17:14:17.000Z
|
2019-02-07T07:45:41.000Z
|
# def trajectory_mglia_features(tfeatures, mglia_features):
# """
# Calculates microglia-associated features for each trajectory.
#
# Parameters
# ----------
#
# Returns
# -------
#
# Examples
# --------
#
# """
#
# in_cell
# in_soma
# nearest_cell
# nearest_process
# t_in_cell
# t_in_ECS
# cross
# cross_t
# local_cell_density
# cell_associated
#
# return new_tfeatures
#
#
# def trajectory_neuron_features(tfeatures, neuron_features):
# """
# Calculates neuron-associated features for each trajectory.
#
# Parameters
# ----------
#
# Returns
# -------
#
# Examples
# --------
#
# """
#
# in_cell
# in_soma
# nearest_cell
# nearest_process
# t_in_cell
# t_in_ECS
# cross
# cross_t
# local_cell_density
# cell_associated
#
# return new_tfeatures
#
#
# def trajectory_DAPI_features(tfeatures, DAPI_features):
# """
# Calculates nucleus-associated features for each trajectory.
#
# Parameters
# ----------
#
# Returns
# -------
#
# Examples
# --------
#
# """
#
# in_cell
# in_soma
# nearest_cell
# nearest_process
# t_in_cell
# t_in_ECS
# cross
# cross_t
# local_cell_density
# cell_associated
#
# return new_tfeatures
#
#
# def trajectory_cell_features(tfeatures, cell_features):
# """
# Calculates cell-associated features for each trajectory.
#
# Parameters
# ----------
#
# Returns
# -------
#
# Examples
# --------
#
# """
#
# in_cell
# in_soma
# nearest_cell
# nearest_process
# t_in_cell
# t_in_ECS
# cross
# cross_t
# local_cell_density
# cell_associated
#
# return new_tfeatures
| 15.721739
| 67
| 0.550885
| 168
| 1,808
| 5.547619
| 0.166667
| 0.051502
| 0.090129
| 0.107296
| 0.767167
| 0.767167
| 0.767167
| 0.767167
| 0.767167
| 0.767167
| 0
| 0
| 0.304204
| 1,808
| 114
| 68
| 15.859649
| 0.740859
| 0.873341
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
89a01f85970d62c54409c3c99ce6383642aaee44
| 3,564
|
py
|
Python
|
tests/test_cmd_view.py
|
a1eko/treem
|
41039b0734bfe84fe637783842849038630ecb7f
|
[
"MIT"
] | 1
|
2020-10-06T13:09:02.000Z
|
2020-10-06T13:09:02.000Z
|
tests/test_cmd_view.py
|
a1eko/treem
|
41039b0734bfe84fe637783842849038630ecb7f
|
[
"MIT"
] | null | null | null |
tests/test_cmd_view.py
|
a1eko/treem
|
41039b0734bfe84fe637783842849038630ecb7f
|
[
"MIT"
] | 1
|
2021-09-22T14:17:22.000Z
|
2021-09-22T14:17:22.000Z
|
"""Testing CLI command view."""
import subprocess
import os
def test_png():
    """Render a simple branch to PNG and expect a clean, silent exit."""
    os.chdir(os.path.dirname(__file__) + '/data')
    completed = subprocess.run(
        ['swc', 'view', 'pass_simple_branch.swc',
         '-o', '/tmp/test_treem.png'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True)
    assert completed.returncode == 0
    assert completed.stdout == ''
    assert completed.stderr == ''
def test_pdf():
    """Exercise the full set of plot options with a PDF output file."""
    os.chdir(os.path.dirname(__file__) + '/data')
    completed = subprocess.run(
        ['swc', 'view', 'pass_simple_branch.swc',
         '-t', 'title', '--no-axes', '--show-id',
         '--scale', '100', '-c', 'cells',
         '-b', '2', '-s', '4', '-m', '3', '9',
         '-a', '20', '30',
         '-o', '/tmp/test_treem.pdf'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True)
    assert completed.returncode == 0
    assert completed.stdout == ''
    assert completed.stderr == ''
def test_shadow():
    """Tests for plot options: two morphologies rendered in 'shadow' mode."""
    os.chdir(os.path.dirname(__file__) + '/data')
    # subprocess.run replaces the manual Popen/communicate pair.
    proc = subprocess.run(['swc', 'view', 'pass_simple_branch.swc',
                           'pass_zjump.swc', '-c', 'shadow',
                           '-o', '/tmp/test_treem.png'],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True)
    assert proc.returncode == 0
    assert proc.stdout == ''
    assert proc.stderr == ''
def test_proj_xy():
    """Tests for plot projection onto the xy plane."""
    os.chdir(os.path.dirname(__file__) + '/data')
    # subprocess.run replaces the manual Popen/communicate pair.
    proc = subprocess.run(['swc', 'view', 'pass_simple_branch.swc',
                           '-j', 'xy',
                           '-o', '/tmp/test_treem.png'],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True)
    assert proc.returncode == 0
    assert proc.stdout == ''
    assert proc.stderr == ''
def test_proj_xz():
    """Tests for plot projection onto the xz plane."""
    os.chdir(os.path.dirname(__file__) + '/data')
    # subprocess.run replaces the manual Popen/communicate pair.
    proc = subprocess.run(['swc', 'view', 'pass_simple_branch.swc',
                           '-j', 'xz',
                           '-o', '/tmp/test_treem.png'],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True)
    assert proc.returncode == 0
    assert proc.stdout == ''
    assert proc.stderr == ''
def test_proj_yz():
    """Tests for plot projection onto the yz plane."""
    os.chdir(os.path.dirname(__file__) + '/data')
    # subprocess.run replaces the manual Popen/communicate pair.
    proc = subprocess.run(['swc', 'view', 'pass_simple_branch.swc',
                           '-j', 'yz',
                           '-o', '/tmp/test_treem.png'],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True)
    assert proc.returncode == 0
    assert proc.stdout == ''
    assert proc.stderr == ''
| 36.742268
| 69
| 0.477273
| 336
| 3,564
| 4.889881
| 0.202381
| 0.102252
| 0.032867
| 0.047474
| 0.894705
| 0.894705
| 0.894705
| 0.894705
| 0.894705
| 0.894705
| 0
| 0.00763
| 0.37486
| 3,564
| 96
| 70
| 37.125
| 0.729803
| 0.049663
| 0
| 0.776316
| 0
| 0
| 0.125933
| 0.039391
| 0
| 0
| 0
| 0
| 0.236842
| 1
| 0.078947
| false
| 0.092105
| 0.026316
| 0
| 0.105263
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
89a529b9a3e25587a11feaa73e5ce9883fa6b6b9
| 6,327
|
py
|
Python
|
tests/test_api.py
|
irishgordo/newrelic-lambda-cli
|
94039961ee2ca28ada1c4e78eba50a5ba3e84597
|
[
"Apache-2.0"
] | 29
|
2019-11-21T18:39:02.000Z
|
2022-02-11T19:28:25.000Z
|
tests/test_api.py
|
irishgordo/newrelic-lambda-cli
|
94039961ee2ca28ada1c4e78eba50a5ba3e84597
|
[
"Apache-2.0"
] | 118
|
2019-11-21T18:32:00.000Z
|
2022-03-31T23:58:31.000Z
|
tests/test_api.py
|
irishgordo/newrelic-lambda-cli
|
94039961ee2ca28ada1c4e78eba50a5ba3e84597
|
[
"Apache-2.0"
] | 28
|
2019-11-21T18:02:38.000Z
|
2022-02-26T10:08:38.000Z
|
from unittest.mock import Mock
from newrelic_lambda_cli.api import (
create_integration_account,
enable_lambda_integration,
NewRelicGQL,
)
from .conftest import integration_install
def test_create_integration_account():
    """create_integration_account returns the already-linked account when one
    exists, and the account produced by the cloudLinkAccount mutation when
    no account is linked yet."""
    mock_gql = NewRelicGQL("123456789", "foobar")
    # Scenario 1: the account is already linked, so the single query result
    # is returned directly.
    mock_gql.query = Mock(
        return_value={
            "actor": {
                "account": {
                    "cloud": {
                        "linkedAccounts": [
                            {
                                "authLabel": "arn:aws:iam::123456789:role/FooBar",
                                "externalId": "123456789",
                                "name": "Foo Bar",
                            }
                        ]
                    }
                }
            }
        }
    )
    # Renamed from `input` so the builtin is not shadowed.
    install_input = integration_install(
        nr_account_id=123456789, linked_account_name="Foo Bar"
    )
    role = {"Role": {"Arn": "arn:aws:iam::123456789:role/FooBar"}}
    assert create_integration_account(mock_gql, install_input, role) == {
        "authLabel": "arn:aws:iam::123456789:role/FooBar",
        "externalId": "123456789",
        "name": "Foo Bar",
    }
    # Scenario 2: nothing linked yet -- the first query finds no accounts,
    # the second returns the result of the link mutation.
    mock_gql.query = Mock(
        side_effect=(
            {"actor": {"account": {"cloud": {"linkedAccounts": []}}}},
            {
                "cloudLinkAccount": {
                    "linkedAccounts": [
                        {
                            "authLabel": "arn:aws:iam::123456789:role/FooBar",
                            "externalId": "123456789",
                            "name": "Foo Bar",
                        }
                    ]
                }
            },
        )
    )
    assert create_integration_account(mock_gql, install_input, role) == {
        "authLabel": "arn:aws:iam::123456789:role/FooBar",
        "externalId": "123456789",
        "name": "Foo Bar",
    }
def test_enable_lambda_integration():
    """enable_lambda_integration across four linked-account scenarios:
    not linked, linked in PUSH mode, linked with the integration already on,
    and linked with the integration off (it then gets configured)."""
    mock_gql = NewRelicGQL("123456789", "foobar")
    # Scenario 1: no linked account at all -> cannot enable, returns False.
    mock_gql.query = Mock(
        return_value={"actor": {"account": {"cloud": {"linkedAccounts": []}}}},
    )
    # Renamed from `input` so the builtin is not shadowed.
    install_input = integration_install(
        nr_account_id=123456789, linked_account_name="Foo Bar"
    )
    lambda_enabled = enable_lambda_integration(mock_gql, install_input, 123456789)
    assert (
        lambda_enabled is False
    ), "Account should be linked to enable the lambda integration"
    assert mock_gql.query.call_count == 1
    # Scenario 2: account linked in PUSH (metric-stream) mode -> already
    # enabled after a single query.
    mock_gql.query = Mock(
        side_effect=(
            {
                "actor": {
                    "account": {
                        "cloud": {
                            "linkedAccounts": [
                                {
                                    "authLabel": "arn:aws:iam::123456789:role/FooBar",
                                    "externalId": "123456789",
                                    "id": 123456789,
                                    "name": "Foo Bar",
                                    "metricCollectionMode": "PUSH",
                                }
                            ]
                        }
                    }
                }
            },
        )
    )
    lambda_enabled = enable_lambda_integration(mock_gql, install_input, 123456789)
    assert mock_gql.query.call_count == 1
    assert (
        lambda_enabled is True
    ), "Accounts in PUSH mode (using Cloudwatch Metrics stream) should already have the Lambda integration enabled"
    # Scenario 3: account linked in PULL mode with the lambda integration
    # already enabled -> second query confirms it, no mutation needed.
    mock_gql.query = Mock(
        side_effect=(
            {
                "actor": {
                    "account": {
                        "cloud": {
                            "linkedAccounts": [
                                {
                                    "authLabel": "arn:aws:iam::123456789:role/FooBar",
                                    "externalId": "123456789",
                                    "id": 123456789,
                                    "name": "Foo Bar",
                                    "metricCollectionMode": "PULL",
                                }
                            ]
                        }
                    }
                }
            },
            {
                "actor": {
                    "account": {
                        "cloud": {
                            "linkedAccount": {
                                "integrations": [
                                    {"service": {"isEnabled": True, "slug": "lambda"}}
                                ]
                            }
                        }
                    },
                }
            },
        )
    )
    lambda_enabled = enable_lambda_integration(mock_gql, install_input, 123456789)
    assert mock_gql.query.call_count == 2
    assert (
        lambda_enabled is True
    ), "Account is linked and already has the lambda integration enabled"
    # Scenario 4: account linked in PULL mode without the integration ->
    # a third query (the configure mutation) enables it.
    mock_gql.query = Mock(
        side_effect=(
            {
                "actor": {
                    "account": {
                        "cloud": {
                            "linkedAccounts": [
                                {
                                    "authLabel": "arn:aws:iam::123456789:role/FooBar",
                                    "externalId": "123456789",
                                    "id": 123456789,
                                    "name": "Foo Bar",
                                    "metricCollectionMode": "PULL",
                                }
                            ]
                        }
                    }
                }
            },
            {
                "actor": {
                    "account": {"cloud": {"linkedAccount": {"integrations": []}}},
                }
            },
            {
                "cloudConfigureIntegration": {
                    "integrations": [
                        {
                            "id": 123456789,
                            "name": "Foo Bar",
                            "service": {"isEnabled": True, "slug": "lambda"},
                        }
                    ]
                }
            },
        )
    )
    lambda_enabled = enable_lambda_integration(mock_gql, install_input, 123456789)
    assert mock_gql.query.call_count == 3
    assert (
        lambda_enabled is True
    ), "Account is linked but didn't have the lambda integration enabled, so it should be configured"
| 33.47619
| 115
| 0.3896
| 411
| 6,327
| 5.824818
| 0.199513
| 0.052632
| 0.050125
| 0.06015
| 0.830827
| 0.77736
| 0.765664
| 0.753551
| 0.720134
| 0.720134
| 0
| 0.079268
| 0.507508
| 6,327
| 188
| 116
| 33.654255
| 0.689024
| 0
| 0
| 0.465116
| 0
| 0
| 0.220484
| 0.046942
| 0
| 0
| 0
| 0
| 0.05814
| 1
| 0.011628
| false
| 0
| 0.017442
| 0
| 0.02907
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
89a746a653f8d6dc7683406d720411193bfea1c0
| 42
|
py
|
Python
|
katas/kyu_7/sum_of_all_arguments.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
katas/kyu_7/sum_of_all_arguments.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
katas/kyu_7/sum_of_all_arguments.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
def sum_args(*args):
    """Return the sum of all positional arguments (0 when none are given)."""
    total = 0
    for value in args:
        total += value
    return total
| 14
| 20
| 0.666667
| 7
| 42
| 3.857143
| 0.571429
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 42
| 2
| 21
| 21
| 0.794118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
9829418094a68d5e66626d128d3b753f814a9ec5
| 80
|
py
|
Python
|
aswan/project/__init__.py
|
papsebestyen/aswan
|
ed1b2a3dae6a8b7de355edd75de8d4ad577c97cd
|
[
"MIT"
] | 1
|
2021-04-28T23:08:07.000Z
|
2021-04-28T23:08:07.000Z
|
aswan/project/__init__.py
|
papsebestyen/aswan
|
ed1b2a3dae6a8b7de355edd75de8d4ad577c97cd
|
[
"MIT"
] | 1
|
2022-01-22T22:02:55.000Z
|
2022-01-22T22:02:55.000Z
|
aswan/project/__init__.py
|
papsebestyen/aswan
|
ed1b2a3dae6a8b7de355edd75de8d4ad577c97cd
|
[
"MIT"
] | 2
|
2022-01-05T10:01:22.000Z
|
2022-02-16T10:58:46.000Z
|
# flake8: noqa
from .core import Project
from .creators import project_from_dir
| 20
| 38
| 0.8125
| 12
| 80
| 5.25
| 0.666667
| 0.412698
| 0.539683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014493
| 0.1375
| 80
| 3
| 39
| 26.666667
| 0.898551
| 0.15
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9840eba495e3cd70dc1e3268dbd3f8acd0b5af0c
| 989
|
py
|
Python
|
electrical/reactions/xmas.py
|
Amperture/twitch-sbc-integration
|
71ee86688a7735e6bb3d18c9896c1b8c7a3662d7
|
[
"MIT"
] | 10
|
2017-04-20T15:15:51.000Z
|
2021-11-17T20:08:01.000Z
|
electrical/reactions/xmas.py
|
Amperture/twitch-sbc-integration
|
71ee86688a7735e6bb3d18c9896c1b8c7a3662d7
|
[
"MIT"
] | null | null | null |
electrical/reactions/xmas.py
|
Amperture/twitch-sbc-integration
|
71ee86688a7735e6bb3d18c9896c1b8c7a3662d7
|
[
"MIT"
] | 2
|
2020-02-08T04:15:43.000Z
|
2021-11-04T09:18:43.000Z
|
import time
def react_chat_xmas(eventType, GPIO):
    """Blink the green and red LEDs alternately, 'xmas' style.

    :param eventType: unused here; kept for the common reaction signature
    :param GPIO: GPIO module/object exposing setup(), output() and the
                 OUT/HIGH/LOW pin constants
    """
    GREEN_LED = 'P8_7'
    RED_LED = 'P8_8'
    GPIO.setup(GREEN_LED, GPIO.OUT)
    GPIO.setup(RED_LED, GPIO.OUT)
    GPIO.output(GREEN_LED, GPIO.LOW)
    GPIO.output(RED_LED, GPIO.LOW)
    # The original unrolled this cycle four times verbatim; a loop emits the
    # exact same sequence of GPIO writes and sleeps, ending with both LEDs off.
    for _ in range(4):
        GPIO.output(GREEN_LED, GPIO.HIGH)
        time.sleep(0.5)
        GPIO.output(GREEN_LED, GPIO.LOW)
        GPIO.output(RED_LED, GPIO.HIGH)
        time.sleep(0.5)
        GPIO.output(RED_LED, GPIO.LOW)
| 23.547619
| 37
| 0.66633
| 168
| 989
| 3.767857
| 0.130952
| 0.221169
| 0.189573
| 0.255924
| 0.834123
| 0.834123
| 0.834123
| 0.834123
| 0.834123
| 0.834123
| 0
| 0.024969
| 0.190091
| 989
| 41
| 38
| 24.121951
| 0.765293
| 0
| 0
| 0.8125
| 0
| 0
| 0.008089
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0.03125
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
98544ef3a85d6cd8b4bda068981af6b7b5a217dc
| 15,213
|
py
|
Python
|
Scripts/heft_makespan_fix_inaccur.py
|
radical-experiments/campaign_manager
|
337660cf07a97933b9b516d6612353bd3f6592a8
|
[
"MIT"
] | null | null | null |
Scripts/heft_makespan_fix_inaccur.py
|
radical-experiments/campaign_manager
|
337660cf07a97933b9b516d6612353bd3f6592a8
|
[
"MIT"
] | null | null | null |
Scripts/heft_makespan_fix_inaccur.py
|
radical-experiments/campaign_manager
|
337660cf07a97933b9b516d6612353bd3f6592a8
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
from random import gauss, uniform
def get_makespan(curr_plan, num_resources, workflow_inaccur, positive=False, dynamic_res=False):
    '''
    Calculate makespan of a plan under randomly perturbed workflow sizes.

    :param curr_plan: iterable of placements shaped like
                      (workflow, resource, _, expected_finish); workflow has
                      'num_oper', resource has 'id' (1-based) and 'performance'
    :param num_resources: number of resources referenced by the plan
    :param workflow_inaccur: maximum relative error applied to 'num_oper'
    :param positive: draw the error from [0, workflow_inaccur] instead of
                     [-workflow_inaccur, workflow_inaccur]
    :param dynamic_res: if True, also jitter each resource's performance
                        with a Gaussian (sigma = 6.44% of the mean)
    :return: (makespan, reactive makespan, expected makespan) -- each the
             maximum over all resources
    '''
    reactive_resource_usage = [0] * num_resources
    resource_usage = [0] * num_resources
    expected = [0] * num_resources
    for placement in curr_plan:
        workflow = placement[0]
        resource = placement[1]
        resource_id = resource['id']
        expected_finish = placement[3]
        if dynamic_res:
            perf = gauss(resource['performance'], resource['performance'] * 0.0644)
        else:
            perf = resource['performance']
        if positive:
            inaccur = uniform(0, workflow_inaccur)
        else:
            inaccur = uniform(-workflow_inaccur, workflow_inaccur)
        exec_time = (workflow['num_oper'] * (1 + inaccur)) / perf
        reactive_resource_usage[resource_id - 1] += exec_time
        # A task never finishes before its planned finish time.
        resource_usage[resource_id - 1] = max(resource_usage[resource_id - 1] + exec_time, expected_finish)
        expected[resource_id - 1] = expected_finish
    return max(resource_usage), max(reactive_resource_usage), max(expected)
# ------------------------------------------------------------------------------
# Re-evaluate the HEFT plans for every inaccuracy level (5% ... 90%).
# For each level the campaigns are read from the 'DynHetero' CSV and
# re-simulated twice: with dynamic resource performance (results overwrite
# the 'DynHetero' file, exactly as the original unrolled code did) and with
# static performance (results go to the matching 'StHetero' file).
RESULT_COLUMNS = ['size', 'planner', 'plan', 'makespan', 'reactive',
                  'expected', 'mpn_snt', 'rect_snt', 'time']
for perc, inaccur in [(5, 0.05), (10, 0.1), (20, 0.2), (30, 0.3), (40, 0.4),
                      (50, 0.5), (60, 0.6), (70, 0.7), (80, 0.8), (90, 0.9)]:
    test_case = pd.read_csv('../Data/heft/DynHeteroResources_'
                            'StHeteroCampaignsHEFT_inaccur_p%dperc.csv' % perc)
    for dynamic, prefix in ((True, 'Dyn'), (False, 'St')):
        results = pd.DataFrame(columns=RESULT_COLUMNS)
        for _, row in test_case.iterrows():
            size = row['size']
            planner = row['planner']
            # NOTE(review): eval() executes arbitrary code from the CSV --
            # acceptable only because these files are produced locally.
            plan = eval(row['plan'])
            makespan, reactive, expected = get_makespan(
                plan, size, inaccur, dynamic_res=dynamic, positive=True)
            time = row['time']
            results.loc[len(results)] = [size, planner, plan, makespan,
                                         reactive, expected,
                                         makespan - expected,
                                         reactive - expected, time]
        results.to_csv('../Data/heft/%sHeteroResources_'
                       'StHeteroCampaignsHEFT_inaccur_p%dperc.csv'
                       % (prefix, perc), index=False)
| 53.006969
| 131
| 0.668967
| 1,802
| 15,213
| 5.498335
| 0.056049
| 0.129189
| 0.121114
| 0.16956
| 0.93147
| 0.923799
| 0.903714
| 0.897255
| 0.883327
| 0.883327
| 0
| 0.010382
| 0.126274
| 15,213
| 286
| 132
| 53.192308
| 0.735029
| 0.055742
| 0
| 0.651376
| 0
| 0
| 0.262387
| 0.151919
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004587
| false
| 0
| 0.013761
| 0
| 0.022936
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98725982e59476791cee02f920e73fb8a2a5d173
| 351
|
py
|
Python
|
example/list_compare.py
|
TalosThoren/python-demo
|
098b875767bc9813a8543ca4d940da0f187022eb
|
[
"MIT"
] | null | null | null |
example/list_compare.py
|
TalosThoren/python-demo
|
098b875767bc9813a8543ca4d940da0f187022eb
|
[
"MIT"
] | null | null | null |
example/list_compare.py
|
TalosThoren/python-demo
|
098b875767bc9813a8543ca4d940da0f187022eb
|
[
"MIT"
] | null | null | null |
def find_missing( current_list, target_list ):
    """Return the items of target_list that do not appear in current_list,
    preserving their order in target_list."""
    missing = []
    for item in target_list:
        if item not in current_list:
            missing.append(item)
    return missing
def compare( current_list, target_list ):
    """Diff two lists: what must be added to and deleted from current_list
    so that it holds the same items as target_list."""
    return {
        'additions': [item for item in target_list if item not in current_list],
        'deletions': [item for item in current_list if item not in target_list],
    }
| 43.875
| 71
| 0.754986
| 49
| 351
| 5.061224
| 0.306122
| 0.221774
| 0.205645
| 0.254032
| 0.258065
| 0.258065
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168091
| 351
| 7
| 72
| 50.142857
| 0.849315
| 0
| 0
| 0
| 0
| 0
| 0.051282
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
7f33198e04cd00d4c5e4a1463f75fa13627c0a46
| 9,994
|
py
|
Python
|
tests.py
|
joshiatul/simple_glm
|
4d986ce2d9b062e6c5a42e4df1450790c55ccc39
|
[
"MIT"
] | null | null | null |
tests.py
|
joshiatul/simple_glm
|
4d986ce2d9b062e6c5a42e4df1450790c55ccc39
|
[
"MIT"
] | null | null | null |
tests.py
|
joshiatul/simple_glm
|
4d986ce2d9b062e6c5a42e4df1450790c55ccc39
|
[
"MIT"
] | null | null | null |
import numpy as np
import patsy as ptsy
import pandas as pd
import statsmodels.api as sm
from sklearn import linear_model
from glm import LinearModel
# TODO Write valid unittests
def generate_data(n, loss, wt_param=2, return_rate=False):
    """
    Generate random data for testing.

    :param n: number of observations to draw
    :param loss: one of 'squared', 'logistic', 'poisson'
    :param wt_param: exclusive upper bound for the random integer weights
    :param return_rate: for 'logistic'/'poisson', return rates instead of
                        raw labels/counts
    :return: (weights, response, design matrix) triple
    :raises ValueError: for an unknown loss (previously a silent NameError)
    """
    if loss not in ('squared', 'logistic', 'poisson'):
        raise ValueError('unsupported loss: %r' % (loss,))
    w = np.random.randint(1, wt_param, n)
    if loss == 'squared':
        y = np.random.normal(50, 100, size=n)
    if loss == 'logistic':
        # Binomial - n number of trials, p probability [p_or_label trials] - response
        if return_rate:
            # range() works identically to xrange() here on Python 2 and 3
            y = [sum(np.random.binomial(1, 0.1, 100) == 1) / 100.0 for _ in range(n)]
        else:
            y = np.random.binomial(1, 0.1, n)
    if loss == 'poisson':
        # Poisson - lambda expected events in an interval [avg_no_of_events_or_rate trials] - response
        if return_rate:
            y = [count * 1.0 / trials for count, trials in zip(np.random.poisson(10, size=n), w)]
        else:
            y = np.random.poisson(10, size=n)
    d = {'value': y,
         'feature1': [np.random.choice(['a', 'b', 'c']) for _ in range(n)],
         'feature2': [np.random.choice(['pp', 'qq']) for _ in range(n)]}
    df = pd.DataFrame(d)
    out = ptsy.dmatrices('value ~ feature1 + feature2', data=df, return_type='dataframe')
    y, X = out
    return w, y, X
def test_linear_model_without_regularization(n=100):
    """Compare our simple GLM against statsmodels and sklearn on squared loss
    (no weights, no regularization); results are printed side by side."""
    print('----- Linear model without weights and regularization ------')
    d = dict()
    # list() keeps this working on Python 3, where items() is a view
    test_results = pd.DataFrame(list(d.items()))
    weights, y, X = generate_data(n, loss='squared')
    # Our simple
    glm_simple = LinearModel(loss='squared')
    glm_simple_res = glm_simple.fit(X, y, weights)
    test_results['simple_glm'] = glm_simple_res
    # Statmodels
    glm = sm.GLM(y, X, family=sm.families.Gaussian())
    res = glm.fit(method='qr')
    test_results['sm_glm'] = res.params.values.tolist()
    summary = res.summary()  # kept for manual inspection while debugging
    # Sklearn
    sk_glm = linear_model.LinearRegression(fit_intercept=False)  # Since design matrix has added intercept
    sk_glm.fit(X, y)
    test_results['sklearn_glm'] = sk_glm.coef_.ravel().tolist()
    print(test_results)
def test_logistic_model_without_regularization_no_rate(n=100):
    """Compare statsmodels and sklearn logistic fits on 0/1 labels
    (no weights, no regularization); results are printed side by side."""
    d = dict()
    # list() keeps this working on Python 3, where items() is a view
    test_results = pd.DataFrame(list(d.items()))
    weights, y, X = generate_data(n, loss='logistic', wt_param=1000, return_rate=False)
    # Statmodels
    y = np.array(y).ravel()
    # weights = np.array(weights).ravel().astype('int')
    # y = np.multiply(y, weights).astype('int')
    # non_actions = np.subtract(weights, y).astype('int')
    # y_sm = np.array(zip(y, non_actions))
    glm = sm.GLM(y, X, family=sm.families.Binomial())
    res = glm.fit()
    test_results['sm_glm'] = res.params
    # Sklearn (Expects labels not probabilities)
    sk_glm = linear_model.LogisticRegression(fit_intercept=False)  # Since design matrix has added intercept
    sk_glm.fit(X, y)
    test_results['sklearn_glm'] = sk_glm.coef_.ravel().tolist()
    print(test_results)
def test_weighted_logistic_model_without_regularization_with_rate(n=100):
    """Compare our simple GLM against statsmodels for weighted logistic
    regression where the response is a rate in [0, 1]."""
    print('----- Weighted Logistic model without regularization and response as rate (response between 0 and 1) ------')
    d = dict()
    # list() keeps this working on Python 3, where items() is a view
    test_results = pd.DataFrame(list(d.items()))
    weights, y, X = generate_data(n, loss='logistic', wt_param=1000, return_rate=True)
    # Our simple
    glm_simple = LinearModel(loss='logistic')
    glm_simple_res = glm_simple.fit(X, y, weights)
    test_results['simple_glm'] = glm_simple_res
    # Statmodels
    y = np.array(y).ravel()
    weights = np.array(weights).ravel().astype('float')
    y = np.multiply(y, weights).astype('float')
    non_actions = np.subtract(weights, y).astype('float')
    # list() is required on Python 3, where zip() returns an iterator
    y_sm = np.array(list(zip(y, non_actions)))
    glm = sm.GLM(y_sm, X, family=sm.families.Binomial())
    res = glm.fit()
    test_results['sm_glm'] = res.params.ravel().tolist()
    print(test_results)
def test_poisson_model_without_regularization_with_count(n=100):
    """Compare our simple GLM against statsmodels for Poisson regression on
    count data (no weights, no regularization)."""
    print('----- Poisson model without weights and regularization ------')
    d = dict()
    # list() keeps this working on Python 3, where items() is a view
    test_results = pd.DataFrame(list(d.items()))
    weights, y, X = generate_data(n, loss='poisson')
    # Our simple
    glm_simple = LinearModel(loss='poisson')
    glm_simple_res = glm_simple.fit(X, y, weights)
    test_results['simple_glm'] = glm_simple_res
    # Statmodels (poisson strictly expect count data, out of 1 trial)
    y = np.array(y).ravel()
    glm = sm.GLM(y, X, family=sm.families.Poisson())
    res = glm.fit()
    test_results['sm_glm'] = res.params.ravel().tolist()
    print(test_results)
def test_weighted_linear_model_without_regularization(n=100):
    """Compare our weighted squared-loss LinearModel (no regularization)
    against statsmodels WLS and sklearn LinearRegression.

    :param n: number of simulated observations.
    """
    print('----- Weighted Linear model without regularization ------')
    d = dict()
    # list() so the dict view is materialized (Python 3 compatible).
    test_results = pd.DataFrame(list(d.items()))
    weights, y, X = generate_data(n, loss='squared', wt_param=10)
    # Our simple implementation.
    glm_simple = LinearModel(loss='squared')
    glm_simple_res = glm_simple.fit(X, y, weights)
    test_results['simple_glm'] = glm_simple_res
    # Statsmodels weighted least squares.
    wls = sm.WLS(y, X, weights=weights)
    res1 = wls.fit()
    test_results['sm_wls'] = res1.params.values.tolist()
    # Sklearn; fit_intercept=False since the design matrix already carries an intercept column.
    sk_glm = linear_model.LinearRegression(fit_intercept=False)
    sk_glm.fit(X, y, sample_weight=weights)
    test_results['sklearn_glm'] = sk_glm.coef_.ravel().tolist()
    print(test_results)
# def test_weighted_logistic_model_without_regularization(n=100):
# print '----- Weighted Logistic model without regularization ------'
# d = dict()
# test_results = pd.DataFrame(d.items())
# weights, y, X = generate_data(n, loss='logistic', wt_param=1000, return_rate=True)
# # Our simple
# glm_simple = LinearRegression(loss='logistic')
# glm_simple_res = glm_simple.fit(X, y, weights)
# test_results['simple_glm'] = glm_simple_res
#
# # Statmodels
# y = np.array(y).ravel()
# weights = np.array(weights).ravel().astype('float')
# y = np.multiply(y, weights).astype('float')
# non_actions = np.subtract(weights, y).astype('float')
# y_sm = np.array(zip(y, non_actions))
# glm = sm.GLM(y_sm, X, family=sm.families.Binomial())
# res = glm.fit()
# test_results['sm_glm'] = res.params.ravel().tolist()
# print test_results
def test_weighted_poisson_model_without_regularization_with_count(n=100):
    """Fit our weighted Poisson LinearModel without regularization.

    There is no readily available reference implementation for a weighted
    Poisson rate response, so only our own coefficients are printed.

    :param n: number of simulated observations.
    """
    print('----- Poisson model with weights and without regularization ------')
    d = dict()
    # list() so the dict view is materialized (Python 3 compatible).
    test_results = pd.DataFrame(list(d.items()))
    weights, y, X = generate_data(n, loss='poisson', wt_param=10, return_rate=True)
    # Our simple implementation.
    glm_simple = LinearModel(loss='poisson')
    glm_simple_res = glm_simple.fit(X, y, weights)
    test_results['simple_glm'] = glm_simple_res
    # Nothing to test / compare to:
    # probably the best comparison would be statsmodels with a scaled response.
    print(test_results)
def test_weighted_linear_model_with_regularization(n=100):
    """Fit our weighted squared-loss LinearModel WITH a Gaussian prior.

    A tiny prior variance pins each coefficient near its prior mean (0 for
    features, mean(y) for the intercept), so the fitted coefficients should
    land close to the prior.

    :param n: number of simulated observations.
    """
    print('----- Weighted Linear model WITH regularization ------')
    d = dict()
    # list() so the dict view is materialized (Python 3 compatible).
    test_results = pd.DataFrame(list(d.items()))
    weights, y, X = generate_data(n, loss='squared', wt_param=10)
    # Match prior mean and prior var; different per-column means may be passed here.
    prior_mean = {col: 0 for col in X.columns}
    prior_var = {col: 0.000001 for col in X.columns}
    prior_mean["Intercept"] = np.mean(y.values)
    # Our simple implementation.
    glm_simple = LinearModel(loss='squared', regularize_intercept=True, prior_mean=prior_mean, prior_var=prior_var)
    glm_simple_res = glm_simple.fit(X, y, weights)
    test_results['simple_glm'] = glm_simple_res
    print(prior_mean)
    print(test_results)
def test_weighted_logistic_model_with_regularization(n=100):
    """Fit our weighted logistic LinearModel WITH a Gaussian prior, response
    given as a rate in [0, 1].

    A tiny prior variance pins each coefficient near its prior mean (0 for
    features, mean(y) for the intercept).

    :param n: number of simulated observations.
    """
    print('----- Weighted Logistic model WITH regularization and response as rate (response between 0 and 1) ------')
    d = dict()
    # list() so the dict view is materialized (Python 3 compatible).
    test_results = pd.DataFrame(list(d.items()))
    weights, y, X = generate_data(n, loss='logistic', wt_param=1000, return_rate=True)
    # Match prior mean and prior var; different per-column means may be passed here.
    prior_mean = {col: 0 for col in X.columns}
    prior_var = {col: 0.000001 for col in X.columns}
    prior_mean["Intercept"] = np.mean(y.values)
    # Our simple implementation.
    glm_simple = LinearModel(loss='logistic', regularize_intercept=True, prior_mean=prior_mean, prior_var=prior_var)
    glm_simple_res = glm_simple.fit(X, y, weights)
    test_results['simple_glm'] = glm_simple_res
    print(prior_mean)
    print(test_results)
def test_weighted_poisson_model_with_regularization(n=100):
    """Fit our weighted Poisson LinearModel WITH a Gaussian prior.

    A tiny prior variance pins each coefficient near its prior mean. There is
    no reference implementation to compare against, so only our results print.

    :param n: number of simulated observations.
    """
    # Banner previously (incorrectly) said "without regularization".
    print('----- Poisson model with weights and WITH regularization ------')
    d = dict()
    # list() so the dict view is materialized (Python 3 compatible).
    test_results = pd.DataFrame(list(d.items()))
    weights, y, X = generate_data(n, loss='poisson', wt_param=10, return_rate=True)
    # Match prior mean and prior var; different per-column means may be passed here.
    prior_mean = {col: 0 for col in X.columns}
    prior_var = {col: 0.000001 for col in X.columns}
    prior_mean["Intercept"] = np.mean(y.values)
    # Our simple implementation.
    glm_simple = LinearModel(loss='poisson', regularize_intercept=True, prior_mean=prior_mean, prior_var=prior_var)
    glm_simple_res = glm_simple.fit(X, y, weights)
    test_results['simple_glm'] = glm_simple_res
    # Nothing to test / compare to:
    # probably the best comparison would be statsmodels with a scaled response.
    print(prior_mean)
    print(test_results)
if __name__ == '__main__':
    # Run the full model-comparison suite; each test prints its own results table.
    test_linear_model_without_regularization(n=100)
    test_weighted_linear_model_without_regularization(n=100)
    #### test_logistic_model_without_regularization_no_rate(n=100) # Not done yet
    test_weighted_logistic_model_without_regularization_with_rate(n=100)
    test_poisson_model_without_regularization_with_count(n=100)
    test_weighted_poisson_model_without_regularization_with_count(n=100)
    test_weighted_linear_model_with_regularization(n=100)
    test_weighted_logistic_model_with_regularization(n=100)
    test_weighted_poisson_model_with_regularization(n=100)
| 38.88716
| 119
| 0.682309
| 1,420
| 9,994
| 4.570423
| 0.120423
| 0.064407
| 0.033282
| 0.028814
| 0.845455
| 0.842065
| 0.809553
| 0.777196
| 0.708783
| 0.692296
| 0
| 0.017663
| 0.189914
| 9,994
| 257
| 120
| 38.88716
| 0.783967
| 0.201721
| 0
| 0.572289
| 1
| 0
| 0.125577
| 0
| 0
| 0
| 0
| 0.003891
| 0
| 0
| null | null | 0
| 0.036145
| null | null | 0.120482
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f6c7b43e2057b930cfc0f89212f8d29bdef7e82a
| 99
|
py
|
Python
|
Constants/__init__.py
|
joey00072/Marathi-Programing-Language
|
190cbb40eca25413f3c69056b13a4399c1e06fdd
|
[
"MIT"
] | 52
|
2021-07-11T14:49:33.000Z
|
2022-03-16T02:45:06.000Z
|
Constants/__init__.py
|
d-kaustubh/Baji-Marathi-Programing-Language
|
0a03193c1af8f2d718ed0439931335ba961ca756
|
[
"MIT"
] | 1
|
2021-07-15T04:13:41.000Z
|
2021-08-06T19:41:01.000Z
|
Constants/__init__.py
|
d-kaustubh/Baji-Marathi-Programing-Language
|
0a03193c1af8f2d718ed0439931335ba961ca756
|
[
"MIT"
] | 6
|
2021-07-13T02:27:21.000Z
|
2022-01-12T10:33:45.000Z
|
from Constants.constants import *
from Constants.tokens import *
from Constants.keywords import *
| 33
| 34
| 0.808081
| 12
| 99
| 6.666667
| 0.416667
| 0.4875
| 0.475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131313
| 99
| 3
| 35
| 33
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
121e49da80c3307a89d161c5b77aec10cf9ecdbc
| 7,193
|
py
|
Python
|
test/test_systems_generators_dockerfile.py
|
SanthoshBala18/skelebot
|
13055dba1399b56a76a392699aa0aa259ca916a9
|
[
"MIT"
] | null | null | null |
test/test_systems_generators_dockerfile.py
|
SanthoshBala18/skelebot
|
13055dba1399b56a76a392699aa0aa259ca916a9
|
[
"MIT"
] | null | null | null |
test/test_systems_generators_dockerfile.py
|
SanthoshBala18/skelebot
|
13055dba1399b56a76a392699aa0aa259ca916a9
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from unittest import mock
import skelebot as sb
import os
class TestDockerfile(TestCase):
    """Tests for sb.systems.generators.dockerfile.buildDockerfile.

    Each test loads the sample skelebot config from test/files (os.getcwd is
    mocked to point there), tweaks the config, generates a Dockerfile on disk,
    and compares the generated file contents verbatim against the expected
    multi-line string.
    """

    path = ""

    # Get the path to the current working directory before we mock the function to do so
    def setUp(self):
        self.path = os.getcwd()
        # Show full diffs for the long multi-line Dockerfile comparisons.
        self.maxDiff = None

    @mock.patch('os.path.expanduser')
    @mock.patch('os.getcwd')
    def test_buildDockerfile_no_language(self, mock_getcwd, mock_expanduser):
        # With no language configured, no dependency-install RUN lines are emitted.
        folderPath = "{path}/test/files".format(path=self.path)
        filePath = "{folder}/Dockerfile".format(folder=folderPath)
        mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
        mock_getcwd.return_value = folderPath
        config = sb.systems.generators.yaml.loadConfig()
        config.language = None
        expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM ubuntu:18.04
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
COPY . /app
CMD /bin/bash -c \"bash build.sh --env local --log info\"\n"""
        sb.systems.generators.dockerfile.buildDockerfile(config)
        data = None
        with open(filePath, "r") as file:
            data = file.read()
        self.assertTrue(data is not None)
        self.assertEqual(data, expectedDockerfile)

    @mock.patch('os.path.expanduser')
    @mock.patch('os.getcwd')
    def test_buildDockerfile_base(self, mock_getcwd, mock_expanduser):
        # R language: expects install lines for each dependency kind
        # (CRAN package, github:, file:, and pinned version "name=ver").
        folderPath = "{path}/test/files".format(path=self.path)
        filePath = "{folder}/Dockerfile".format(folder=folderPath)
        mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
        mock_getcwd.return_value = folderPath
        config = sb.systems.generators.yaml.loadConfig()
        config.language = "R"
        config.dependencies.append("github:github.com/repo:cool-lib")
        config.dependencies.append("file:libs/proj:cool-proj")
        config.dependencies.append("dtable=9.0")
        expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/r-base
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["Rscript", "-e", "install.packages('pyyaml', repo='https://cloud.r-project.org'); library(pyyaml)"]
RUN ["Rscript", "-e", "install.packages('artifactory', repo='https://cloud.r-project.org'); library(artifactory)"]
RUN ["Rscript", "-e", "install.packages('argparse', repo='https://cloud.r-project.org'); library(argparse)"]
RUN ["Rscript", "-e", "install.packages('coverage', repo='https://cloud.r-project.org'); library(coverage)"]
RUN ["Rscript", "-e", "install.packages('pytest', repo='https://cloud.r-project.org'); library(pytest)"]
RUN ["Rscript", "-e", "library(devtools); install_github('github.com/repo'); library(cool-lib)"]
COPY libs/proj libs/proj
RUN ["Rscript", "-e", "install.packages('/app/libs/proj', repos=NULL, type='source'); library(cool-proj)"]
RUN ["Rscript", "-e", "library(devtools); install_version('dtable', version='9.0', repos='http://cran.us.r-project.org'); library(dtable)"]
COPY . /app
CMD /bin/bash -c \"bash build.sh --env local --log info\"\n"""
        sb.systems.generators.dockerfile.buildDockerfile(config)
        data = None
        with open(filePath, "r") as file:
            data = file.read()
        self.assertTrue(data is not None)
        print(data)
        self.assertEqual(data, expectedDockerfile)

    @mock.patch('os.path.expanduser')
    @mock.patch('os.getcwd')
    def test_buildDockerfile_krb(self, mock_getcwd, mock_expanduser):
        # Adding a Kerberos component switches the base image to r-krb and
        # adds COPY lines for the conf/keytab plus the krb init in CMD.
        folderPath = "{path}/test/files".format(path=self.path)
        filePath = "{folder}/Dockerfile".format(folder=folderPath)
        mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
        mock_getcwd.return_value = folderPath
        config = sb.systems.generators.yaml.loadConfig()
        config.language = "R"
        config.dependencies.append("github:github.com/repo:cool-lib")
        config.dependencies.append("file:libs/proj:cool-proj")
        config.dependencies.append("dtable=9.0")
        config.components.append(sb.components.kerberos.Kerberos("conf", "tab", "user"))
        expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM skelebot/r-krb
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["Rscript", "-e", "install.packages('pyyaml', repo='https://cloud.r-project.org'); library(pyyaml)"]
RUN ["Rscript", "-e", "install.packages('artifactory', repo='https://cloud.r-project.org'); library(artifactory)"]
RUN ["Rscript", "-e", "install.packages('argparse', repo='https://cloud.r-project.org'); library(argparse)"]
RUN ["Rscript", "-e", "install.packages('coverage', repo='https://cloud.r-project.org'); library(coverage)"]
RUN ["Rscript", "-e", "install.packages('pytest', repo='https://cloud.r-project.org'); library(pytest)"]
RUN ["Rscript", "-e", "library(devtools); install_github('github.com/repo'); library(cool-lib)"]
COPY libs/proj libs/proj
RUN ["Rscript", "-e", "install.packages('/app/libs/proj', repos=NULL, type='source'); library(cool-proj)"]
RUN ["Rscript", "-e", "library(devtools); install_version('dtable', version='9.0', repos='http://cran.us.r-project.org'); library(dtable)"]
COPY . /app
COPY conf /etc/krb5.conf
COPY tab /krb/auth.keytab
CMD /bin/bash -c \"/./krb/init.sh user && bash build.sh --env local --log info\"\n"""
        sb.systems.generators.dockerfile.buildDockerfile(config)
        data = None
        with open(filePath, "r") as file:
            data = file.read()
        self.assertTrue(data is not None)
        print(data)
        self.assertEqual(data, expectedDockerfile)

    @mock.patch('os.path.expanduser')
    @mock.patch('os.getcwd')
    def test_buildDockerfile_custom(self, mock_getcwd, mock_expanduser):
        # A custom baseImage overrides the FROM line; default (Python) pip
        # install lines are still emitted for the config dependencies.
        folderPath = "{path}/test/files".format(path=self.path)
        filePath = "{folder}/Dockerfile".format(folder=folderPath)
        mock_expanduser.return_value = "{path}/test/plugins".format(path=self.path)
        mock_getcwd.return_value = folderPath
        config = sb.systems.generators.yaml.loadConfig()
        config.baseImage = "whatever:uwant"
        expectedDockerfile = """
# This Dockerfile was generated by Skelebot
# Editing this file manually is not advised as all changes will be overwritten by Skelebot
FROM whatever:uwant
MAINTAINER Mega Man <megaman@cars.com>
WORKDIR /app
RUN ["pip", "install", "pyyaml"]
RUN ["pip", "install", "artifactory"]
RUN ["pip", "install", "argparse"]
RUN ["pip", "install", "coverage"]
RUN ["pip", "install", "pytest"]
COPY . /app
CMD /bin/bash -c \"bash build.sh --env local --log info\"\n"""
        sb.systems.generators.dockerfile.buildDockerfile(config)
        data = None
        with open(filePath, "r") as file:
            data = file.read()
        self.assertTrue(data is not None)
        print(data)
        self.assertEqual(data, expectedDockerfile)
if __name__ == '__main__':
    # Only TestCase/mock are imported from unittest at the top of this file,
    # so the bare name `unittest` was undefined here (NameError when run as a
    # script). Import it locally before invoking the test runner.
    import unittest
    unittest.main()
| 43.071856
| 139
| 0.679967
| 920
| 7,193
| 5.267391
| 0.16413
| 0.033017
| 0.036319
| 0.044573
| 0.888362
| 0.888362
| 0.888362
| 0.888362
| 0.879901
| 0.870821
| 0
| 0.002158
| 0.162519
| 7,193
| 166
| 140
| 43.331325
| 0.802291
| 0.0114
| 0
| 0.782609
| 0
| 0.123188
| 0.518779
| 0.08862
| 0
| 0
| 0
| 0
| 0.057971
| 1
| 0.036232
| false
| 0
| 0.028986
| 0
| 0.07971
| 0.021739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12447bb2e40b520a65eb2224b499b9e2f34e6525
| 8,564
|
py
|
Python
|
tests/ddo/ddo_sample1_compute.py
|
oceanprotocol/provider-service-py
|
408a9032b30d3606a6b991f3982b7d17ded7cd47
|
[
"Apache-2.0"
] | null | null | null |
tests/ddo/ddo_sample1_compute.py
|
oceanprotocol/provider-service-py
|
408a9032b30d3606a6b991f3982b7d17ded7cd47
|
[
"Apache-2.0"
] | null | null | null |
tests/ddo/ddo_sample1_compute.py
|
oceanprotocol/provider-service-py
|
408a9032b30d3606a6b991f3982b7d17ded7cd47
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2021 Ocean Protocol Foundation
# SPDX-License-Identifier: Apache-2.0
#
# Sample DID document (DDO, spec version 4.1.0) for a *dataset* asset with one
# "compute" service and one "access" service; used as a test fixture.
ddo_dict = {
    "id": "did:op:e16a777d1f146dba369cf98d212f34c17d9de516fcda5c9546076cf043ba6e37",
    "version": "4.1.0",
    "chain_id": 8996,
    "metadata": {
        "created": "2021-12-29T13:34:27",
        "updated": "2021-12-29T13:34:27",
        "description": "Asset description",
        "copyrightHolder": "Asset copyright holder",
        "name": "Asset name",
        "author": "Asset Author",
        "license": "CC-0",
        "links": ["https://google.com"],
        "contentLanguage": "en-US",
        "categories": ["category 1"],
        "tags": ["tag 1"],
        "additionalInformation": {},
        "type": "dataset",
    },
    "services": [
        {
            "index": 0,
            "id": "compute_1",
            "type": "compute",
            "name": "compute_1",
            "description": "compute_1",
            "datatokenAddress": "0x0951D2558F897317e5a68d1b9e743156D1681168",
            "serviceEndpoint": "http://172.15.0.4:8030/api/services",
            # Opaque encrypted-files blob (hex string).
            "files": "0x0442b53536ebb3f1ee0301288efc7a14b9f807e9d104647ca052b0fb67954440bf18f9b5143c94ac5e7dedbefacccbf38783728de2f4c8af8468dca630e1e92e5c7c03cb82b4956fcfecb9fe6f19771df19e7e8dd06b4d6665bbe5b3d5bf5dbf5781b4f3ee97c7864a98d903df4acea2ad39176aed782b3faad82808ca4709382ccd8fa42561830069293d7cd6685696a54fc752f6d78fe7b3ed598636c5447fa593ffe4929280f3e6720f159251474035a29bdc11ae73150d3871600010dd97bd7de63cb64b338d4f5c1a9b70c082df801864a7d4f6c19e5568361e3cf6a3e795f5ae7c8972019405c113a33b5ae09a4dd5cdeff0",
            "timeout": 3600,
            "compute": {
                "namespace": "test",
                "allowRawAlgorithm": True,
                "allowNetworkAccess": False,
                "publisherTrustedAlgorithmPublishers": [],
                # This DID matches alg_ddo_dict["id"] below.
                "publisherTrustedAlgorithms": [
                    {
                        "did": "did:op:706d7452b1a25b183051fe02f2ad902d54fc45a43fdcee26b20f21684b5dee72",
                        "filesChecksum": "09903ddb8459c7ade1ea2bb639207c47f3474a060ae4ace9ba9581c71b9a3f54",
                        "containerSectionChecksum": "743e3591b4c035906be7dbc9eb592089d096be3b2d752f8d8d52917dd609f31f",
                    }
                ],
            },
        },
        {
            "index": 1,
            "id": "access_1",
            "type": "access",
            "name": "name doesn't affect tests",
            "description": "decription doesn't affect tests",
            "datatokenAddress": "0x12d1d7BaF6fE43805391097A63301ACfcF5f5720",
            "serviceEndpoint": "http://172.15.0.4:8030",
            # Opaque encrypted-files blob (hex string).
            "files": "0x0487db5b45655d0ce74cf6e4707c2dd40509cb4d8f80af76758790b4ab715d7658a1f71ee1ee744e7af87275113bd0fde5f8362431934407c8e8bd6f20b1216de4f94cb3d03b975b5c61c5c9e6ac3373e50fc2d181c1b2808f9bca18a59180b77baad213c4dda70ddd866e6cbb0d6eae1036b6e0e8e8c2e17ca0e55180b2afb00acaa27bc343117457bb8d56d670d1e42ed6834b52c4a7f2eb035cb4bd98e24e5ba28935b67071d77d0edcd914572da492c72d0c049ed47d37a84b56a6be311b27fde9aea893afe408d2e96ce330e46443c2ee02ba5ee8757c5d3ef917de9863d13f843fb37794accad4d029c960fe4a56c3cc3d70",
            "timeout": 3600,
            "compute_dict": None,
        },
    ],
    "credentials": {"allow": [], "deny": []},
    "nft": {
        "address": "0x7358776DACe83a4b48E698645F32B043481daCBA",
        "name": "Data NFT 1",
        "symbol": "DNFT1",
        "state": 0,
        "owner": "0xBE5449a6A97aD46c8558A3356267Ee5D2731ab5e",
        "created": "2021-12-29T13:34:28",
    },
    "datatokens": [
        {
            "address": "0x0951D2558F897317e5a68d1b9e743156D1681168",
            "name": "Datatoken 1",
            "symbol": "DT1",
            "serviceId": "compute_1",
        }
    ],
    "event": {
        "tx": "0xa73c332ba8d9615c438e7773d8f8db6a258cc615e43e47130e5500a9da729cea",
        "block": 121,
        "from": "0xBE5449a6A97aD46c8558A3356267Ee5D2731ab5e",
        "contract": "0x7358776DACe83a4b48E698645F32B043481daCBA",
        "datetime": "2021-12-29T13:34:28",
    },
    "stats": {"consumes": -1, "isInPurgatory": "false"},
}
# Sample DID document (DDO, spec version 4.1.0) for the *algorithm* asset that
# ddo_dict's compute service lists as a trusted algorithm; test fixture.
alg_ddo_dict = {
    "id": "did:op:706d7452b1a25b183051fe02f2ad902d54fc45a43fdcee26b20f21684b5dee72",
    "version": "4.1.0",
    "chain_id": 8996,
    "metadata": {
        "created": "2021-12-29T13:34:18",
        "updated": "2021-12-29T13:34:18",
        "description": "Asset description",
        "copyrightHolder": "Asset copyright holder",
        "name": "Asset name",
        "author": "Asset Author",
        "license": "CC-0",
        "links": ["https://google.com"],
        "contentLanguage": "en-US",
        "categories": ["category 1"],
        "tags": ["tag 1"],
        "additionalInformation": {},
        "type": "algorithm",
        # Algorithm-specific metadata: runtime language plus docker container info.
        "algorithm": {
            "language": "python",
            "version": "0.1.0",
            "container": {
                "entrypoint": "run.sh",
                "image": "my-docker-image",
                "tag": "latest",
                "checksum": "44e10daa6637893f4276bb8d7301eb35306ece50f61ca34dcab550",
            },
        },
    },
    "services": [
        {
            "index": 0,
            "id": "b4d208d6-0074-4002-9dd1-02d5d0ad352e",
            "type": "access",
            "name": "name doesn't affect tests",
            "description": "decription doesn't affect tests",
            "datatokenAddress": "0x12d1d7BaF6fE43805391097A63301ACfcF5f5720",
            "serviceEndpoint": "http://172.15.0.4:8030",
            # Opaque encrypted-files blob (hex string).
            "files": "0x0487db5b45655d0ce74cf6e4707c2dd40509cb4d8f80af76758790b4ab715d7658a1f71ee1ee744e7af87275113bd0fde5f8362431934407c8e8bd6f20b1216de4f94cb3d03b975b5c61c5c9e6ac3373e50fc2d181c1b2808f9bca18a59180b77baad213c4dda70ddd866e6cbb0d6eae1036b6e0e8e8c2e17ca0e55180b2afb00acaa27bc343117457bb8d56d670d1e42ed6834b52c4a7f2eb035cb4bd98e24e5ba28935b67071d77d0edcd914572da492c72d0c049ed47d37a84b56a6be311b27fde9aea893afe408d2e96ce330e46443c2ee02ba5ee8757c5d3ef917de9863d13f843fb37794accad4d029c960fe4a56c3cc3d70",
            "timeout": 3600,
            "compute_dict": None,
        },
        {
            "index": 1,
            "id": "compute_1",
            "type": "compute",
            "name": "compute_1",
            "description": "compute_1",
            "datatokenAddress": "0x0951D2558F897317e5a68d1b9e743156D1681168",
            "serviceEndpoint": "http://172.15.0.4:8030/api/services",
            # Opaque encrypted-files blob (hex string).
            "files": "0x0442b53536ebb3f1ee0301288efc7a14b9f807e9d104647ca052b0fb67954440bf18f9b5143c94ac5e7dedbefacccbf38783728de2f4c8af8468dca630e1e92e5c7c03cb82b4956fcfecb9fe6f19771df19e7e8dd06b4d6665bbe5b3d5bf5dbf5781b4f3ee97c7864a98d903df4acea2ad39176aed782b3faad82808ca4709382ccd8fa42561830069293d7cd6685696a54fc752f6d78fe7b3ed598636c5447fa593ffe4929280f3e6720f159251474035a29bdc11ae73150d3871600010dd97bd7de63cb64b338d4f5c1a9b70c082df801864a7d4f6c19e5568361e3cf6a3e795f5ae7c8972019405c113a33b5ae09a4dd5cdeff0",
            "timeout": 3600,
            "compute": {
                "namespace": "test",
                "allowRawAlgorithm": True,
                "allowNetworkAccess": False,
                "publisherTrustedAlgorithmPublishers": [],
                "publisherTrustedAlgorithms": [
                    {
                        "did": "did:op:706d7452b1a25b183051fe02f2ad902d54fc45a43fdcee26b20f21684b5dee72",
                        "filesChecksum": "09903ddb8459c7ade1ea2bb639207c47f3474a060ae4ace9ba9581c71b9a3f54",
                        "containerSectionChecksum": "743e3591b4c035906be7dbc9eb592089d096be3b2d752f8d8d52917dd609f31f",
                    }
                ],
            },
        },
    ],
    "credentials": {"allow": [], "deny": []},
    "nft": {
        "address": "0xa072B0D477fae1aBE3537Ff66A8389B184E18F4d",
        "name": "Data NFT 1",
        "symbol": "DNFT1",
        "state": 0,
        "owner": "0xBE5449a6A97aD46c8558A3356267Ee5D2731ab5e",
        "created": "2021-12-29T13:34:20",
    },
    "datatokens": [
        {
            "address": "0x12d1d7BaF6fE43805391097A63301ACfcF5f5720",
            "name": "Datatoken 1",
            "symbol": "DT1",
            "serviceId": "b4d208d6-0074-4002-9dd1-02d5d0ad352e",
        }
    ],
    "event": {
        "tx": "0x09366c3bf4b24eabbe6de4a1ee63c07fca82c768fcff76e18e8dd461197f2aba",
        "block": 116,
        "from": "0xBE5449a6A97aD46c8558A3356267Ee5D2731ab5e",
        "contract": "0xa072B0D477fae1aBE3537Ff66A8389B184E18F4d",
        "datetime": "2021-12-29T13:34:20",
    },
    "stats": {"consumes": -1, "isInPurgatory": "false"},
}
| 47.054945
| 516
| 0.636385
| 421
| 8,564
| 12.909739
| 0.330166
| 0.008832
| 0.016191
| 0.019135
| 0.820975
| 0.763201
| 0.751426
| 0.751426
| 0.751426
| 0.751426
| 0
| 0.356611
| 0.247548
| 8,564
| 181
| 517
| 47.314917
| 0.486809
| 0.008874
| 0
| 0.636364
| 0
| 0
| 0.641047
| 0.42037
| 0
| 0
| 0.316869
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
124a8b8a2f746c157a3a634cfea112e0bb8e17cc
| 41,313
|
py
|
Python
|
tests/test_password_based_shooting_stars_resource.py
|
andmcadams/shooting-stars-server
|
7bd8f74401dcf013d1a55edf1d97527d6cb77543
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_password_based_shooting_stars_resource.py
|
andmcadams/shooting-stars-server
|
7bd8f74401dcf013d1a55edf1d97527d6cb77543
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_password_based_shooting_stars_resource.py
|
andmcadams/shooting-stars-server
|
7bd8f74401dcf013d1a55edf1d97527d6cb77543
|
[
"BSD-2-Clause"
] | 3
|
2021-11-16T13:27:01.000Z
|
2022-03-21T02:48:42.000Z
|
import os
import sqlite3
import falcon
import datetime
from falcon import testing
from freezegun import freeze_time
import password_based_shooting_stars_resource
import setup_db
from constants import ERROR_MSG_DATA_VALIDATION_FAIL, ERROR_MSG_AUTHORIZATION_FAIL, ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT
from password_based_shooting_stars_resource import PasswordBasedShootingStarsResource
# Fixed reference time so the min/max star-time windows are deterministic under freeze_time.
FROZEN_UNIX_TIME = 1635422400
# ISO-8601 UTC rendering of the same instant, in the form freeze_time accepts.
FROZEN_TIME = datetime.datetime.fromtimestamp(FROZEN_UNIX_TIME, tz=datetime.timezone.utc).isoformat()
# On-disk sqlite database file, created fresh in setUp and deleted in tearDown.
PATH_TO_TEST_DB = 'test_db.db'
def create_test_app(conn: sqlite3.Connection):
    """Build the falcon WSGI app under test and its shooting-stars resource.

    Seeds the module-level password whitelists with known test passwords,
    wires the resource onto its routes, and mounts the static portal folder.

    :param conn: open sqlite3 connection backing the resource.
    :return: (app, resource) pair.
    """
    # Resources are long-lived class instances shared across requests.
    resource = PasswordBasedShootingStarsResource(conn)

    # Register the test passwords used throughout this suite.
    for scout_pw in ('testpw', 'testpw2'):
        password_based_shooting_stars_resource.scout_pw_whitelist.add(scout_pw)
    password_based_shooting_stars_resource.master_pw_whitelist.add('masterpw')

    # falcon.App instances are callable WSGI apps.
    wsgi_app = falcon.App()
    wsgi_app.add_route('/shooting_stars', resource)
    wsgi_app.add_route('/audit', resource, suffix='separate')
    wsgi_app.add_route('/whitelist', resource, suffix='whitelist')
    wsgi_app.add_static_route('/portal', os.environ['STATIC_ASSETS_FOLDER'])
    return wsgi_app, resource
def add_data(connection, star_data: dict, shared_key: str):
    """Insert a single star-sighting row into the ``data`` table and commit.

    :param connection: open sqlite3 connection.
    :param star_data: dict carrying 'location', 'world', 'minTime', 'maxTime'.
    :param shared_key: password/key the row is attributed to.
    """
    # Bind values positionally in column order, with the shared key last.
    params = [star_data[field] for field in ('location', 'world', 'minTime', 'maxTime')]
    params.append(shared_key)
    connection.execute("""
        INSERT INTO data
        (location, world, minTime, maxTime, sharedKey)
        VALUES (?, ?, ?, ?, ?)
    """, params)
    connection.commit()
class TestCase(testing.TestCase):
    """Shared fixture: a fresh shared-key sqlite DB and falcon test client per test."""

    def setUp(self):
        super(TestCase, self).setUp()
        # Create the test database (with whitelist tables) and use Row so
        # fetched rows support name-based access in assertions.
        self.conn = setup_db.create_shared_key_db(PATH_TO_TEST_DB, create_whitelists=True)
        self.conn.row_factory = sqlite3.Row
        app, self.shooting_stars_resource = create_test_app(self.conn)
        self.app = testing.TestClient(app)

    def tearDown(self) -> None:
        super(TestCase, self).tearDown()
        # Close the connection before deleting the database file on disk.
        self.conn.close()
        del self.app
        setup_db.delete_db(PATH_TO_TEST_DB)
@freeze_time(FROZEN_TIME)
class TestPasswordShootingStarsResourceGet(TestCase):
    """GET /shooting_stars behaviour with time frozen at FROZEN_TIME:
    row merging across keys/worlds, the one-hour recency window, and
    Authorization-header validation failures."""

    def test_simple(self):
        # One fresh row should come back as-is.
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - 100,
            'maxTime': FROZEN_UNIX_TIME + 100
        }
        add_data(self.conn, test_data, 'testpw')
        resp: falcon.testing.Result = self.app.simulate_get('/shooting_stars', headers={'Authorization': 'global'})
        assert resp.status == falcon.HTTP_200
        assert resp.json == [test_data]

    def test_multiple_data_points(self):
        # Two rows on different worlds (same key) are both returned.
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - 100,
            'maxTime': FROZEN_UNIX_TIME + 100
        }
        test_data_2 = test_data.copy()
        test_data_2['world'] = 304
        add_data(self.conn, test_data, 'testpw')
        add_data(self.conn, test_data_2, 'testpw')
        resp: falcon.testing.Result = self.app.simulate_get('/shooting_stars', headers={'Authorization': 'global'})
        assert resp.status == falcon.HTTP_200
        assert len(resp.json) == 2
        assert test_data in resp.json
        assert test_data_2 in resp.json

    def test_multiple_data_points_diff_keys(self):
        # Rows from different shared keys are both visible to a global GET.
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - 100,
            'maxTime': FROZEN_UNIX_TIME + 100
        }
        test_data_2 = test_data.copy()
        test_data_2['world'] = 304
        add_data(self.conn, test_data, 'testpw')
        add_data(self.conn, test_data_2, 'masterpw')
        resp: falcon.testing.Result = self.app.simulate_get('/shooting_stars', headers={'Authorization': 'global'})
        assert resp.status == falcon.HTTP_200
        assert len(resp.json) == 2
        assert test_data in resp.json
        assert test_data_2 in resp.json

    def test_multiple_data_points_diff_keys_same_world(self):
        # Rows for the same world are merged: widest minTime, narrowest maxTime.
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - 100,
            'maxTime': FROZEN_UNIX_TIME + 100
        }
        test_data_2 = test_data.copy()
        test_data_2['minTime'] = FROZEN_UNIX_TIME
        test_data_2['maxTime'] = FROZEN_UNIX_TIME + 200
        add_data(self.conn, test_data, 'testpw')
        add_data(self.conn, test_data_2, 'masterpw')
        resp: falcon.testing.Result = self.app.simulate_get('/shooting_stars', headers={'Authorization': 'global'})
        assert resp.status == falcon.HTTP_200
        assert len(resp.json) == 1
        data: dict = resp.json[0]
        assert data['location'] == 10
        assert data['world'] == 302
        assert data['minTime'] == FROZEN_UNIX_TIME
        assert data['maxTime'] == FROZEN_UNIX_TIME + 100

    def test_out_of_range(self):
        # A row whose maxTime is a full hour in the past is filtered out.
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - (100 + (60*60)),
            'maxTime': FROZEN_UNIX_TIME - (60*60)
        }
        add_data(self.conn, test_data, 'testpw')
        resp: falcon.testing.Result = self.app.simulate_get('/shooting_stars', headers={'Authorization': 'global'})
        assert resp.status == falcon.HTTP_200
        assert resp.json == []

    def test_edge_of_range(self):
        # One second inside the one-hour window keeps the row visible.
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - (100 + (60*60)),
            'maxTime': FROZEN_UNIX_TIME - (60*60) + 1
        }
        add_data(self.conn, test_data, 'testpw')
        resp: falcon.testing.Result = self.app.simulate_get('/shooting_stars', headers={'Authorization': 'global'})
        assert resp.status == falcon.HTTP_200
        assert resp.json == [test_data]

    def test_simple_with_old_data(self):
        # Only the non-expired row comes back when old data is present.
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - (100 + (60*60)),
            'maxTime': FROZEN_UNIX_TIME - (50 + (60*60))
        }
        test_data_2 = {
            'location': 8,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME + 100,
            'maxTime': FROZEN_UNIX_TIME + 1000
        }
        add_data(self.conn, test_data, 'testpw')
        add_data(self.conn, test_data_2, 'testpw')
        resp: falcon.testing.Result = self.app.simulate_get('/shooting_stars', headers={'Authorization': 'global'})
        assert resp.status == falcon.HTTP_200
        assert resp.json == [test_data_2]

    def test_validation_fail_no_auth(self):
        # Missing Authorization header is rejected with 400.
        resp: falcon.testing.Result = self.app.simulate_get('/shooting_stars')
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL}

    def test_validation_fail_non_alpha_auth(self):
        # Purely numeric Authorization value is rejected with 400.
        resp: falcon.testing.Result = self.app.simulate_get('/shooting_stars', headers={'Authorization': '123456'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL}

    def test_validation_fail_alpha_numeric_auth(self):
        # Mixed alphanumeric Authorization value is rejected with 400.
        resp: falcon.testing.Result = self.app.simulate_get('/shooting_stars', headers={'Authorization': 'a1b2c3d4'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL}
@freeze_time(FROZEN_TIME)
class TestShootingStarsResourcePost(TestCase):
def test_empty_list(self):
resp = self.app.simulate_post('/shooting_stars', json=[], headers={'Authorization': 'testpw'})
assert resp.status == falcon.HTTP_200
def test_empty_list_master_pw(self):
resp = self.app.simulate_post('/shooting_stars', json=[], headers={'Authorization': 'masterpw'})
assert resp.status == falcon.HTTP_200
def test_single(self):
test_data = {
'location': 10,
'world': 302,
'minTime': FROZEN_UNIX_TIME + 500,
'maxTime': FROZEN_UNIX_TIME + 1000
}
resp = self.app.simulate_post('/shooting_stars', json=[test_data], headers={'Authorization': 'testpw'})
assert resp.status == falcon.HTTP_200
rows = self.conn.execute('''SELECT * FROM data''').fetchall()
assert len(rows) == 1
assert rows[0]['sharedKey'] == 'testpw'
assert rows[0]['location'] == test_data['location']
assert rows[0]['world'] == test_data['world']
assert rows[0]['minTime'] == test_data['minTime']
assert rows[0]['maxTime'] == test_data['maxTime']
def test_two(self):
test_data = {
'location': 10,
'world': 302,
'minTime': FROZEN_UNIX_TIME + 500,
'maxTime': FROZEN_UNIX_TIME + 1000
}
test_data_2 = {
'location': 10,
'world': 303,
'minTime': FROZEN_UNIX_TIME + 500,
'maxTime': FROZEN_UNIX_TIME + 1000
}
data = [test_data, test_data_2]
resp = self.app.simulate_post('/shooting_stars', json=data, headers={'Authorization': 'testpw'})
assert resp.status == falcon.HTTP_200
rows = self.conn.execute('''SELECT * FROM data''').fetchall()
assert len(rows) == 2
row1, row2 = [dict(r) for r in rows]
assert row1['sharedKey'] == 'testpw'
assert row1['location'] == test_data['location']
assert row1['world'] == test_data['world']
assert row1['minTime'] == test_data['minTime']
assert row1['maxTime'] == test_data['maxTime']
assert row2['sharedKey'] == 'testpw'
assert row2['location'] == test_data_2['location']
assert row2['world'] == test_data_2['world']
assert row2['minTime'] == test_data_2['minTime']
assert row2['maxTime'] == test_data_2['maxTime']
def test_two_same_world(self):
test_data = {
'location': 10,
'world': 302,
'minTime': FROZEN_UNIX_TIME + 500,
'maxTime': FROZEN_UNIX_TIME + 1000
}
test_data_2 = {
'location': 10,
'world': 302,
'minTime': FROZEN_UNIX_TIME + 600,
'maxTime': FROZEN_UNIX_TIME + 1100
}
data = [test_data, test_data_2]
resp = self.app.simulate_post('/shooting_stars', json=data, headers={'Authorization': 'testpw'})
assert resp.status == falcon.HTTP_200
rows = self.conn.execute('''SELECT * FROM data''').fetchall()
assert len(rows) == 1
assert rows[0]['sharedKey'] == 'testpw'
assert rows[0]['location'] == test_data['location']
assert rows[0]['world'] == test_data['world']
assert rows[0]['minTime'] == test_data_2['minTime']
assert rows[0]['maxTime'] == test_data['maxTime']
def test_update_existing(self):
    """A new report for an already-stored star narrows the stored window."""
    existing = {
        'location': 10,
        'world': 302,
        'minTime': FROZEN_UNIX_TIME + 500,
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    add_data(self.conn, existing, 'testpw')
    update = {
        'location': 10,
        'world': 302,
        'minTime': FROZEN_UNIX_TIME + 600,
        'maxTime': FROZEN_UNIX_TIME + 1100
    }
    resp = self.app.simulate_post(
        '/shooting_stars', json=[update], headers={'Authorization': 'testpw'})
    assert resp.status == falcon.HTTP_200
    rows = self.conn.execute('''SELECT * FROM data''').fetchall()
    assert len(rows) == 1
    merged = rows[0]
    assert merged['sharedKey'] == 'testpw'
    assert merged['location'] == existing['location']
    assert merged['world'] == existing['world']
    # The stored window is intersected: new minTime, old maxTime.
    assert merged['minTime'] == update['minTime']
    assert merged['maxTime'] == existing['maxTime']
def test_update_with_recent_old(self):
    """A conflicting report just under 10 minutes ahead does not replace the stored row."""
    # This would probably indicate fake data in either data 1 or 2 (or world reset)
    test_data = {
        'location': 10,
        'world': 302,
        'minTime': FROZEN_UNIX_TIME - 100,
        'maxTime': FROZEN_UNIX_TIME
    }
    add_data(self.conn, test_data, 'testpw')
    test_data_2 = {
        'location': 10,
        'world': 302,
        # One second inside the 10-minute gap threshold
        # (contrast with test_update_with_too_old, which is one second over).
        'minTime': FROZEN_UNIX_TIME + (60*10) - 1,
        'maxTime': FROZEN_UNIX_TIME + (60*20)
    }
    resp = self.app.simulate_post('/shooting_stars', json=[test_data_2], headers={'Authorization': 'testpw'})
    assert resp.status == falcon.HTTP_200
    rows = self.conn.execute('''SELECT * FROM data''').fetchall()
    assert len(rows) == 1
    assert rows[0]['sharedKey'] == 'testpw'
    assert rows[0]['location'] == test_data['location']
    assert rows[0]['world'] == test_data['world']
    # The original window is retained; the newer report is discarded.
    assert rows[0]['minTime'] == test_data['minTime']
    assert rows[0]['maxTime'] == test_data['maxTime']
def test_update_with_too_old(self):
    """A report starting 10+ minutes past the stored window is kept as a second row."""
    # A gap this large likely means fake data or a world reset, so both
    # sightings are preserved instead of being merged.
    stale = {
        'location': 10,
        'world': 302,
        'minTime': FROZEN_UNIX_TIME - 100,
        'maxTime': FROZEN_UNIX_TIME
    }
    add_data(self.conn, stale, 'testpw')
    fresh = {
        'location': 10,
        'world': 302,
        'minTime': FROZEN_UNIX_TIME + (60*10),
        'maxTime': FROZEN_UNIX_TIME + (60*20)
    }
    resp = self.app.simulate_post(
        '/shooting_stars', json=[fresh], headers={'Authorization': 'testpw'})
    assert resp.status == falcon.HTTP_200
    rows = self.conn.execute('''SELECT * FROM data''').fetchall()
    assert len(rows) == 2
    for row, expected in zip((dict(r) for r in rows), (stale, fresh)):
        assert row['sharedKey'] == 'testpw'
        assert row['location'] == expected['location']
        assert row['world'] == expected['world']
        assert row['minTime'] == expected['minTime']
        assert row['maxTime'] == expected['maxTime']
def test_validation_fail_no_auth(self):
    """A POST without an Authorization header is rejected and stores nothing."""
    response = self.app.simulate_post('/shooting_stars')
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_not_on_either_list(self):
    """A password on neither whitelist is rejected and stores nothing."""
    response = self.app.simulate_post(
        '/shooting_stars', headers={'Authorization': 'testpwa'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_non_alpha_auth(self):
    """A whitelisted but all-digit password is still rejected."""
    password_based_shooting_stars_resource.scout_pw_whitelist.add('123456')
    resp = self.app.simulate_post('/shooting_stars', headers={'Authorization': '123456'})
    # Cleanup runs before the asserts so an assertion failure cannot leave
    # the shared whitelist polluted for other tests.
    password_based_shooting_stars_resource.scout_pw_whitelist.remove('123456')
    assert resp.status == falcon.HTTP_400
    assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL}
    assert self.conn.execute('''SELECT * FROM data''').fetchall() == []
def test_validation_fail_alpha_numeric_auth(self):
    """A whitelisted password mixing letters and digits is still rejected."""
    password_based_shooting_stars_resource.scout_pw_whitelist.add('a1b2c3d4')
    resp = self.app.simulate_post('/shooting_stars', headers={'Authorization': 'a1b2c3d4'})
    # Cleanup runs before the asserts so an assertion failure cannot leave
    # the shared whitelist polluted for other tests.
    password_based_shooting_stars_resource.scout_pw_whitelist.remove('a1b2c3d4')
    assert resp.status == falcon.HTTP_400
    assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL}
    assert self.conn.execute('''SELECT * FROM data''').fetchall() == []
def test_validation_fail_entry_not_dict(self):
    """A list element that is not a JSON object is rejected."""
    response = self.app.simulate_post(
        '/shooting_stars', json=['fail'], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_second_entry_not_dict(self):
    """Validation rejects the whole batch when any later element is not an object."""
    response = self.app.simulate_post(
        '/shooting_stars', json=[{'test': 'pass'}, 'fail'],
        headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_missing_location(self):
    """A sighting without 'location' is rejected and nothing is stored."""
    payload = {
        'world': 302,
        'minTime': FROZEN_UNIX_TIME + 500,
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_missing_world(self):
    """A sighting without 'world' is rejected and nothing is stored."""
    payload = {
        'location': 10,
        'minTime': FROZEN_UNIX_TIME + 500,
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_missing_min_time(self):
    """A sighting without 'minTime' is rejected and nothing is stored."""
    payload = {
        'location': 10,
        'world': 302,
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_missing_max_time(self):
    """A sighting without 'maxTime' is rejected and nothing is stored."""
    payload = {
        'location': 10,
        'world': 302,
        'minTime': FROZEN_UNIX_TIME + 500
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_location_not_int(self):
    """A string 'location' fails type validation."""
    payload = {
        'location': '10',
        'world': 302,
        'minTime': FROZEN_UNIX_TIME + 500,
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_world_not_int(self):
    """A string 'world' fails type validation."""
    payload = {
        'location': 10,
        'world': '302',
        'minTime': FROZEN_UNIX_TIME + 500,
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_min_time_not_int(self):
    """A string 'minTime' fails type validation."""
    payload = {
        'location': 10,
        'world': 302,
        'minTime': f'{FROZEN_UNIX_TIME + 500}',
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_max_time_not_int(self):
    """A string 'maxTime' fails type validation."""
    payload = {
        'location': 10,
        'world': 302,
        'minTime': FROZEN_UNIX_TIME + 500,
        'maxTime': f'{FROZEN_UNIX_TIME + 1000}'
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_location_not_valid_negative(self):
    """A negative location id is out of range and rejected."""
    payload = {
        'location': -1,
        'world': 302,
        'minTime': FROZEN_UNIX_TIME + 500,
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_location_not_valid_too_high(self):
    """A location id above the valid range (14 here) is rejected."""
    payload = {
        'location': 14,
        'world': 302,
        'minTime': FROZEN_UNIX_TIME + 500,
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_world_not_valid(self):
    """An unknown world number (30) is rejected."""
    payload = {
        'location': 10,
        'world': 30,
        'minTime': FROZEN_UNIX_TIME + 500,
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_times_too_close(self):
    """A 119-second min/max window is too narrow to be a real sighting."""
    payload = {
        'location': 10,
        'world': 30,
        'minTime': FROZEN_UNIX_TIME + 881,
        'maxTime': FROZEN_UNIX_TIME + 1000
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_times_too_far(self):
    """A window one second over 26 minutes wide is rejected."""
    payload = {
        'location': 10,
        'world': 30,
        'minTime': FROZEN_UNIX_TIME,
        'maxTime': FROZEN_UNIX_TIME + (60*26) + 1
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_min_greater_than_max(self):
    """minTime after maxTime is an inverted window and rejected."""
    payload = {
        'location': 10,
        'world': 30,
        'minTime': FROZEN_UNIX_TIME + 1000,
        'maxTime': FROZEN_UNIX_TIME
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_too_far_in_the_future(self):
    """A maxTime one second beyond the 150-minute horizon is rejected."""
    payload = {
        'location': 10,
        'world': 30,
        'minTime': FROZEN_UNIX_TIME + 1000,
        'maxTime': FROZEN_UNIX_TIME + (60*150) + 1
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
def test_validation_fail_in_the_past(self):
    """A window that already ended before 'now' is rejected."""
    payload = {
        'location': 10,
        'world': 30,
        'minTime': FROZEN_UNIX_TIME - 500,
        'maxTime': FROZEN_UNIX_TIME - 50
    }
    response = self.app.simulate_post(
        '/shooting_stars', json=[payload], headers={'Authorization': 'testpw'})
    assert response.status == falcon.HTTP_400
    assert response.json == {
        'title': 'Bad request',
        'description': ERROR_MSG_DATA_VALIDATION_FAIL,
    }
    assert not self.conn.execute('''SELECT * FROM data''').fetchall()
@freeze_time(FROZEN_TIME)
class TestPasswordShootingStarsResourceGetSeparate(TestCase):
    """Tests for GET /audit: master-only listing of sightings per password."""

    def test_simple(self):
        """A stored sighting is returned with its scout password attached."""
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - 100,
            'maxTime': FROZEN_UNIX_TIME + 100
        }
        add_data(self.conn, test_data, 'testpw')
        resp: falcon.testing.Result = self.app.simulate_get('/audit', headers={'Authorization': 'masterpw'})
        correct_response = test_data.copy()
        correct_response['password'] = 'testpw'
        assert resp.status == falcon.HTTP_200
        assert resp.json == [correct_response]

    def test_master_pw_data_point(self):
        """Data submitted under the master password is labelled 'MASTER PASSWORD'."""
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - 100,
            'maxTime': FROZEN_UNIX_TIME + 100
        }
        add_data(self.conn, test_data, 'masterpw')
        resp: falcon.testing.Result = self.app.simulate_get('/audit', headers={'Authorization': 'masterpw'})
        correct_response = test_data.copy()
        # The raw master password is never echoed back in the audit output.
        correct_response['password'] = 'MASTER PASSWORD'
        assert resp.status == falcon.HTTP_200
        assert resp.json == [correct_response]

    def test_multiple_data_points(self):
        """Two sightings under one password are both returned (order unspecified)."""
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - 100,
            'maxTime': FROZEN_UNIX_TIME + 100
        }
        test_data_2 = test_data.copy()
        test_data_2['world'] = 304
        add_data(self.conn, test_data, 'testpw')
        add_data(self.conn, test_data_2, 'testpw')
        resp: falcon.testing.Result = self.app.simulate_get('/audit', headers={'Authorization': 'masterpw'})
        correct_response_1 = test_data.copy()
        correct_response_1['password'] = 'testpw'
        correct_response_2 = test_data_2.copy()
        correct_response_2['password'] = 'testpw'
        assert resp.status == falcon.HTTP_200
        assert len(resp.json) == 2
        # Membership checks rather than equality: response order is not asserted.
        assert correct_response_1 in resp.json
        assert correct_response_2 in resp.json

    def test_multiple_data_points_diff_keys(self):
        """Sightings from different passwords are each tagged with their own password."""
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - 100,
            'maxTime': FROZEN_UNIX_TIME + 100
        }
        test_data_2 = test_data.copy()
        test_data_2['world'] = 304
        add_data(self.conn, test_data, 'testpw')
        add_data(self.conn, test_data_2, 'testpw2')
        resp: falcon.testing.Result = self.app.simulate_get('/audit', headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_200
        assert len(resp.json) == 2
        test_data_with_key = test_data.copy()
        test_data_with_key['password'] = 'testpw'
        test_data_with_key_2 = test_data_2.copy()
        test_data_with_key_2['password'] = 'testpw2'
        assert test_data_with_key in resp.json
        assert test_data_with_key_2 in resp.json

    def test_multiple_data_points_diff_keys_same_world(self):
        """Same world reported by two different passwords yields two audit entries."""
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - 100,
            'maxTime': FROZEN_UNIX_TIME + 100
        }
        test_data_2 = test_data.copy()
        test_data_2['minTime'] = FROZEN_UNIX_TIME
        test_data_2['maxTime'] = FROZEN_UNIX_TIME + 200
        add_data(self.conn, test_data, 'testpw')
        add_data(self.conn, test_data_2, 'testpw2')
        resp: falcon.testing.Result = self.app.simulate_get('/audit', headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_200
        assert len(resp.json) == 2
        test_data_with_key = test_data.copy()
        test_data_with_key['password'] = 'testpw'
        test_data_with_key_2 = test_data_2.copy()
        test_data_with_key_2['password'] = 'testpw2'
        assert test_data_with_key in resp.json
        assert test_data_with_key_2 in resp.json

    def test_out_of_range(self):
        """A sighting whose window ended an hour ago is filtered out of the audit."""
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - (100 + (60*60)),
            'maxTime': FROZEN_UNIX_TIME - (60*60)
        }
        add_data(self.conn, test_data, 'testpw')
        resp: falcon.testing.Result = self.app.simulate_get('/audit', headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_200
        assert resp.json == []

    def test_edge_of_range(self):
        """A maxTime one second inside the one-hour cutoff is still returned."""
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - (100 + (60*60)),
            'maxTime': FROZEN_UNIX_TIME - (60*60) + 1
        }
        add_data(self.conn, test_data, 'testpw')
        resp: falcon.testing.Result = self.app.simulate_get('/audit', headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_200
        assert len(resp.json) == 1
        data: dict = resp.json[0]
        assert data['location'] == 10
        assert data['world'] == 302
        assert data['minTime'] == FROZEN_UNIX_TIME - (100 + (60*60))
        assert data['maxTime'] == FROZEN_UNIX_TIME - (60*60) + 1

    def test_simple_with_old_data(self):
        """Only the still-relevant sighting is returned when stale data coexists."""
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - (100 + (60*60)),
            'maxTime': FROZEN_UNIX_TIME - (50 + (60*60))
        }
        test_data_2 = {
            'location': 8,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME + 100,
            'maxTime': FROZEN_UNIX_TIME + 1000
        }
        add_data(self.conn, test_data, 'testpw')
        add_data(self.conn, test_data_2, 'testpw')
        resp: falcon.testing.Result = self.app.simulate_get('/audit', headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_200
        test_data_2_with_key = test_data_2.copy()
        test_data_2_with_key['password'] = 'testpw'
        assert resp.json == [test_data_2_with_key]

    def test_validation_fail_no_auth(self):
        """GET /audit without an Authorization header is rejected."""
        resp: falcon.testing.Result = self.app.simulate_get('/audit')
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT}

    def test_validation_allow_non_alpha_auth(self):
        """Unlike scout passwords, an all-digit master password is accepted."""
        password_based_shooting_stars_resource.master_pw_whitelist.add('123456')
        resp: falcon.testing.Result = self.app.simulate_get('/audit', headers={'Authorization': '123456'})
        password_based_shooting_stars_resource.master_pw_whitelist.remove('123456')
        assert resp.status == falcon.HTTP_200

    def test_validation_allow_alpha_numeric_auth(self):
        """An alphanumeric master password is accepted."""
        password_based_shooting_stars_resource.master_pw_whitelist.add('a1b2c3d4')
        resp: falcon.testing.Result = self.app.simulate_get('/audit', headers={'Authorization': 'a1b2c3d4'})
        password_based_shooting_stars_resource.master_pw_whitelist.remove('a1b2c3d4')
        assert resp.status == falcon.HTTP_200
class TestPasswordShootingStarsResourceGetWhitelist(TestCase):
    """Tests for GET /whitelist: listing scout passwords (master auth required)."""

    def test_simple(self):
        """Returns exactly the two scout passwords seeded by the fixture."""
        resp: falcon.testing.Result = self.app.simulate_get('/whitelist', headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_200
        assert len(resp.json) == 2
        assert 'testpw' in resp.json
        assert 'testpw2' in resp.json

    def test_master_password_not_returned(self):
        """Master passwords are never listed, even when also on the master whitelist."""
        password_based_shooting_stars_resource.master_pw_whitelist.add('masterpw')
        resp: falcon.testing.Result = self.app.simulate_get('/whitelist', headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_200
        assert len(resp.json) == 2
        assert 'testpw' in resp.json
        assert 'testpw2' in resp.json
        assert 'masterpw' not in resp.json
        password_based_shooting_stars_resource.master_pw_whitelist.remove('masterpw')

    def test_validation_no_auth(self):
        """GET /whitelist without an Authorization header is rejected."""
        resp: falcon.testing.Result = self.app.simulate_get('/whitelist')
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT}

    def test_validation_incorrect_auth(self):
        """GET /whitelist with a non-master password is rejected."""
        resp: falcon.testing.Result = self.app.simulate_get('/whitelist', headers={'Authorization': 'badpw'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT}
class TestPasswordShootingStarsResourcePostWhitelist(TestCase):
    """Tests for POST /whitelist: adding scout passwords (master auth required)."""

    def test_simple(self):
        """Adding a password puts it on the scout whitelist and persists it."""
        resp: falcon.testing.Result = self.app.simulate_post('/whitelist', headers={'Authorization': 'masterpw'}, json={'password': 'testpw3'})
        assert resp.status == falcon.HTTP_200
        assert 'testpw3' in password_based_shooting_stars_resource.scout_pw_whitelist
        # Adding a scout password must not grant master privileges.
        assert 'testpw3' not in password_based_shooting_stars_resource.master_pw_whitelist
        rows = self.conn.execute('''SELECT password FROM scout_whitelist WHERE password = "testpw3"''').fetchall()
        assert len(rows) == 1
        # Cleanup of shared module-level state.
        password_based_shooting_stars_resource.scout_pw_whitelist.discard('testpw3')

    def test_already_existing_add(self):
        """Re-adding an existing password succeeds and stays deduplicated in the DB."""
        password_based_shooting_stars_resource.scout_pw_whitelist.add('testpw3')
        resp: falcon.testing.Result = self.app.simulate_post('/whitelist', headers={'Authorization': 'masterpw'}, json={'password': 'testpw3'})
        assert resp.status == falcon.HTTP_200
        assert 'testpw3' in password_based_shooting_stars_resource.scout_pw_whitelist
        assert 'testpw3' not in password_based_shooting_stars_resource.master_pw_whitelist
        rows = self.conn.execute('''SELECT password FROM scout_whitelist WHERE password = "testpw3"''').fetchall()
        assert len(rows) == 1
        password_based_shooting_stars_resource.scout_pw_whitelist.discard('testpw3')

    def test_validation_no_auth(self):
        """POST /whitelist without auth is rejected and persists nothing."""
        resp: falcon.testing.Result = self.app.simulate_post('/whitelist', json={'password': 'testpw3'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT}
        rows = self.conn.execute('''SELECT password FROM scout_whitelist WHERE password = "testpw3"''').fetchall()
        assert len(rows) == 0

    def test_validation_incorrect_auth(self):
        """POST /whitelist with a non-master password is rejected."""
        resp: falcon.testing.Result = self.app.simulate_post('/whitelist', headers={'Authorization': 'badpw'}, json={'password': 'testpw3'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT}
        rows = self.conn.execute('''SELECT password FROM scout_whitelist WHERE password = "testpw3"''').fetchall()
        assert len(rows) == 0

    def test_validation_fail_missing_password(self):
        """A body without 'password' fails data validation."""
        resp = self.app.simulate_post('/whitelist', json={}, headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_DATA_VALIDATION_FAIL}
        rows = self.conn.execute('''SELECT password FROM scout_whitelist WHERE password = "testpw3"''').fetchall()
        assert len(rows) == 0

    def test_validation_fail_password_not_str(self):
        """A non-string 'password' fails data validation."""
        resp = self.app.simulate_post('/whitelist', json={"password": 1}, headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_DATA_VALIDATION_FAIL}
        rows = self.conn.execute('''SELECT password FROM scout_whitelist WHERE password = "testpw3"''').fetchall()
        assert len(rows) == 0
class TestPasswordShootingStarsResourceDeleteWhitelist(TestCase):
    """Tests for DELETE /whitelist: removing scout passwords (master auth required)."""

    def test_simple(self):
        """Deleting a password removes it from memory and the database."""
        resp: falcon.testing.Result = self.app.simulate_delete('/whitelist', headers={'Authorization': 'masterpw'}, json={'password': 'testpw2'})
        assert resp.status == falcon.HTTP_200
        assert 'testpw2' not in password_based_shooting_stars_resource.scout_pw_whitelist
        # The other fixture password is untouched.
        assert 'testpw' in password_based_shooting_stars_resource.scout_pw_whitelist
        assert resp.text == 'Successfully removed from whitelist and data cleared'
        rows = self.conn.execute('''SELECT password FROM scout_whitelist WHERE password = "testpw2"''').fetchall()
        assert len(rows) == 0
        # Restore shared module-level state for subsequent tests.
        password_based_shooting_stars_resource.scout_pw_whitelist.add('testpw2')

    def test_remove_data(self):
        """Deleting a password also purges the sightings submitted with it."""
        test_data = {
            'location': 10,
            'world': 302,
            'minTime': FROZEN_UNIX_TIME - 100,
            'maxTime': FROZEN_UNIX_TIME + 100
        }
        add_data(self.conn, test_data, 'testpw2')
        resp: falcon.testing.Result = self.app.simulate_delete('/whitelist', headers={'Authorization': 'masterpw'}, json={'password': 'testpw2'})
        assert resp.status == falcon.HTTP_200
        assert 'testpw2' not in password_based_shooting_stars_resource.scout_pw_whitelist
        assert 'testpw' in password_based_shooting_stars_resource.scout_pw_whitelist
        assert resp.text == 'Successfully removed from whitelist and data cleared'
        rows = self.conn.execute('''SELECT * FROM data''').fetchall()
        assert len(rows) == 0
        rows = self.conn.execute('''SELECT password FROM scout_whitelist WHERE password = "testpw2"''').fetchall()
        assert len(rows) == 0
        password_based_shooting_stars_resource.scout_pw_whitelist.add('testpw2')

    def test_not_in_whitelist(self):
        """Deleting an unknown password is a no-op reported in the response text."""
        resp: falcon.testing.Result = self.app.simulate_delete('/whitelist', headers={'Authorization': 'masterpw'}, json={'password': 'testpw3'})
        assert resp.status == falcon.HTTP_200
        assert 'testpw2' in password_based_shooting_stars_resource.scout_pw_whitelist
        assert 'testpw' in password_based_shooting_stars_resource.scout_pw_whitelist
        assert resp.text == 'No such key found in the whitelist'

    def test_validation_no_auth(self):
        """DELETE /whitelist without auth is rejected."""
        resp: falcon.testing.Result = self.app.simulate_delete('/whitelist', json={'password': 'testpw2'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT}

    def test_validation_incorrect_auth(self):
        """DELETE /whitelist with a non-master password is rejected."""
        resp: falcon.testing.Result = self.app.simulate_delete('/whitelist', headers={'Authorization': 'badpw'}, json={'password': 'testpw2'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_AUTHORIZATION_FAIL_SUBMIT}

    def test_validation_fail_missing_password(self):
        """A body without 'password' fails data validation."""
        resp = self.app.simulate_delete('/whitelist', json={}, headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_DATA_VALIDATION_FAIL}

    def test_validation_fail_password_not_str(self):
        """A non-string 'password' fails data validation."""
        resp = self.app.simulate_delete('/whitelist', json={"password": 1}, headers={'Authorization': 'masterpw'})
        assert resp.status == falcon.HTTP_400
        assert resp.json == {'title': 'Bad request', 'description': ERROR_MSG_DATA_VALIDATION_FAIL}
| 44.856678
| 145
| 0.636216
| 4,866
| 41,313
| 5.123716
| 0.048089
| 0.057436
| 0.05503
| 0.060003
| 0.916493
| 0.903538
| 0.895275
| 0.893952
| 0.886291
| 0.856971
| 0
| 0.032766
| 0.232445
| 41,313
| 920
| 146
| 44.905435
| 0.753485
| 0.007649
| 0
| 0.747126
| 0
| 0
| 0.171403
| 0
| 0
| 0
| 0
| 0
| 0.298851
| 1
| 0.091954
| false
| 0.0894
| 0.012771
| 0
| 0.114943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
89c84e6f4d4ae3c24bfdf71bca7d63008a96724c
| 2,533
|
py
|
Python
|
neuro_config_client/__init__.py
|
neuro-inc/neuro-config-client
|
29dae0be0ce2238e1f5be0250fc810156ec53fee
|
[
"Apache-2.0"
] | null | null | null |
neuro_config_client/__init__.py
|
neuro-inc/neuro-config-client
|
29dae0be0ce2238e1f5be0250fc810156ec53fee
|
[
"Apache-2.0"
] | 3
|
2022-02-21T16:44:50.000Z
|
2022-03-31T15:08:14.000Z
|
neuro_config_client/__init__.py
|
neuro-inc/neuro-config-client
|
29dae0be0ce2238e1f5be0250fc810156ec53fee
|
[
"Apache-2.0"
] | null | null | null |
"""Platform config client."""
from pkg_resources import get_distribution
from .client import ConfigClient
from .entities import (
ACMEEnvironment,
ARecord,
AWSCloudProvider,
AWSCredentials,
AWSStorage,
AzureCloudProvider,
AzureCredentials,
AzureReplicationType,
AzureStorage,
AzureStorageTier,
BucketsConfig,
CloudProvider,
CloudProviderType,
Cluster,
ClusterLocationType,
ClusterStatus,
CredentialsConfig,
DisksConfig,
DNSConfig,
DockerRegistryConfig,
EFSPerformanceMode,
EFSThroughputMode,
EMCECSCredentials,
GoogleCloudProvider,
GoogleFilestoreTier,
GoogleStorage,
GrafanaCredentials,
HelmRegistryConfig,
IdleJobConfig,
IngressConfig,
MetricsConfig,
MinioCredentials,
MonitoringConfig,
NeuroAuthConfig,
NodePool,
NodeRole,
NotificationType,
OnPremCloudProvider,
OpenStackCredentials,
OrchestratorConfig,
RegistryConfig,
ResourcePoolType,
ResourcePreset,
Resources,
SecretsConfig,
SentryCredentials,
StorageConfig,
StorageInstance,
TPUPreset,
TPUResource,
VCDCloudProvider,
VCDCredentials,
VCDStorage,
VolumeConfig,
)
# Public API of the package: every name re-exported from .client and
# .entities above. Keep this list in sync with the import block.
__all__ = [
    "ConfigClient",
    "ACMEEnvironment",
    "ARecord",
    "AWSCloudProvider",
    "AWSCredentials",
    "AWSStorage",
    "AzureCloudProvider",
    "AzureCredentials",
    "AzureReplicationType",
    "AzureStorage",
    "AzureStorageTier",
    "BucketsConfig",
    "CloudProvider",
    "CloudProviderType",
    "Cluster",
    "ClusterLocationType",
    "ClusterStatus",
    "CredentialsConfig",
    "DisksConfig",
    "DNSConfig",
    "DockerRegistryConfig",
    "EFSPerformanceMode",
    "EFSThroughputMode",
    "EMCECSCredentials",
    "GoogleCloudProvider",
    "GoogleFilestoreTier",
    "GoogleStorage",
    "GrafanaCredentials",
    "HelmRegistryConfig",
    "IdleJobConfig",
    "IngressConfig",
    "MetricsConfig",
    "MinioCredentials",
    "MonitoringConfig",
    "NeuroAuthConfig",
    "NodePool",
    "NodeRole",
    "NotificationType",
    "OnPremCloudProvider",
    "OpenStackCredentials",
    "OrchestratorConfig",
    "RegistryConfig",
    "ResourcePoolType",
    "ResourcePreset",
    "Resources",
    "SecretsConfig",
    "SentryCredentials",
    "StorageConfig",
    "StorageInstance",
    "TPUPreset",
    "TPUResource",
    "VCDCloudProvider",
    "VCDCredentials",
    "VCDStorage",
    "VolumeConfig",
]
# Version of the installed distribution, resolved once at import time.
# NOTE(review): pkg_resources is deprecated upstream; consider
# importlib.metadata.version() once the minimum supported Python allows it.
__version__ = get_distribution(__name__).version
| 21.108333
| 48
| 0.681405
| 131
| 2,533
| 13.061069
| 0.51145
| 0.017534
| 0.044418
| 0.060783
| 0.911748
| 0.911748
| 0.911748
| 0.911748
| 0.911748
| 0.911748
| 0
| 0
| 0.22503
| 2,533
| 119
| 49
| 21.285714
| 0.871625
| 0.00908
| 0
| 0
| 0
| 0
| 0.316294
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.025862
| 0
| 0.025862
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
89d31149e3987c8f577455e47a759d5cce4595c0
| 138
|
py
|
Python
|
syft/messaging/plan/__init__.py
|
Rishav1/PySyft
|
f620ee12727b52b19a317f263789830b57ee2539
|
[
"Apache-2.0"
] | 2
|
2019-05-29T13:09:02.000Z
|
2019-06-14T17:40:51.000Z
|
syft/messaging/plan/__init__.py
|
Rishav1/PySyft
|
f620ee12727b52b19a317f263789830b57ee2539
|
[
"Apache-2.0"
] | 3
|
2019-05-24T01:16:56.000Z
|
2019-09-18T13:02:30.000Z
|
syft/messaging/plan/__init__.py
|
Rishav1/PySyft
|
f620ee12727b52b19a317f263789830b57ee2539
|
[
"Apache-2.0"
] | 1
|
2022-03-12T08:04:34.000Z
|
2022-03-12T08:04:34.000Z
|
from syft.messaging.plan.plan import func2plan
from syft.messaging.plan.plan import method2plan
from syft.messaging.plan.plan import Plan
| 34.5
| 48
| 0.847826
| 21
| 138
| 5.571429
| 0.333333
| 0.205128
| 0.435897
| 0.538462
| 0.794872
| 0.794872
| 0
| 0
| 0
| 0
| 0
| 0.015873
| 0.086957
| 138
| 3
| 49
| 46
| 0.912698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
617196bd3efd84b9c9f8d2194c43bb5e8eb3bd0a
| 3,610
|
py
|
Python
|
projetos/migrations/0003_auto_20171201_1758.py
|
danieldourado/camara-mirim-crawler
|
a00476a6b85acd788265f1be05e5c4dff7c63226
|
[
"MIT"
] | null | null | null |
projetos/migrations/0003_auto_20171201_1758.py
|
danieldourado/camara-mirim-crawler
|
a00476a6b85acd788265f1be05e5c4dff7c63226
|
[
"MIT"
] | 21
|
2019-12-26T16:41:19.000Z
|
2022-03-21T22:16:22.000Z
|
projetos/migrations/0003_auto_20171201_1758.py
|
danieldourado/plenarinho-util
|
a00476a6b85acd788265f1be05e5c4dff7c63226
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-12-01 17:58
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the applicant's personal-data columns to the ``projeto`` model.

    All sixteen new columns share one definition: a required ``CharField``
    (``max_length=255``) whose one-off default ``'test'`` exists only to
    back-fill rows that predate this migration (``preserve_default=False``
    keeps the default out of the final model state).
    """

    dependencies = [
        ('projetos', '0002_projeto_nomedacrianca'),
    ]

    # The sixteen AddField stanzas were identical except for the field name,
    # so the operations are generated from the name list instead of being
    # spelled out sixteen times.  The list order matches the original
    # migration, so the recorded operation order is unchanged.
    operations = [
        migrations.AddField(
            model_name='projeto',
            name=field_name,
            field=models.CharField(default='test', max_length=255),
            preserve_default=False,
        )
        for field_name in [
            'ano', 'cep', 'cidade', 'dataDeNascimento', 'email',
            'endereco', 'escola', 'justificativa', 'nomeDoProjetoDeLei',
            'nomeDosPais', 'projetoDeLei', 'serie', 'sexo', 'telefone',
            'tema', 'uf',
        ]
    ]
| 32.232143
| 67
| 0.551801
| 324
| 3,610
| 5.978395
| 0.179012
| 0.148684
| 0.189985
| 0.223025
| 0.83428
| 0.83428
| 0.814662
| 0.814662
| 0.814662
| 0.814662
| 0
| 0.028619
| 0.332133
| 3,610
| 111
| 68
| 32.522523
| 0.774782
| 0.018837
| 0
| 0.769231
| 1
| 0
| 0.094377
| 0.007347
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019231
| 0
| 0.048077
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
61c2680aeaf1736eb32a2e9db6452d6fee77664f
| 4,925
|
py
|
Python
|
ppn/_data_labeling.py
|
tilleyd/point-proposal-net
|
3731984046cd56101238de30a441d610b79bc8b0
|
[
"MIT"
] | null | null | null |
ppn/_data_labeling.py
|
tilleyd/point-proposal-net
|
3731984046cd56101238de30a441d610b79bc8b0
|
[
"MIT"
] | null | null | null |
ppn/_data_labeling.py
|
tilleyd/point-proposal-net
|
3731984046cd56101238de30a441d610b79bc8b0
|
[
"MIT"
] | null | null | null |
# ppn._data_labeling
# author: Duncan Tilley
def get_anchors(config):
    """
    Return the (x, y) centre coordinates of every anchor cell.

    config
        The configuration dictionary. See ppn.config.ppn_config.
    """
    import numpy as np
    # Each anchor sits at the centre of one feature-map cell.
    cell = config['image_size'] / config['feature_size']
    centres = np.arange(cell * 0.5, config['image_size'], cell, dtype=np.float32)
    n = len(centres)
    # Row-major grid: x varies fastest, y is repeated per row.
    xs = np.tile(centres, n)
    ys = np.repeat(centres, n)
    return np.transpose([xs, ys])
def get_anchor_labels(anchors, coords, config):
    """
    Generates the anchor labels for training the PPN.
    Returns y_conf, y_reg.

    anchors
        The list of anchor coordinates generated from get_anchors().
    coords
        The list of ground truth point coordinates.
    config
        The configuration dictionary. See ppn.config.ppn_config.
    """
    import numpy as np
    r_near = config['r_near']
    r_far = config['r_far']
    img_size = config['image_size']
    feature_size = config['feature_size']
    step = img_size / feature_size
    halfstep = step * 0.5
    # Start every anchor as "ignore" (-1); passes below promote to 1 / demote to 0.
    y_conf = np.full(anchors.shape[0], -1, dtype=np.int8)
    y_reg = np.zeros(anchors.shape)
    # For each point, find the nearest anchor and calculate the distance.
    # This ensures that most points have an associated anchor.
    for (x, y) in coords:
        r = int(np.round((y - halfstep) / step))
        c = int(np.round((x - halfstep) / step))
        anchor_index = r * feature_size + c
        y_conf[anchor_index] = 1
        # Regression targets are offsets from the anchor, in cell units.
        y_reg[anchor_index][0] = (x - anchors[anchor_index][0]) / step
        y_reg[anchor_index][1] = (y - anchors[anchor_index][1]) / step
    # For each anchor, label by the distance to the nearest point.
    # Coordinates are rescaled to cell units once, instead of once per anchor
    # as in the original (removes O(anchors * points) redundant divisions).
    scaled_coords = [(px / step, py / step) for (px, py) in coords]
    for i in range(len(anchors)):
        ax = anchors[i][0] / step
        ay = anchors[i][1] / step
        if scaled_coords:
            distances = [np.sqrt((ax - px) ** 2 + (ay - py) ** 2)
                         for (px, py) in scaled_coords]
            near = int(np.argmin(distances))
            dist = distances[near]
            if dist <= r_near:
                y_conf[i] = 1
                px, py = scaled_coords[near]
                y_reg[i][0] = (px - ax)
                y_reg[i][1] = (py - ay)
            elif dist > r_far:
                y_conf[i] = 0
    # reshape for use in PPN training
    y_conf = np.reshape(y_conf, (feature_size, feature_size))
    y_reg = np.reshape(y_reg, (feature_size, feature_size) + (2,))
    return y_conf, y_reg
def get_fake_prediction(anchors, coords, config):
    """
    Generates the anchor labels with random noise to fake a good prediction.
    Used for testing non-model related code.

    anchors
        The list of anchor coordinates generated from get_anchors().
    coords
        The list of ground truth point coordinates.
    config
        The configuration dictionary. See ppn.config.ppn_config.
    """
    import numpy as np
    r_near = config['r_near']
    r_far = config['r_far']
    img_size = config['image_size']
    feature_size = config['feature_size']

    def noise():
        # Small Gaussian jitter applied to the regression targets only.
        return np.random.normal(loc=0.0, scale=0.1)

    step = img_size / feature_size
    halfstep = step * 0.5
    y_conf = np.full(anchors.shape[0], 0.0, dtype=np.int8)
    y_reg = np.zeros(anchors.shape)
    # For each point, find the nearest anchor and calculate the distance.
    # This ensures that most points have an associated anchor.
    for (x, y) in coords:
        x_norm = (x - halfstep) / step
        y_norm = (y - halfstep) / step
        r = int(np.round(y_norm))
        c = int(np.round(x_norm))
        anchor_index = r * feature_size + c
        y_conf[anchor_index] = 1
        # BUG FIX: the original called `nosie()` here (NameError at runtime).
        y_reg[anchor_index][0] = noise() + (x - anchors[anchor_index][0]) / step
        y_reg[anchor_index][1] = noise() + (y - anchors[anchor_index][1]) / step
    # for each anchor, calculate the distances to each point
    for i in range(0, len(anchors)):
        x, y = anchors[i]
        x /= step
        y /= step
        distances = []
        for (px, py) in coords:
            px /= step
            py /= step
            distances.append(np.sqrt((x-px)**2 + (y-py)**2))
        # Guard against empty `coords` (np.argmin on an empty list raises);
        # this matches the guard already present in get_anchor_labels.
        if len(distances) > 0:
            near = np.argmin(distances)
            dist = distances[near]
            if dist <= r_near:
                y_conf[i] = 1
                px, py = coords[near]
                px /= step
                py /= step
                y_reg[i][0] = noise() + (px - x)
                y_reg[i][1] = noise() + (py - y)
            elif dist > r_far:
                y_conf[i] = 0
    # reshape for use in PPN training
    y_conf = np.reshape(y_conf, (feature_size, feature_size))
    y_reg = np.reshape(y_reg, (feature_size, feature_size) + (2,))
    return y_conf, y_reg
| 31.774194
| 80
| 0.58132
| 704
| 4,925
| 3.914773
| 0.171875
| 0.075835
| 0.054427
| 0.021771
| 0.834543
| 0.834543
| 0.828012
| 0.796807
| 0.796807
| 0.796807
| 0
| 0.013715
| 0.304162
| 4,925
| 154
| 81
| 31.980519
| 0.790487
| 0.245076
| 0
| 0.783505
| 1
| 0
| 0.024458
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041237
| false
| 0
| 0.030928
| 0.010309
| 0.113402
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f623f757acec15c8ae768e444030e06177960528
| 6,223
|
py
|
Python
|
tests/test_scheduler.py
|
aronianm/flask-apscheduler
|
6fb4e7646fa9d2a8f337f2b965404ce074a589b1
|
[
"Apache-2.0"
] | 942
|
2015-05-13T00:12:31.000Z
|
2022-03-31T05:20:41.000Z
|
tests/test_scheduler.py
|
aronianm/flask-apscheduler
|
6fb4e7646fa9d2a8f337f2b965404ce074a589b1
|
[
"Apache-2.0"
] | 188
|
2015-06-29T14:02:34.000Z
|
2022-03-27T15:34:52.000Z
|
tests/test_scheduler.py
|
aronianm/flask-apscheduler
|
6fb4e7646fa9d2a8f337f2b965404ce074a589b1
|
[
"Apache-2.0"
] | 210
|
2015-05-22T02:02:19.000Z
|
2022-03-27T02:48:05.000Z
|
from flask import Flask
from flask_apscheduler import APScheduler, utils
from unittest import TestCase
import apscheduler
from pytz import utc
import datetime
class TestScheduler(TestCase):
    """Exercise the flask_apscheduler.APScheduler lifecycle and job API.

    Each test gets a fresh Flask app and a fresh, not-yet-started scheduler
    from setUp; a second scheduler bound at construction time checks the
    app=... constructor path.
    """
    def setUp(self):
        """Create a fresh app plus an unbound and an app-bound scheduler."""
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler_two = APScheduler(app=self.app)
    def test_running(self):
        """`running` flips from False to True once start() is called."""
        self.assertFalse(self.scheduler.running)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
    def test_start_with_allowed_hosts(self):
        """With a non-matching allowed-hosts list, start() does not run the scheduler."""
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = ['any_server_name']
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)
    def test_start_without_allowed_hosts(self):
        """With an empty allowed-hosts list, start() does not run the scheduler."""
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = []
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)
    def test_shutdown(self):
        """shutdown() stops a running scheduler."""
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)
    def test_load_jobs_from_config(self):
        """Jobs declared in app.config['JOBS'] are loaded by init_app()."""
        self.app.config['JOBS'] = [
            {
                'id': 'job1',
                'func': 'tests.test_api:job1',
                'trigger': 'interval',
                'seconds': 10,
            }
        ]
        # Exercise the remaining SCHEDULER_* config keys in the same pass.
        self.app.config['SCHEDULER_JOBSTORES'] = {"default": apscheduler.jobstores.memory.MemoryJobStore()}
        self.app.config['SCHEDULER_EXECUTORS'] = {"default": {"type": "threadpool"}}
        self.app.config['SCHEDULER_JOB_DEFAULTS'] = {"coalesce": True}
        self.app.config['SCHEDULER_TIMEZONE'] = utc
        self.scheduler.init_app(app=self.app)
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)
    def test_task_decorator(self):
        """@scheduler.task registers the decorated function as a job."""
        @self.scheduler.task('interval', seconds=10, id='job1')
        def decorated_job():
            pass
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)
    def test_state_prop(self):
        """`state` is truthy while running and falsy after shutdown."""
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.state)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.state)
    def test_scheduler_prop(self):
        """The wrapped APScheduler instance is exposed via `scheduler`."""
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertIsNotNone(self.scheduler.scheduler)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)
    def test_pause_resume(self):
        """pause()/resume() toggle the scheduler state."""
        # NOTE(review): asserts apscheduler's numeric state constants
        # (2 = paused, 1 = running) — confirm against the apscheduler version.
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
        self.scheduler.pause()
        self.assertTrue(self.scheduler.state == 2)
        self.scheduler.resume()
        self.assertTrue(self.scheduler.state == 1)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)
    def test_add_listener(self):
        """add_listener/remove_listener accept a callback without error."""
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
        self.scheduler.add_listener(None)
        self.scheduler.remove_listener(None)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)
    def test_add_remove_job(self):
        """remove_job() deletes a previously registered job."""
        @self.scheduler.task('interval', seconds=10, id='job1')
        def decorated_job():
            pass
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)
        self.scheduler.remove_job('job1')
        self.assertFalse(self.scheduler.get_job('job1'))
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)
    def test_add_delete_job(self):
        """delete_job() behaves like remove_job() for a single job."""
        @self.scheduler.task('interval', seconds=10, id='job1')
        def decorated_job():
            pass
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)
        self.scheduler.delete_job('job1')
        self.assertFalse(self.scheduler.get_job('job1'))
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)
    def test_add_remove_all_jobs(self):
        """remove_all_jobs() clears every registered job at once."""
        @self.scheduler.task('interval', hours=1, id='job1')
        def decorated_job():
            pass
        @self.scheduler.task('interval', hours=1, id='job2')
        def decorated_job2():
            pass
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        jobs = self.scheduler.get_jobs()
        self.assertTrue(len(jobs) == 2)
        self.scheduler.remove_all_jobs()
        self.assertFalse(self.scheduler.get_job('job1'))
        self.assertFalse(self.scheduler.get_job('job2'))
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)
    def test_add_delete_all_jobs(self):
        """delete_all_jobs() behaves like remove_all_jobs()."""
        @self.scheduler.task('interval', hours=1, id='job1')
        def decorated_job():
            pass
        @self.scheduler.task('interval', hours=1, id='job2')
        def decorated_job2():
            pass
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        jobs = self.scheduler.get_jobs()
        self.assertTrue(len(jobs) == 2)
        self.scheduler.delete_all_jobs()
        self.assertFalse(self.scheduler.get_job('job1'))
        self.assertFalse(self.scheduler.get_job('job2'))
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)
    def test_job_to_dict(self):
        """utils.job_to_dict() produces a non-empty mapping for a real job."""
        @self.scheduler.task('interval', hours=1, id='job1', end_date=datetime.datetime.now(), weeks=1, days=1, seconds=99)
        def decorated_job():
            pass
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)
        self.assertTrue(len(utils.job_to_dict(job)))
        self.scheduler.delete_job('job1')
        self.assertFalse(self.scheduler.get_job('job1'))
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)
def job1():
    """No-op module-level job target used by the scheduler tests."""
    return None
| 32.752632
| 123
| 0.639242
| 718
| 6,223
| 5.392758
| 0.118384
| 0.302169
| 0.098141
| 0.144628
| 0.766787
| 0.739928
| 0.735537
| 0.722882
| 0.709452
| 0.673037
| 0
| 0.010314
| 0.236542
| 6,223
| 189
| 124
| 32.925926
| 0.804673
| 0
| 0
| 0.636364
| 0
| 0
| 0.062028
| 0.010927
| 0
| 0
| 0
| 0
| 0.233766
| 1
| 0.162338
| false
| 0.058442
| 0.038961
| 0
| 0.207792
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
9c8a82db5fb12ea511a325970aabc40c65bdb349
| 688
|
py
|
Python
|
tianqiai/api_resources/__init__.py
|
shanjiecai/zhipuai-python
|
757ad04769b3c50b52460fd46ff40028e095e71d
|
[
"MIT"
] | null | null | null |
tianqiai/api_resources/__init__.py
|
shanjiecai/zhipuai-python
|
757ad04769b3c50b52460fd46ff40028e095e71d
|
[
"MIT"
] | null | null | null |
tianqiai/api_resources/__init__.py
|
shanjiecai/zhipuai-python
|
757ad04769b3c50b52460fd46ff40028e095e71d
|
[
"MIT"
] | null | null | null |
# from tianqiai.api_resources.answer import Answer # noqa: F401
# from tianqiai.api_resources.classification import Classification # noqa: F401
from tianqiai.api_resources.completion import Completion # noqa: F401
# from tianqiai.api_resources.embedding import Embedding # noqa: F401
# from tianqiai.api_resources.engine import Engine # noqa: F401
# from tianqiai.api_resources.error_object import ErrorObject # noqa: F401
# from tianqiai.api_resources.file import File # noqa: F401
# from tianqiai.api_resources.fine_tune import FineTune # noqa: F401
# from tianqiai.api_resources.model import Model # noqa: F401
# from tianqiai.api_resources.search import Search # noqa: F401
| 62.545455
| 80
| 0.799419
| 92
| 688
| 5.847826
| 0.228261
| 0.223048
| 0.27881
| 0.446097
| 0.535316
| 0.535316
| 0
| 0
| 0
| 0
| 0
| 0.050083
| 0.12936
| 688
| 10
| 81
| 68.8
| 0.84808
| 0.872093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9c8e07d3da814b2c1f5e959f17aae7f67ab38586
| 15,371
|
py
|
Python
|
sdk/python/pulumi_alicloud/log/dashboard.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/log/dashboard.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/log/dashboard.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['DashboardArgs', 'Dashboard']
@pulumi.input_type
class DashboardArgs:
    # NOTE: tfgen-generated resource-args class (see the file header warning);
    # changes belong in the upstream provider schema, not here.
    def __init__(__self__, *,
                 char_list: pulumi.Input[str],
                 dashboard_name: pulumi.Input[str],
                 project_name: pulumi.Input[str],
                 display_name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Dashboard resource.
        :param pulumi.Input[str] char_list: Configuration of charts in the dashboard.
        :param pulumi.Input[str] dashboard_name: The name of the Log Dashboard.
        :param pulumi.Input[str] project_name: The name of the log project. It is the only in one Alicloud account.
        :param pulumi.Input[str] display_name: Dashboard alias.
        """
        # Required inputs are always recorded; the optional alias only when provided.
        pulumi.set(__self__, "char_list", char_list)
        pulumi.set(__self__, "dashboard_name", dashboard_name)
        pulumi.set(__self__, "project_name", project_name)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
    @property
    @pulumi.getter(name="charList")
    def char_list(self) -> pulumi.Input[str]:
        """
        Configuration of charts in the dashboard.
        """
        return pulumi.get(self, "char_list")
    @char_list.setter
    def char_list(self, value: pulumi.Input[str]):
        pulumi.set(self, "char_list", value)
    @property
    @pulumi.getter(name="dashboardName")
    def dashboard_name(self) -> pulumi.Input[str]:
        """
        The name of the Log Dashboard.
        """
        return pulumi.get(self, "dashboard_name")
    @dashboard_name.setter
    def dashboard_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "dashboard_name", value)
    @property
    @pulumi.getter(name="projectName")
    def project_name(self) -> pulumi.Input[str]:
        """
        The name of the log project. It is the only in one Alicloud account.
        """
        return pulumi.get(self, "project_name")
    @project_name.setter
    def project_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "project_name", value)
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        Dashboard alias.
        """
        return pulumi.get(self, "display_name")
    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)
@pulumi.input_type
class _DashboardState:
    # NOTE: tfgen-generated state class — unlike DashboardArgs, every field is
    # optional because state lookups may be partial.
    def __init__(__self__, *,
                 char_list: Optional[pulumi.Input[str]] = None,
                 dashboard_name: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 project_name: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Dashboard resources.
        :param pulumi.Input[str] char_list: Configuration of charts in the dashboard.
        :param pulumi.Input[str] dashboard_name: The name of the Log Dashboard.
        :param pulumi.Input[str] display_name: Dashboard alias.
        :param pulumi.Input[str] project_name: The name of the log project. It is the only in one Alicloud account.
        """
        # Only record the properties that were actually supplied.
        if char_list is not None:
            pulumi.set(__self__, "char_list", char_list)
        if dashboard_name is not None:
            pulumi.set(__self__, "dashboard_name", dashboard_name)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if project_name is not None:
            pulumi.set(__self__, "project_name", project_name)
    @property
    @pulumi.getter(name="charList")
    def char_list(self) -> Optional[pulumi.Input[str]]:
        """
        Configuration of charts in the dashboard.
        """
        return pulumi.get(self, "char_list")
    @char_list.setter
    def char_list(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "char_list", value)
    @property
    @pulumi.getter(name="dashboardName")
    def dashboard_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the Log Dashboard.
        """
        return pulumi.get(self, "dashboard_name")
    @dashboard_name.setter
    def dashboard_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "dashboard_name", value)
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        Dashboard alias.
        """
        return pulumi.get(self, "display_name")
    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)
    @property
    @pulumi.getter(name="projectName")
    def project_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the log project. It is the only in one Alicloud account.
        """
        return pulumi.get(self, "project_name")
    @project_name.setter
    def project_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project_name", value)
class Dashboard(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 char_list: Optional[pulumi.Input[str]] = None,
                 dashboard_name: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 project_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        The dashboard is a real-time data analysis platform provided by the log service. You can display frequently used query and analysis statements in the form of charts and save statistical charts to the dashboard.
        [Refer to details](https://www.alibabacloud.com/help/doc-detail/102530.htm).
        > **NOTE:** Available in 1.86.0
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        default_project = alicloud.log.Project("defaultProject", description="tf unit test")
        default_store = alicloud.log.Store("defaultStore",
            project="tf-project",
            retention_period=3000,
            shard_count=1)
        example = alicloud.log.Dashboard("example",
            char_list=\"\"\"  [
            {
              "title":"new_title",
              "type":"map",
              "search":{
                "logstore":"tf-logstore",
                "topic":"new_topic",
                "query":"* | SELECT COUNT(name) as ct_name, COUNT(product) as ct_product, name,product GROUP BY name,product",
                "start":"-86400s",
                "end":"now"
              },
              "display":{
                "xAxis":[
                  "ct_name"
                ],
                "yAxis":[
                  "ct_product"
                ],
                "xPos":0,
                "yPos":0,
                "width":10,
                "height":12,
                "displayName":"xixihaha911"
              }
            }
          ]
        \"\"\",
            dashboard_name="tf-dashboard",
            project_name="tf-project")
        ```
        ## Import
        Log Dashboard can be imported using the id or name, e.g.
        ```sh
         $ pulumi import alicloud:log/dashboard:Dashboard example tf-project:tf-logstore:tf-dashboard
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] char_list: Configuration of charts in the dashboard.
        :param pulumi.Input[str] dashboard_name: The name of the Log Dashboard.
        :param pulumi.Input[str] display_name: Dashboard alias.
        :param pulumi.Input[str] project_name: The name of the log project. It is the only in one Alicloud account.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: DashboardArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        The dashboard is a real-time data analysis platform provided by the log service. You can display frequently used query and analysis statements in the form of charts and save statistical charts to the dashboard.
        [Refer to details](https://www.alibabacloud.com/help/doc-detail/102530.htm).
        > **NOTE:** Available in 1.86.0
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        default_project = alicloud.log.Project("defaultProject", description="tf unit test")
        default_store = alicloud.log.Store("defaultStore",
            project="tf-project",
            retention_period=3000,
            shard_count=1)
        example = alicloud.log.Dashboard("example",
            char_list=\"\"\"  [
            {
              "title":"new_title",
              "type":"map",
              "search":{
                "logstore":"tf-logstore",
                "topic":"new_topic",
                "query":"* | SELECT COUNT(name) as ct_name, COUNT(product) as ct_product, name,product GROUP BY name,product",
                "start":"-86400s",
                "end":"now"
              },
              "display":{
                "xAxis":[
                  "ct_name"
                ],
                "yAxis":[
                  "ct_product"
                ],
                "xPos":0,
                "yPos":0,
                "width":10,
                "height":12,
                "displayName":"xixihaha911"
              }
            }
          ]
        \"\"\",
            dashboard_name="tf-dashboard",
            project_name="tf-project")
        ```
        ## Import
        Log Dashboard can be imported using the id or name, e.g.
        ```sh
         $ pulumi import alicloud:log/dashboard:Dashboard example tf-project:tf-logstore:tf-dashboard
        ```
        :param str resource_name: The name of the resource.
        :param DashboardArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    # Runtime dispatcher for the two @overload signatures above: an
    # args-object call is unpacked into keyword form before delegating.
    def __init__(__self__, resource_name: str, *args, **kwargs):
        resource_args, opts = _utilities.get_resource_args_opts(DashboardArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       char_list: Optional[pulumi.Input[str]] = None,
                       dashboard_name: Optional[pulumi.Input[str]] = None,
                       display_name: Optional[pulumi.Input[str]] = None,
                       project_name: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Normalize/validate resource options before building the prop bag.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = DashboardArgs.__new__(DashboardArgs)
            # Required properties are only enforced for new resources
            # (opts.urn set means we are looking up an existing one).
            if char_list is None and not opts.urn:
                raise TypeError("Missing required property 'char_list'")
            __props__.__dict__["char_list"] = char_list
            if dashboard_name is None and not opts.urn:
                raise TypeError("Missing required property 'dashboard_name'")
            __props__.__dict__["dashboard_name"] = dashboard_name
            __props__.__dict__["display_name"] = display_name
            if project_name is None and not opts.urn:
                raise TypeError("Missing required property 'project_name'")
            __props__.__dict__["project_name"] = project_name
        super(Dashboard, __self__).__init__(
            'alicloud:log/dashboard:Dashboard',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            char_list: Optional[pulumi.Input[str]] = None,
            dashboard_name: Optional[pulumi.Input[str]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            project_name: Optional[pulumi.Input[str]] = None) -> 'Dashboard':
        """
        Get an existing Dashboard resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] char_list: Configuration of charts in the dashboard.
        :param pulumi.Input[str] dashboard_name: The name of the Log Dashboard.
        :param pulumi.Input[str] display_name: Dashboard alias.
        :param pulumi.Input[str] project_name: The name of the log project. It is the only in one Alicloud account.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _DashboardState.__new__(_DashboardState)
        __props__.__dict__["char_list"] = char_list
        __props__.__dict__["dashboard_name"] = dashboard_name
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["project_name"] = project_name
        return Dashboard(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="charList")
    def char_list(self) -> pulumi.Output[str]:
        """
        Configuration of charts in the dashboard.
        """
        return pulumi.get(self, "char_list")
    @property
    @pulumi.getter(name="dashboardName")
    def dashboard_name(self) -> pulumi.Output[str]:
        """
        The name of the Log Dashboard.
        """
        return pulumi.get(self, "dashboard_name")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[Optional[str]]:
        """
        Dashboard alias.
        """
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter(name="projectName")
    def project_name(self) -> pulumi.Output[str]:
        """
        The name of the log project. It is the only in one Alicloud account.
        """
        return pulumi.get(self, "project_name")
| 37.859606
| 218
| 0.601002
| 1,731
| 15,371
| 5.10803
| 0.123628
| 0.069668
| 0.085501
| 0.067179
| 0.823117
| 0.795634
| 0.789753
| 0.747455
| 0.727211
| 0.709342
| 0
| 0.00543
| 0.293149
| 15,371
| 405
| 219
| 37.953086
| 0.808376
| 0.369917
| 0
| 0.602273
| 1
| 0
| 0.10722
| 0.003851
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153409
| false
| 0.005682
| 0.028409
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
140d5d71cab6b2ce97539bc906762d42df335f04
| 650
|
py
|
Python
|
calculo.hipotenusa.py
|
wcalazans81/Exercicios-Python
|
d9abb9505bbf9151d3515cc9ca5b9bd32435699e
|
[
"MIT"
] | null | null | null |
calculo.hipotenusa.py
|
wcalazans81/Exercicios-Python
|
d9abb9505bbf9151d3515cc9ca5b9bd32435699e
|
[
"MIT"
] | null | null | null |
calculo.hipotenusa.py
|
wcalazans81/Exercicios-Python
|
d9abb9505bbf9151d3515cc9ca5b9bd32435699e
|
[
"MIT"
] | null | null | null |
# Read the two leg lengths of a right triangle and print its hypotenuse.
# Two earlier commented-out drafts (manual (co**2 + ca**2) ** (1/2) and a
# full `import math` + math.hypot variant) were removed; the single
# idiomatic version below is kept.
from math import hypot

co = float(input('Digite o comprimento do cateto opsto: '))
ca = float(input('Digite o comprimento do cateto adjacente: '))

# hypot(co, ca) == sqrt(co**2 + ca**2), computed in one stdlib call.
hi = hypot(co, ca)
print('A hipotenusa vai medir {:.2f} cm'.format(hi))
| 46.428571
| 64
| 0.681538
| 103
| 650
| 4.300971
| 0.252427
| 0.13544
| 0.216704
| 0.230248
| 0.907449
| 0.907449
| 0.907449
| 0.907449
| 0.907449
| 0.826185
| 0
| 0.012613
| 0.146154
| 650
| 14
| 65
| 46.428571
| 0.785586
| 0.635385
| 0
| 0
| 0
| 0
| 0.49345
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.2
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
14368cd8fe61dc75b8921741b2909c48d5bbe65b
| 108,989
|
py
|
Python
|
generated/azure-cli/apim/custom.py
|
audevbot/autorest.devops.debug
|
a507fb6e2dd7826212537f27d583f203aac1c28f
|
[
"MIT"
] | null | null | null |
generated/azure-cli/apim/custom.py
|
audevbot/autorest.devops.debug
|
a507fb6e2dd7826212537f27d583f203aac1c28f
|
[
"MIT"
] | null | null | null |
generated/azure-cli/apim/custom.py
|
audevbot/autorest.devops.debug
|
a507fb6e2dd7826212537f27d583f203aac1c28f
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
# pylint: disable=too-many-statements
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=unused-argument
import json
# module equivalent: azure_rm_apimanagementapi
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}
def create_apim_api(cmd, client,
                    resource_group,
                    service_name,
                    api_id,
                    path,
                    description=None,
                    authentication_settings=None,
                    subscription_key_parameter_names=None,
                    _type=None,
                    api_revision=None,
                    api_version=None,
                    is_current=None,
                    api_revision_description=None,
                    api_version_description=None,
                    api_version_set_id=None,
                    subscription_required=None,
                    source_api_id=None,
                    display_name=None,
                    service_url=None,
                    protocols=None,
                    api_version_set=None,
                    value=None,
                    _format=None,
                    wsdl_selector=None,
                    api_type=None):
    """Create or update an API in an API Management service.

    Complex arguments may arrive either as JSON strings (from the CLI) or as
    already-decoded objects; both forms are accepted.
    """
    def _parse(raw):
        # CLI passes structured options as JSON text; decode only strings.
        return json.loads(raw) if isinstance(raw, str) else raw

    parameters = {
        'description': description,
        'authentication_settings': _parse(authentication_settings),
        'subscription_key_parameter_names': _parse(subscription_key_parameter_names),
        'type': _type,
        'api_revision': api_revision,
        'api_version': api_version,
        'is_current': is_current,
        'api_revision_description': api_revision_description,
        'api_version_description': api_version_description,
        'api_version_set_id': api_version_set_id,
        'subscription_required': subscription_required,
        'source_api_id': source_api_id,
        'display_name': display_name,
        'service_url': service_url,
        'path': path,
        'protocols': _parse(protocols),
        'api_version_set': _parse(api_version_set),
        'value': value,
        'format': _format,
        'wsdl_selector': _parse(wsdl_selector),
        'api_type': api_type,
    }
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   parameters=parameters)
# module equivalent: azure_rm_apimanagementapi
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}
def update_apim_api(cmd, client, body,
                    resource_group,
                    service_name,
                    api_id,
                    path,
                    description=None,
                    authentication_settings=None,
                    subscription_key_parameter_names=None,
                    _type=None,
                    api_revision=None,
                    api_version=None,
                    is_current=None,
                    api_revision_description=None,
                    api_version_description=None,
                    api_version_set_id=None,
                    subscription_required=None,
                    source_api_id=None,
                    display_name=None,
                    service_url=None,
                    protocols=None,
                    api_version_set=None,
                    value=None,
                    _format=None,
                    wsdl_selector=None,
                    api_type=None):
    """Update an API: fetch the current resource, overwrite its fields and
    write it back with create_or_update.

    The incoming ``body`` argument is ignored (kept for interface
    compatibility with the generated command table).
    """
    # BUG FIX: as_dict() returns a plain dict, so fields must be set with
    # item assignment; the generated attribute form raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id).as_dict()
    body['description'] = description  # str
    body['authentication_settings'] = json.loads(authentication_settings) if isinstance(authentication_settings, str) else authentication_settings
    body['subscription_key_parameter_names'] = json.loads(subscription_key_parameter_names) if isinstance(subscription_key_parameter_names, str) else subscription_key_parameter_names
    body['type'] = _type  # str
    body['api_revision'] = api_revision  # str
    body['api_version'] = api_version  # str
    body['is_current'] = is_current  # boolean
    body['api_revision_description'] = api_revision_description  # str
    body['api_version_description'] = api_version_description  # str
    body['api_version_set_id'] = api_version_set_id  # str
    body['subscription_required'] = subscription_required  # boolean
    body['source_api_id'] = source_api_id  # str
    body['display_name'] = display_name  # str
    body['service_url'] = service_url  # str
    body['path'] = path  # str
    body['protocols'] = json.loads(protocols) if isinstance(protocols, str) else protocols
    body['api_version_set'] = json.loads(api_version_set) if isinstance(api_version_set, str) else api_version_set
    body['value'] = value  # str
    body['format'] = _format  # str
    body['wsdl_selector'] = json.loads(wsdl_selector) if isinstance(wsdl_selector, str) else wsdl_selector
    body['api_type'] = api_type  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, parameters=body)
# module equivalent: azure_rm_apimanagementapi
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}
def list_apim_api(cmd, client,
                  resource_group,
                  service_name):
    """List the APIs of a service; fall back to the plain service listing
    when either identifier is missing."""
    if resource_group is None or service_name is None:
        return client.list_by_service(resource_group_name=resource_group,
                                      service_name=service_name)
    return client.list_by_tags(resource_group_name=resource_group,
                               service_name=service_name)
# module equivalent: azure_rm_apimanagementapirelease
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/releases/{{ release_name }}
def create_apim_api_release(cmd, client,
                            resource_group,
                            service_name,
                            api_id,
                            release_id,
                            notes=None):
    """Create or update a release of an API."""
    payload = {'notes': notes}
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   release_id=release_id,
                                   parameters=payload)
# module equivalent: azure_rm_apimanagementapirelease
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/releases/{{ release_name }}
def update_apim_api_release(cmd, client, body,
                            resource_group,
                            service_name,
                            api_id,
                            release_id,
                            notes=None):
    """Update an API release's notes via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — use item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id, release_id=release_id).as_dict()
    body['notes'] = notes  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, release_id=release_id, parameters=body)
# module equivalent: azure_rm_apimanagementapirelease
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/releases/{{ release_name }}
def list_apim_api_release(cmd, client,
                          resource_group,
                          service_name,
                          api_id):
    """List all releases of an API."""
    return client.list_by_service(resource_group_name=resource_group,
                                  service_name=service_name,
                                  api_id=api_id)
# module equivalent: azure_rm_apimanagementapioperation
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/operations/{{ operation_name }}
def create_apim_api_operation(cmd, client,
                              resource_group,
                              service_name,
                              api_id,
                              operation_id,
                              display_name,
                              method,
                              url_template,
                              template_parameters=None,
                              description=None,
                              request=None,
                              responses=None,
                              policies=None):
    """Create or update an operation on an API.

    Structured options may be JSON strings or already-decoded objects.
    """
    def _parse(raw):
        return json.loads(raw) if isinstance(raw, str) else raw

    parameters = {
        'template_parameters': _parse(template_parameters),
        'description': description,
        'request': _parse(request),
        'responses': _parse(responses),
        'policies': policies,
        'display_name': display_name,
        'method': method,
        'url_template': url_template,
    }
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   operation_id=operation_id,
                                   parameters=parameters)
# module equivalent: azure_rm_apimanagementapioperation
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/operations/{{ operation_name }}
def update_apim_api_operation(cmd, client, body,
                              resource_group,
                              service_name,
                              api_id,
                              operation_id,
                              display_name,
                              method,
                              url_template,
                              template_parameters=None,
                              description=None,
                              request=None,
                              responses=None,
                              policies=None):
    """Update an API operation via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id, operation_id=operation_id).as_dict()
    body['template_parameters'] = json.loads(template_parameters) if isinstance(template_parameters, str) else template_parameters
    body['description'] = description  # str
    body['request'] = json.loads(request) if isinstance(request, str) else request
    body['responses'] = json.loads(responses) if isinstance(responses, str) else responses
    body['policies'] = policies  # str
    body['display_name'] = display_name  # str
    body['method'] = method  # str
    body['url_template'] = url_template  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, operation_id=operation_id, parameters=body)
# module equivalent: azure_rm_apimanagementapioperation
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/operations/{{ operation_name }}
def list_apim_api_operation(cmd, client,
                            resource_group,
                            service_name,
                            api_id):
    """List all operations of an API."""
    return client.list_by_api(resource_group_name=resource_group,
                              service_name=service_name,
                              api_id=api_id)
# module equivalent: azure_rm_apimanagementapioperationpolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/operations/{{ operation_name }}/policies/{{ policy_name }}
def create_apim_api_operation_policy(cmd, client,
                                     resource_group,
                                     service_name,
                                     api_id,
                                     operation_id,
                                     policy_id,
                                     value,
                                     _format=None):
    """Create or update a policy on an API operation."""
    payload = {'value': value, 'format': _format}
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   operation_id=operation_id,
                                   policy_id=policy_id,
                                   parameters=payload)
# module equivalent: azure_rm_apimanagementapioperationpolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/operations/{{ operation_name }}/policies/{{ policy_name }}
def update_apim_api_operation_policy(cmd, client, body,
                                     resource_group,
                                     service_name,
                                     api_id,
                                     operation_id,
                                     policy_id,
                                     value,
                                     _format=None):
    """Update an API operation policy via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id, operation_id=operation_id, format=_format, policy_id=policy_id).as_dict()
    body['value'] = value  # str
    body['format'] = _format  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, operation_id=operation_id, policy_id=policy_id, parameters=body)
# module equivalent: azure_rm_apimanagementapioperationpolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/operations/{{ operation_name }}/policies/{{ policy_name }}
def list_apim_api_operation_policy(cmd, client,
                                   resource_group,
                                   service_name,
                                   api_id,
                                   operation_id):
    """List the policies of an API operation."""
    return client.list_by_operation(resource_group_name=resource_group,
                                    service_name=service_name,
                                    api_id=api_id,
                                    operation_id=operation_id)
# module equivalent: azure_rm_apimanagementtag
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/tags/{{ tag_name }}
def create_apim_tag(cmd, client,
                    resource_group,
                    service_name,
                    tag_id,
                    display_name):
    """Create or update a tag on an API Management service."""
    payload = {'display_name': display_name}
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   tag_id=tag_id,
                                   parameters=payload)
# module equivalent: azure_rm_apimanagementtag
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/tags/{{ tag_name }}
def update_apim_tag(cmd, client, body,
                    resource_group,
                    service_name,
                    tag_id,
                    display_name):
    """Update a tag's display name via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, tag_id=tag_id).as_dict()
    body['display_name'] = display_name  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, tag_id=tag_id, parameters=body)
# module equivalent: azure_rm_apimanagementtag
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/tags/{{ tag_name }}
def list_apim_tag(cmd, client,
                  resource_group,
                  service_name):
    """List tags of an API Management service.

    NOTE(review): the generated code had three byte-identical conditions, so
    the ``list_by_product`` / ``list_by_api`` branches were unreachable; they
    are removed here without changing behavior.  Confirm against the autorest
    template whether distinct scope parameters (product/api/operation ids)
    were intended.
    """
    if resource_group is not None and service_name is not None:
        return client.list_by_operation(resource_group_name=resource_group, service_name=service_name)
    return client.list_by_service(resource_group_name=resource_group, service_name=service_name)
# module equivalent: azure_rm_apimanagementapipolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/policies/{{ policy_name }}
def create_apim_api_policy(cmd, client,
                           resource_group,
                           service_name,
                           api_id,
                           policy_id,
                           value,
                           _format=None):
    """Create or update a policy on an API."""
    payload = {'value': value, 'format': _format}
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   policy_id=policy_id,
                                   parameters=payload)
# module equivalent: azure_rm_apimanagementapipolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/policies/{{ policy_name }}
def update_apim_api_policy(cmd, client, body,
                           resource_group,
                           service_name,
                           api_id,
                           policy_id,
                           value,
                           _format=None):
    """Update an API policy via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id, policy_id=policy_id, format=_format).as_dict()
    body['value'] = value  # str
    body['format'] = _format  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, policy_id=policy_id, parameters=body)
# module equivalent: azure_rm_apimanagementapipolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/policies/{{ policy_name }}
def list_apim_api_policy(cmd, client,
                         resource_group,
                         service_name,
                         api_id):
    """List the policies of an API."""
    return client.list_by_api(resource_group_name=resource_group,
                              service_name=service_name,
                              api_id=api_id)
# module equivalent: azure_rm_apimanagementapischema
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/schemas/{{ schema_name }}
def create_apim_api_schema(cmd, client,
                           resource_group,
                           service_name,
                           api_id,
                           schema_id,
                           content_type,
                           document=None):
    """Create or update a schema on an API.

    ``document`` may be a JSON string or an already-decoded object.
    """
    doc = json.loads(document) if isinstance(document, str) else document
    payload = {'content_type': content_type, 'document': doc}
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   schema_id=schema_id,
                                   parameters=payload)
# module equivalent: azure_rm_apimanagementapischema
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/schemas/{{ schema_name }}
def update_apim_api_schema(cmd, client, body,
                           resource_group,
                           service_name,
                           api_id,
                           schema_id,
                           content_type,
                           document=None):
    """Update an API schema via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id, schema_id=schema_id).as_dict()
    body['content_type'] = content_type  # str
    body['document'] = json.loads(document) if isinstance(document, str) else document
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, schema_id=schema_id, parameters=body)
# module equivalent: azure_rm_apimanagementapischema
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/schemas/{{ schema_name }}
def list_apim_api_schema(cmd, client,
                         resource_group,
                         service_name,
                         api_id):
    """List the schemas of an API."""
    return client.list_by_api(resource_group_name=resource_group,
                              service_name=service_name,
                              api_id=api_id)
# module equivalent: azure_rm_apimanagementapidiagnostic
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/diagnostics/{{ diagnostic_name }}
def create_apim_api_diagnostic(cmd, client,
                               resource_group,
                               service_name,
                               api_id,
                               diagnostic_id,
                               logger_id,
                               always_log=None,
                               sampling=None,
                               frontend=None,
                               backend=None,
                               enable_http_correlation_headers=None):
    """Create or update a diagnostic on an API.

    Structured options may be JSON strings or already-decoded objects.
    """
    def _parse(raw):
        return json.loads(raw) if isinstance(raw, str) else raw

    parameters = {
        'always_log': always_log,
        'logger_id': logger_id,
        'sampling': _parse(sampling),
        'frontend': _parse(frontend),
        'backend': _parse(backend),
        'enable_http_correlation_headers': enable_http_correlation_headers,
    }
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   diagnostic_id=diagnostic_id,
                                   parameters=parameters)
# module equivalent: azure_rm_apimanagementapidiagnostic
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/diagnostics/{{ diagnostic_name }}
def update_apim_api_diagnostic(cmd, client, body,
                               resource_group,
                               service_name,
                               api_id,
                               diagnostic_id,
                               logger_id,
                               always_log=None,
                               sampling=None,
                               frontend=None,
                               backend=None,
                               enable_http_correlation_headers=None):
    """Update an API diagnostic via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id, diagnostic_id=diagnostic_id).as_dict()
    body['always_log'] = always_log  # str
    body['logger_id'] = logger_id  # str
    body['sampling'] = json.loads(sampling) if isinstance(sampling, str) else sampling
    body['frontend'] = json.loads(frontend) if isinstance(frontend, str) else frontend
    body['backend'] = json.loads(backend) if isinstance(backend, str) else backend
    body['enable_http_correlation_headers'] = enable_http_correlation_headers  # boolean
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, diagnostic_id=diagnostic_id, parameters=body)
# module equivalent: azure_rm_apimanagementapidiagnostic
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/diagnostics/{{ diagnostic_name }}
def list_apim_api_diagnostic(cmd, client,
                             resource_group,
                             service_name,
                             api_id):
    """List the diagnostics of an API."""
    return client.list_by_service(resource_group_name=resource_group,
                                  service_name=service_name,
                                  api_id=api_id)
# module equivalent: azure_rm_apimanagementapiissue
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/issues/{{ issue_name }}
def create_apim_api_issue(cmd, client,
                          resource_group,
                          service_name,
                          api_id,
                          issue_id,
                          title,
                          description,
                          user_id,
                          created_date=None,
                          state=None):
    """Create or update an issue on an API."""
    payload = {
        'created_date': created_date,
        'state': state,
        'title': title,
        'description': description,
        'user_id': user_id,
    }
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   issue_id=issue_id,
                                   parameters=payload)
# module equivalent: azure_rm_apimanagementapiissue
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/issues/{{ issue_name }}
def update_apim_api_issue(cmd, client, body,
                          resource_group,
                          service_name,
                          api_id,
                          issue_id,
                          title,
                          description,
                          user_id,
                          created_date=None,
                          state=None):
    """Update an API issue via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id, issue_id=issue_id).as_dict()
    body['created_date'] = created_date  # datetime
    body['state'] = state  # str
    body['title'] = title  # str
    body['description'] = description  # str
    body['user_id'] = user_id  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, issue_id=issue_id, parameters=body)
# module equivalent: azure_rm_apimanagementapiissue
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/issues/{{ issue_name }}
def list_apim_api_issue(cmd, client,
                        resource_group,
                        service_name,
                        api_id):
    """List the issues of an API."""
    return client.list_by_service(resource_group_name=resource_group,
                                  service_name=service_name,
                                  api_id=api_id)
# module equivalent: azure_rm_apimanagementapiissuecomment
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/issues/{{ issue_name }}/comments/{{ comment_name }}
def create_apim_api_issue_comment(cmd, client,
                                  resource_group,
                                  service_name,
                                  api_id,
                                  issue_id,
                                  comment_id,
                                  text,
                                  user_id,
                                  created_date=None):
    """Create or update a comment on an API issue."""
    payload = {
        'text': text,
        'created_date': created_date,
        'user_id': user_id,
    }
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   issue_id=issue_id,
                                   comment_id=comment_id,
                                   parameters=payload)
# module equivalent: azure_rm_apimanagementapiissuecomment
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/issues/{{ issue_name }}/comments/{{ comment_name }}
def update_apim_api_issue_comment(cmd, client, body,
                                  resource_group,
                                  service_name,
                                  api_id,
                                  issue_id,
                                  comment_id,
                                  text,
                                  user_id,
                                  created_date=None):
    """Update an API issue comment via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id, issue_id=issue_id, comment_id=comment_id).as_dict()
    body['text'] = text  # str
    body['created_date'] = created_date  # datetime
    body['user_id'] = user_id  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, issue_id=issue_id, comment_id=comment_id, parameters=body)
# module equivalent: azure_rm_apimanagementapiissuecomment
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/issues/{{ issue_name }}/comments/{{ comment_name }}
def list_apim_api_issue_comment(cmd, client,
                                resource_group,
                                service_name,
                                api_id,
                                issue_id):
    """List the comments of an API issue."""
    return client.list_by_service(resource_group_name=resource_group,
                                  service_name=service_name,
                                  api_id=api_id,
                                  issue_id=issue_id)
# module equivalent: azure_rm_apimanagementapiissueattachment
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/issues/{{ issue_name }}/attachments/{{ attachment_name }}
def create_apim_api_issue_attachment(cmd, client,
                                     resource_group,
                                     service_name,
                                     api_id,
                                     issue_id,
                                     attachment_id,
                                     title,
                                     content_format,
                                     content):
    """Create or update an attachment on an API issue."""
    payload = {
        'title': title,
        'content_format': content_format,
        'content': content,
    }
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   issue_id=issue_id,
                                   attachment_id=attachment_id,
                                   parameters=payload)
# module equivalent: azure_rm_apimanagementapiissueattachment
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/issues/{{ issue_name }}/attachments/{{ attachment_name }}
def update_apim_api_issue_attachment(cmd, client, body,
                                     resource_group,
                                     service_name,
                                     api_id,
                                     issue_id,
                                     attachment_id,
                                     title,
                                     content_format,
                                     content):
    """Update an API issue attachment via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id, issue_id=issue_id, attachment_id=attachment_id).as_dict()
    body['title'] = title  # str
    body['content_format'] = content_format  # str
    body['content'] = content  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, issue_id=issue_id, attachment_id=attachment_id, parameters=body)
# module equivalent: azure_rm_apimanagementapiissueattachment
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/issues/{{ issue_name }}/attachments/{{ attachment_name }}
def list_apim_api_issue_attachment(cmd, client,
                                   resource_group,
                                   service_name,
                                   api_id,
                                   issue_id):
    """List the attachments of an API issue."""
    return client.list_by_service(resource_group_name=resource_group,
                                  service_name=service_name,
                                  api_id=api_id,
                                  issue_id=issue_id)
# module equivalent: azure_rm_apimanagementapitagdescription
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/tagDescriptions/{{ tag_description_name }}
def create_apim_api_tag_description(cmd, client,
                                    resource_group,
                                    service_name,
                                    api_id,
                                    tag_id,
                                    description=None,
                                    external_docs_url=None,
                                    external_docs_description=None):
    """Create or update a tag description on an API."""
    payload = {
        'description': description,
        'external_docs_url': external_docs_url,
        'external_docs_description': external_docs_description,
    }
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   api_id=api_id,
                                   tag_id=tag_id,
                                   parameters=payload)
# module equivalent: azure_rm_apimanagementapitagdescription
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/tagDescriptions/{{ tag_description_name }}
def update_apim_api_tag_description(cmd, client, body,
                                    resource_group,
                                    service_name,
                                    api_id,
                                    tag_id,
                                    description=None,
                                    external_docs_url=None,
                                    external_docs_description=None):
    """Update an API tag description via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, api_id=api_id, tag_id=tag_id).as_dict()
    body['description'] = description  # str
    body['external_docs_url'] = external_docs_url  # str
    body['external_docs_description'] = external_docs_description  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, api_id=api_id, tag_id=tag_id, parameters=body)
# module equivalent: azure_rm_apimanagementapitagdescription
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apis/{{ api_name }}/tagDescriptions/{{ tag_description_name }}
def list_apim_api_tag_description(cmd, client,
                                  resource_group,
                                  service_name,
                                  api_id):
    """List the tag descriptions of an API."""
    return client.list_by_service(resource_group_name=resource_group,
                                  service_name=service_name,
                                  api_id=api_id)
# module equivalent: azure_rm_apimanagementapiversionset
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apiVersionSets/{{ api_version_set_name }}
def create_apim_api_version_set(cmd, client,
                                resource_group,
                                service_name,
                                version_set_id,
                                display_name,
                                versioning_scheme,
                                description=None,
                                version_query_name=None,
                                version_header_name=None):
    """Create or update an API version set."""
    payload = {
        'description': description,
        'version_query_name': version_query_name,
        'version_header_name': version_header_name,
        'display_name': display_name,
        'versioning_scheme': versioning_scheme,
    }
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   version_set_id=version_set_id,
                                   parameters=payload)
# module equivalent: azure_rm_apimanagementapiversionset
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apiVersionSets/{{ api_version_set_name }}
def update_apim_api_version_set(cmd, client, body,
                                resource_group,
                                service_name,
                                version_set_id,
                                display_name,
                                versioning_scheme,
                                description=None,
                                version_query_name=None,
                                version_header_name=None):
    """Update an API version set via read-modify-write.

    The incoming ``body`` argument is ignored (interface compatibility).
    """
    # BUG FIX: as_dict() returns a plain dict — item assignment, not
    # attribute assignment (which raised AttributeError).
    body = client.get(resource_group_name=resource_group, service_name=service_name, version_set_id=version_set_id).as_dict()
    body['description'] = description  # str
    body['version_query_name'] = version_query_name  # str
    body['version_header_name'] = version_header_name  # str
    body['display_name'] = display_name  # str
    body['versioning_scheme'] = versioning_scheme  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, version_set_id=version_set_id, parameters=body)
# module equivalent: azure_rm_apimanagementapiversionset
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/apiVersionSets/{{ api_version_set_name }}
def list_apim_api_version_set(cmd, client,
                              resource_group,
                              service_name):
    """List the API version sets of a service."""
    return client.list_by_service(resource_group_name=resource_group,
                                  service_name=service_name)
# module equivalent: azure_rm_apimanagementauthorizationserver
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/authorizationServers/{{ authorization_server_name }}
def create_apim_authorization_server(cmd, client,
                                     resource_group,
                                     service_name,
                                     authsid,
                                     display_name,
                                     client_registration_endpoint,
                                     authorization_endpoint,
                                     grant_types,
                                     client_id,
                                     description=None,
                                     authorization_methods=None,
                                     client_authentication_method=None,
                                     token_body_parameters=None,
                                     token_endpoint=None,
                                     support_state=None,
                                     default_scope=None,
                                     bearer_token_sending_methods=None,
                                     client_secret=None,
                                     resource_owner_username=None,
                                     resource_owner_password=None):
    """Create or update an OAuth2 authorization server.

    Structured options may be JSON strings or already-decoded objects.
    """
    def _parse(raw):
        return json.loads(raw) if isinstance(raw, str) else raw

    parameters = {
        'description': description,
        'authorization_methods': _parse(authorization_methods),
        'client_authentication_method': _parse(client_authentication_method),
        'token_body_parameters': _parse(token_body_parameters),
        'token_endpoint': token_endpoint,
        'support_state': support_state,
        'default_scope': default_scope,
        'bearer_token_sending_methods': _parse(bearer_token_sending_methods),
        'client_secret': client_secret,
        'resource_owner_username': resource_owner_username,
        'resource_owner_password': resource_owner_password,
        'display_name': display_name,
        'client_registration_endpoint': client_registration_endpoint,
        'authorization_endpoint': authorization_endpoint,
        'grant_types': _parse(grant_types),
        'client_id': client_id,
    }
    return client.create_or_update(resource_group_name=resource_group,
                                   service_name=service_name,
                                   authsid=authsid,
                                   parameters=parameters)
# module equivalent: azure_rm_apimanagementauthorizationserver
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/authorizationServers/{{ authorization_server_name }}
def update_apim_authorization_server(cmd, client, body,
                                     resource_group,
                                     service_name,
                                     authsid,
                                     display_name,
                                     client_registration_endpoint,
                                     authorization_endpoint,
                                     grant_types,
                                     client_id,
                                     description=None,
                                     authorization_methods=None,
                                     client_authentication_method=None,
                                     token_body_parameters=None,
                                     token_endpoint=None,
                                     support_state=None,
                                     default_scope=None,
                                     bearer_token_sending_methods=None,
                                     client_secret=None,
                                     resource_owner_username=None,
                                     resource_owner_password=None):
    """Update an OAuth2 authorization server of an API Management service.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    def _from_json(value):
        # CLI parameters may arrive as JSON strings; decode them to objects.
        return json.loads(value) if isinstance(value, str) else value
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, authsid=authsid).as_dict()
    body['description'] = description  # str
    body['authorization_methods'] = _from_json(authorization_methods)
    body['client_authentication_method'] = _from_json(client_authentication_method)
    body['token_body_parameters'] = _from_json(token_body_parameters)
    body['token_endpoint'] = token_endpoint  # str
    body['support_state'] = support_state  # boolean
    body['default_scope'] = default_scope  # str
    body['bearer_token_sending_methods'] = _from_json(bearer_token_sending_methods)
    body['client_secret'] = client_secret  # str
    body['resource_owner_username'] = resource_owner_username  # str
    body['resource_owner_password'] = resource_owner_password  # str
    body['display_name'] = display_name  # str
    body['client_registration_endpoint'] = client_registration_endpoint  # str
    body['authorization_endpoint'] = authorization_endpoint  # str
    body['grant_types'] = _from_json(grant_types)
    body['client_id'] = client_id  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, authsid=authsid, parameters=body)
# module equivalent: azure_rm_apimanagementauthorizationserver
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/authorizationServers/{{ authorization_server_name }}
def list_apim_authorization_server(cmd, client,
                                   resource_group,
                                   service_name):
    """List all authorization servers defined on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementbackend
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/backends/{{ backend_name }}
def create_apim_backend(cmd, client,
                        resource_group,
                        service_name,
                        backend_id,
                        url,
                        protocol,
                        title=None,
                        description=None,
                        resource_id=None,
                        service_fabric_cluster=None,
                        credentials=None,
                        proxy=None,
                        tls=None):
    """Create or update a backend entity on an API Management service."""
    def _maybe_json(value):
        # Decode JSON strings from the command line into Python objects.
        return json.loads(value) if isinstance(value, str) else value
    payload = {
        'title': title,
        'description': description,
        'resource_id': resource_id,
        'service_fabric_cluster': _maybe_json(service_fabric_cluster),
        'credentials': _maybe_json(credentials),
        'proxy': _maybe_json(proxy),
        'tls': _maybe_json(tls),
        'url': url,
        'protocol': protocol,
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        backend_id=backend_id,
        parameters=payload)
# module equivalent: azure_rm_apimanagementbackend
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/backends/{{ backend_name }}
def update_apim_backend(cmd, client, body,
                        resource_group,
                        service_name,
                        backend_id,
                        url,
                        protocol,
                        title=None,
                        description=None,
                        resource_id=None,
                        service_fabric_cluster=None,
                        credentials=None,
                        proxy=None,
                        tls=None):
    """Update a backend entity on an API Management service.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    def _from_json(value):
        # CLI parameters may arrive as JSON strings; decode them to objects.
        return json.loads(value) if isinstance(value, str) else value
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, backend_id=backend_id).as_dict()
    body['title'] = title  # str
    body['description'] = description  # str
    body['resource_id'] = resource_id  # str
    body['service_fabric_cluster'] = _from_json(service_fabric_cluster)
    body['credentials'] = _from_json(credentials)
    body['proxy'] = _from_json(proxy)
    body['tls'] = _from_json(tls)
    body['url'] = url  # str
    body['protocol'] = protocol  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, backend_id=backend_id, parameters=body)
# module equivalent: azure_rm_apimanagementbackend
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/backends/{{ backend_name }}
def list_apim_backend(cmd, client,
                      resource_group,
                      service_name):
    """List all backends configured on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementcache
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/caches/{{ cache_name }}
def create_apim_cache(cmd, client,
                      resource_group,
                      service_name,
                      cache_id,
                      connection_string,
                      description=None,
                      resource_id=None):
    """Create or update an external cache entity on an API Management service."""
    payload = {
        'description': description,
        'connection_string': connection_string,
        'resource_id': resource_id,
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        cache_id=cache_id,
        parameters=payload)
# module equivalent: azure_rm_apimanagementcache
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/caches/{{ cache_name }}
def update_apim_cache(cmd, client, body,
                      resource_group,
                      service_name,
                      cache_id,
                      connection_string,
                      description=None,
                      resource_id=None):
    """Update an external cache entity on an API Management service.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, cache_id=cache_id).as_dict()
    body['description'] = description  # str
    body['connection_string'] = connection_string  # str
    body['resource_id'] = resource_id  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, cache_id=cache_id, parameters=body)
# module equivalent: azure_rm_apimanagementcache
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/caches/{{ cache_name }}
def list_apim_cache(cmd, client,
                    resource_group,
                    service_name):
    """List all external caches configured on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementcertificate
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/certificates/{{ certificate_name }}
def create_apim_certificate(cmd, client,
                            resource_group,
                            service_name,
                            certificate_id,
                            data,
                            password):
    """Create or update a certificate entity on an API Management service."""
    payload = {'data': data, 'password': password}
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        certificate_id=certificate_id,
        parameters=payload)
# module equivalent: azure_rm_apimanagementcertificate
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/certificates/{{ certificate_name }}
def update_apim_certificate(cmd, client, body,
                            resource_group,
                            service_name,
                            certificate_id,
                            data,
                            password):
    """Update a certificate entity on an API Management service.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, certificate_id=certificate_id).as_dict()
    body['data'] = data  # str
    body['password'] = password  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, certificate_id=certificate_id, parameters=body)
# module equivalent: azure_rm_apimanagementcertificate
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/certificates/{{ certificate_name }}
def list_apim_certificate(cmd, client,
                          resource_group,
                          service_name):
    """List all certificates stored on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementservice
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}
def create_apim(cmd, client,
                resource_group,
                name,
                publisher_email,
                publisher_name,
                sku_name,
                location,
                tags=None,
                notification_sender_email=None,
                hostname_configurations=None,
                virtual_network_configuration=None,
                additional_locations=None,
                custom_properties=None,
                certificates=None,
                enable_client_certificate=None,
                virtual_network_type=None,
                sku_capacity=None,
                identity=None):
    """Create or update an API Management service instance."""
    def _maybe_json(value):
        # Decode JSON strings from the command line into Python objects.
        return json.loads(value) if isinstance(value, str) else value
    payload = {
        'tags': tags,  # dictionary
        'notification_sender_email': notification_sender_email,
        'hostname_configurations': _maybe_json(hostname_configurations),
        'virtual_network_configuration': _maybe_json(virtual_network_configuration),
        'additional_locations': _maybe_json(additional_locations),
        'custom_properties': custom_properties,  # dictionary
        'certificates': _maybe_json(certificates),
        'enable_client_certificate': enable_client_certificate,
        'virtual_network_type': virtual_network_type,
        'publisher_email': publisher_email,
        'publisher_name': publisher_name,
        'sku': {'name': sku_name, 'capacity': sku_capacity},
        'identity': _maybe_json(identity),
        'location': location,
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=name,
        parameters=payload)
# module equivalent: azure_rm_apimanagementservice
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}
def update_apim(cmd, client, body,
                resource_group,
                name,
                publisher_email,
                publisher_name,
                sku_name,
                location,
                tags=None,
                notification_sender_email=None,
                hostname_configurations=None,
                virtual_network_configuration=None,
                additional_locations=None,
                custom_properties=None,
                certificates=None,
                enable_client_certificate=None,
                virtual_network_type=None,
                sku_capacity=None,
                identity=None):
    """Update an API Management service instance.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    def _from_json(value):
        # CLI parameters may arrive as JSON strings; decode them to objects.
        return json.loads(value) if isinstance(value, str) else value
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes (including the nested
    # body.sku.name) raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=name).as_dict()
    body['tags'] = tags  # dictionary
    body['notification_sender_email'] = notification_sender_email  # str
    body['hostname_configurations'] = _from_json(hostname_configurations)
    body['virtual_network_configuration'] = _from_json(virtual_network_configuration)
    body['additional_locations'] = _from_json(additional_locations)
    body['custom_properties'] = custom_properties  # dictionary
    body['certificates'] = _from_json(certificates)
    body['enable_client_certificate'] = enable_client_certificate  # boolean
    body['virtual_network_type'] = virtual_network_type  # str
    body['publisher_email'] = publisher_email  # str
    body['publisher_name'] = publisher_name  # str
    sku = body.setdefault('sku', {})
    sku['name'] = sku_name  # str
    sku['capacity'] = sku_capacity  # number
    body['identity'] = _from_json(identity)
    body['location'] = location  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=name, parameters=body)
# module equivalent: azure_rm_apimanagementservice
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}
def list_apim(cmd, client,
              resource_group):
    """List API Management services, scoped to a resource group when one is given."""
    if resource_group is None:
        # No scope supplied: enumerate across the whole subscription.
        return client.list()
    return client.list_by_resource_group(resource_group_name=resource_group)
# module equivalent: azure_rm_apimanagementdiagnostic
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/diagnostics/{{ diagnostic_name }}
def create_apim_diagnostic(cmd, client,
                           resource_group,
                           service_name,
                           diagnostic_id,
                           logger_id,
                           always_log=None,
                           sampling=None,
                           frontend=None,
                           backend=None,
                           enable_http_correlation_headers=None):
    """Create or update a diagnostic entity on an API Management service."""
    def _maybe_json(value):
        # Decode JSON strings from the command line into Python objects.
        return json.loads(value) if isinstance(value, str) else value
    payload = {
        'always_log': always_log,
        'logger_id': logger_id,
        'sampling': _maybe_json(sampling),
        'frontend': _maybe_json(frontend),
        'backend': _maybe_json(backend),
        'enable_http_correlation_headers': enable_http_correlation_headers,
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        diagnostic_id=diagnostic_id,
        parameters=payload)
# module equivalent: azure_rm_apimanagementdiagnostic
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/diagnostics/{{ diagnostic_name }}
def update_apim_diagnostic(cmd, client, body,
                           resource_group,
                           service_name,
                           diagnostic_id,
                           logger_id,
                           always_log=None,
                           sampling=None,
                           frontend=None,
                           backend=None,
                           enable_http_correlation_headers=None):
    """Update a diagnostic entity on an API Management service.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    def _from_json(value):
        # CLI parameters may arrive as JSON strings; decode them to objects.
        return json.loads(value) if isinstance(value, str) else value
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, diagnostic_id=diagnostic_id).as_dict()
    body['always_log'] = always_log  # str
    body['logger_id'] = logger_id  # str
    body['sampling'] = _from_json(sampling)
    body['frontend'] = _from_json(frontend)
    body['backend'] = _from_json(backend)
    body['enable_http_correlation_headers'] = enable_http_correlation_headers  # boolean
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, diagnostic_id=diagnostic_id, parameters=body)
# module equivalent: azure_rm_apimanagementdiagnostic
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/diagnostics/{{ diagnostic_name }}
def list_apim_diagnostic(cmd, client,
                         resource_group,
                         service_name):
    """List all diagnostics configured on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementemailtemplate
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/templates/{{ template_name }}
def create_apim_template(cmd, client,
                         resource_group,
                         service_name,
                         name,
                         subject=None,
                         title=None,
                         description=None,
                         body=None):
    """Create or update an email template on an API Management service."""
    # Capture the caller's template body before the name is reused for the
    # request payload; previously `body = {}` clobbered the parameter, so
    # `body['body'] = body` stored the payload dict inside itself and the
    # caller's value was silently dropped.
    template_body = body
    body = {}
    body['subject'] = subject  # str
    body['title'] = title  # str
    body['description'] = description  # str
    body['body'] = template_body  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, template_name=name, parameters=body)
# module equivalent: azure_rm_apimanagementemailtemplate
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/templates/{{ template_name }}
def update_apim_template(cmd, client, instance,
                         resource_group,
                         service_name,
                         name,
                         subject=None,
                         title=None,
                         description=None,
                         body=None):
    """Update an email template on an API Management service.

    NOTE(review): the original definition declared `body` twice (third
    positional and keyword), which is a SyntaxError; the unused positional
    (immediately overwritten, like the `body` argument of the sibling
    update_* commands) is renamed to `instance`.
    """
    # Keep the caller's template body before reusing the name for the payload.
    template_body = body
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, template_name=name).as_dict()
    body['subject'] = subject  # str
    body['title'] = title  # str
    body['description'] = description  # str
    body['body'] = template_body  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, template_name=name, parameters=body)
# module equivalent: azure_rm_apimanagementemailtemplate
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/templates/{{ template_name }}
def list_apim_template(cmd, client,
                       resource_group,
                       service_name):
    """List all email templates defined on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementgroup
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/groups/{{ group_name }}
def create_apim_group(cmd, client,
                      resource_group,
                      service_name,
                      group_id,
                      display_name,
                      description=None,
                      _type=None,
                      external_id=None):
    """Create or update a user group on an API Management service."""
    payload = {
        'display_name': display_name,
        'description': description,
        'type': _type,  # leading underscore avoids shadowing builtin type()
        'external_id': external_id,
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        group_id=group_id,
        parameters=payload)
# module equivalent: azure_rm_apimanagementgroup
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/groups/{{ group_name }}
def update_apim_group(cmd, client, body,
                      resource_group,
                      service_name,
                      group_id,
                      display_name,
                      description=None,
                      _type=None,
                      external_id=None):
    """Update a user group on an API Management service.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, group_id=group_id).as_dict()
    body['display_name'] = display_name  # str
    body['description'] = description  # str
    body['type'] = _type  # str
    body['external_id'] = external_id  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, group_id=group_id, parameters=body)
# module equivalent: azure_rm_apimanagementgroup
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/groups/{{ group_name }}
def list_apim_group(cmd, client,
                    resource_group,
                    service_name):
    """List all user groups defined on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementgroupuser
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/groups/{{ group_name }}/users/{{ user_name }}
def create_apim_group_user(cmd, client,
                           resource_group,
                           service_name,
                           group_id,
                           user_id,
                           state=None,
                           note=None,
                           identities=None,
                           first_name=None,
                           last_name=None,
                           email=None,
                           registration_date=None,
                           groups=None):
    """Add an existing user to a group on an API Management service.

    The optional user-detail arguments are accepted for interface parity
    with the generated module but are not forwarded to the service.
    """
    return client.create(
        resource_group_name=resource_group,
        service_name=service_name,
        group_id=group_id,
        user_id=user_id)
# module equivalent: azure_rm_apimanagementgroupuser
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/groups/{{ group_name }}/users/{{ user_name }}
def list_apim_group_user(cmd, client,
                         resource_group,
                         service_name,
                         group_id):
    """List the users that belong to a group on an API Management service."""
    return client.list(
        resource_group_name=resource_group,
        service_name=service_name,
        group_id=group_id)
# module equivalent: azure_rm_apimanagementidentityprovider
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/identityProviders/{{ identity_provider_name }}
def create_apim_identity_provider(cmd, client,
                                  resource_group,
                                  service_name,
                                  name,
                                  client_id,
                                  client_secret,
                                  _type=None,
                                  allowed_tenants=None,
                                  authority=None,
                                  signup_policy_name=None,
                                  signin_policy_name=None,
                                  profile_editing_policy_name=None,
                                  password_reset_policy_name=None):
    """Create or update an identity provider on an API Management service."""
    def _maybe_json(value):
        # Decode JSON strings from the command line into Python objects.
        return json.loads(value) if isinstance(value, str) else value
    payload = {
        'type': _type,  # leading underscore avoids shadowing builtin type()
        'allowed_tenants': _maybe_json(allowed_tenants),
        'authority': authority,
        'signup_policy_name': signup_policy_name,
        'signin_policy_name': signin_policy_name,
        'profile_editing_policy_name': profile_editing_policy_name,
        'password_reset_policy_name': password_reset_policy_name,
        'client_id': client_id,
        'client_secret': client_secret,
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        identity_provider_name=name,
        parameters=payload)
# module equivalent: azure_rm_apimanagementidentityprovider
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/identityProviders/{{ identity_provider_name }}
def update_apim_identity_provider(cmd, client, body,
                                  resource_group,
                                  service_name,
                                  name,
                                  client_id,
                                  client_secret,
                                  _type=None,
                                  allowed_tenants=None,
                                  authority=None,
                                  signup_policy_name=None,
                                  signin_policy_name=None,
                                  profile_editing_policy_name=None,
                                  password_reset_policy_name=None):
    """Update an identity provider on an API Management service.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    def _from_json(value):
        # CLI parameters may arrive as JSON strings; decode them to objects.
        return json.loads(value) if isinstance(value, str) else value
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, identity_provider_name=name).as_dict()
    body['type'] = _type  # str
    body['allowed_tenants'] = _from_json(allowed_tenants)
    body['authority'] = authority  # str
    body['signup_policy_name'] = signup_policy_name  # str
    body['signin_policy_name'] = signin_policy_name  # str
    body['profile_editing_policy_name'] = profile_editing_policy_name  # str
    body['password_reset_policy_name'] = password_reset_policy_name  # str
    body['client_id'] = client_id  # str
    body['client_secret'] = client_secret  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, identity_provider_name=name, parameters=body)
# module equivalent: azure_rm_apimanagementidentityprovider
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/identityProviders/{{ identity_provider_name }}
def list_apim_identity_provider(cmd, client,
                                resource_group,
                                service_name):
    """List all identity providers configured on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementlogger
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/loggers/{{ logger_name }}
def create_apim_logger(cmd, client,
                       resource_group,
                       service_name,
                       logger_id,
                       logger_type,
                       credentials,
                       description=None,
                       is_buffered=None,
                       resource_id=None):
    """Create or update a logger entity on an API Management service."""
    payload = {
        'logger_type': logger_type,
        'description': description,
        'credentials': credentials,  # dictionary
        'is_buffered': is_buffered,  # boolean
        'resource_id': resource_id,
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        logger_id=logger_id,
        parameters=payload)
# module equivalent: azure_rm_apimanagementlogger
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/loggers/{{ logger_name }}
def update_apim_logger(cmd, client, body,
                       resource_group,
                       service_name,
                       logger_id,
                       logger_type,
                       credentials,
                       description=None,
                       is_buffered=None,
                       resource_id=None):
    """Update a logger entity on an API Management service.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, logger_id=logger_id).as_dict()
    body['logger_type'] = logger_type  # str
    body['description'] = description  # str
    body['credentials'] = credentials  # dictionary
    body['is_buffered'] = is_buffered  # boolean
    body['resource_id'] = resource_id  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, logger_id=logger_id, parameters=body)
# module equivalent: azure_rm_apimanagementlogger
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/loggers/{{ logger_name }}
def list_apim_logger(cmd, client,
                     resource_group,
                     service_name):
    """List all loggers configured on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementnotification
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/notifications/{{ notification_name }}
def create_apim_notification(cmd, client,
                             resource_group,
                             service_name,
                             name,
                             title,
                             description=None,
                             recipients=None):
    """Enable a notification on an API Management service.

    `title`, `description` and `recipients` are accepted for interface
    parity with the generated module but are not forwarded to the service.
    """
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        notification_name=name)
# module equivalent: azure_rm_apimanagementnotification
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/notifications/{{ notification_name }}
def update_apim_notification(cmd, client, body,
                             resource_group,
                             service_name,
                             name,
                             title,
                             description=None,
                             recipients=None):
    """Update (re-enable) a notification on an API Management service.

    `body`, `title`, `description` and `recipients` are accepted for
    interface parity with the generated module but are not forwarded.
    """
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        notification_name=name)
# module equivalent: azure_rm_apimanagementnotification
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/notifications/{{ notification_name }}
def list_apim_notification(cmd, client,
                           resource_group,
                           service_name):
    """List all notifications defined on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementnotificationrecipientuser
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/notifications/{{ notification_name }}/recipientUsers/{{ recipient_user_name }}
def create_apim_notification_recipient_user(cmd, client,
                                            resource_group,
                                            service_name,
                                            notification_name,
                                            user_id):
    """Add a user as a recipient of a notification on an API Management service."""
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        notification_name=notification_name,
        user_id=user_id)
# module equivalent: azure_rm_apimanagementnotificationrecipientuser
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/notifications/{{ notification_name }}/recipientUsers/{{ recipient_user_name }}
def update_apim_notification_recipient_user(cmd, client, body,
                                            resource_group,
                                            service_name,
                                            notification_name,
                                            user_id):
    """Re-add a user as a recipient of a notification (idempotent upsert)."""
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        notification_name=notification_name,
        user_id=user_id)
# module equivalent: azure_rm_apimanagementnotificationrecipientuser
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/notifications/{{ notification_name }}/recipientUsers/{{ recipient_user_name }}
def list_apim_notification_recipient_user(cmd, client,
                                          resource_group,
                                          service_name,
                                          notification_name):
    """List the user recipients of a notification on an API Management service."""
    return client.list_by_notification(
        resource_group_name=resource_group,
        service_name=service_name,
        notification_name=notification_name)
# module equivalent: azure_rm_apimanagementnotificationrecipientemail
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/notifications/{{ notification_name }}/recipientEmails/{{ recipient_email_name }}
def create_apim_notification_recipient_email(cmd, client,
                                             resource_group,
                                             service_name,
                                             notification_name,
                                             email):
    """Add an email address as a recipient of a notification."""
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        notification_name=notification_name,
        email=email)
# module equivalent: azure_rm_apimanagementnotificationrecipientemail
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/notifications/{{ notification_name }}/recipientEmails/{{ recipient_email_name }}
def update_apim_notification_recipient_email(cmd, client, body,
                                             resource_group,
                                             service_name,
                                             notification_name,
                                             email):
    """Re-add an email address as a recipient of a notification (idempotent upsert)."""
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        notification_name=notification_name,
        email=email)
# module equivalent: azure_rm_apimanagementnotificationrecipientemail
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/notifications/{{ notification_name }}/recipientEmails/{{ recipient_email_name }}
def list_apim_notification_recipient_email(cmd, client,
                                           resource_group,
                                           service_name,
                                           notification_name):
    """List the email recipients of a notification on an API Management service."""
    return client.list_by_notification(
        resource_group_name=resource_group,
        service_name=service_name,
        notification_name=notification_name)
# module equivalent: azure_rm_apimanagementopenidconnectprovider
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/openidConnectProviders/{{ openid_connect_provider_name }}
def create_apim_openid_connect_provider(cmd, client,
                                        resource_group,
                                        service_name,
                                        opid,
                                        display_name,
                                        metadata_endpoint,
                                        client_id,
                                        description=None,
                                        client_secret=None):
    """Create or update an OpenID Connect provider on an API Management service."""
    payload = {
        'display_name': display_name,
        'description': description,
        'metadata_endpoint': metadata_endpoint,
        'client_id': client_id,
        'client_secret': client_secret,
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        opid=opid,
        parameters=payload)
# module equivalent: azure_rm_apimanagementopenidconnectprovider
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/openidConnectProviders/{{ openid_connect_provider_name }}
def update_apim_openid_connect_provider(cmd, client, body,
                                        resource_group,
                                        service_name,
                                        opid,
                                        display_name,
                                        metadata_endpoint,
                                        client_id,
                                        description=None,
                                        client_secret=None):
    """Update an OpenID Connect provider on an API Management service.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, opid=opid).as_dict()
    body['display_name'] = display_name  # str
    body['description'] = description  # str
    body['metadata_endpoint'] = metadata_endpoint  # str
    body['client_id'] = client_id  # str
    body['client_secret'] = client_secret  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, opid=opid, parameters=body)
# module equivalent: azure_rm_apimanagementopenidconnectprovider
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/openidConnectProviders/{{ openid_connect_provider_name }}
def list_apim_openid_connect_provider(cmd, client,
                                      resource_group,
                                      service_name):
    """List all OpenID Connect providers on an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementpolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/policies/{{ policy_name }}
def create_apim_policy(cmd, client,
                       resource_group,
                       service_name,
                       policy_id,
                       value,
                       _format=None):
    """Create or update the global policy of an API Management service."""
    payload = {
        'value': value,
        'format': _format,  # leading underscore avoids shadowing format()
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        policy_id=policy_id,
        parameters=payload)
# module equivalent: azure_rm_apimanagementpolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/policies/{{ policy_name }}
def update_apim_policy(cmd, client, body,
                       resource_group,
                       service_name,
                       policy_id,
                       value,
                       _format=None):
    """Update the global policy of an API Management service.

    Fetches the current resource, overlays the supplied values and submits
    the merged payload through create_or_update.
    """
    # as_dict() returns a plain dict, so fields must be written with item
    # assignment; the previous attribute-style writes raised AttributeError.
    body = client.get(resource_group_name=resource_group, service_name=service_name, policy_id=policy_id, format=_format).as_dict()
    body['value'] = value  # str
    body['format'] = _format  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, policy_id=policy_id, parameters=body)
# module equivalent: azure_rm_apimanagementpolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/policies/{{ policy_name }}
def list_apim_policy(cmd, client,
                     resource_group,
                     service_name):
    """List all service-level policies of an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementsigninsetting
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/portalsettings/signin
def create_apim_portalsetting_signin(cmd, client,
                                     resource_group,
                                     name,
                                     enabled=None):
    """Create or update the sign-in portal settings of an API Management service."""
    parameters = {'enabled': enabled}  # boolean
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=name,
        parameters=parameters)
# module equivalent: azure_rm_apimanagementsigninsetting
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/portalsettings/signin
def update_apim_portalsetting_signin(cmd, client, body,
                                     resource_group,
                                     name,
                                     enabled=None):
    """Update the sign-in portal settings.

    The incoming ``body`` argument is ignored: the current settings are
    fetched and patched (parameter kept for signature compatibility).
    """
    # as_dict() returns a plain dict: item assignment, not attribute
    # assignment (the original code raised AttributeError here).
    body = client.get(resource_group_name=resource_group, service_name=name).as_dict()
    body['enabled'] = enabled  # boolean
    return client.create_or_update(resource_group_name=resource_group, service_name=name, parameters=body)
# module equivalent: azure_rm_apimanagementsignupsetting
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/portalsettings/signup
def create_apim_portalsetting_signup(cmd, client,
                                     resource_group,
                                     name,
                                     enabled=None,
                                     terms_of_service=None):
    """Create or update the sign-up portal settings of an API Management service."""
    # terms_of_service may arrive as a JSON string from the CLI; decode it.
    if isinstance(terms_of_service, str):
        terms_of_service = json.loads(terms_of_service)
    parameters = {
        'enabled': enabled,  # boolean
        'terms_of_service': terms_of_service,
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=name,
        parameters=parameters)
# module equivalent: azure_rm_apimanagementsignupsetting
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/portalsettings/signup
def update_apim_portalsetting_signup(cmd, client, body,
                                     resource_group,
                                     name,
                                     enabled=None,
                                     terms_of_service=None):
    """Update the sign-up portal settings.

    The incoming ``body`` argument is ignored: the current settings are
    fetched and patched (parameter kept for signature compatibility).
    """
    # as_dict() returns a plain dict: item assignment, not attribute
    # assignment (the original code raised AttributeError here).
    body = client.get(resource_group_name=resource_group, service_name=name).as_dict()
    body['enabled'] = enabled  # boolean
    body['terms_of_service'] = json.loads(terms_of_service) if isinstance(terms_of_service, str) else terms_of_service
    return client.create_or_update(resource_group_name=resource_group, service_name=name, parameters=body)
# module equivalent: azure_rm_apimanagementdelegationsetting
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/portalsettings/delegation
def create_apim_portalsetting_delegation(cmd, client,
                                         resource_group,
                                         name,
                                         url=None,
                                         validation_key=None,
                                         subscriptions=None,
                                         user_registration=None):
    """Create or update the delegation portal settings of an API Management service."""
    def _decode(val):
        # CLI arguments may arrive as JSON strings; decode those.
        return json.loads(val) if isinstance(val, str) else val
    parameters = {
        'url': url,                        # str
        'validation_key': validation_key,  # str
        'subscriptions': _decode(subscriptions),
        'user_registration': _decode(user_registration),
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=name,
        parameters=parameters)
# module equivalent: azure_rm_apimanagementdelegationsetting
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/portalsettings/delegation
def update_apim_portalsetting_delegation(cmd, client, body,
                                         resource_group,
                                         name,
                                         url=None,
                                         validation_key=None,
                                         subscriptions=None,
                                         user_registration=None):
    """Update the delegation portal settings.

    The incoming ``body`` argument is ignored: the current settings are
    fetched and patched (parameter kept for signature compatibility).
    """
    # as_dict() returns a plain dict: item assignment, not attribute
    # assignment (the original code raised AttributeError here).
    body = client.get(resource_group_name=resource_group, service_name=name).as_dict()
    body['url'] = url  # str
    body['validation_key'] = validation_key  # str
    body['subscriptions'] = json.loads(subscriptions) if isinstance(subscriptions, str) else subscriptions
    body['user_registration'] = json.loads(user_registration) if isinstance(user_registration, str) else user_registration
    return client.create_or_update(resource_group_name=resource_group, service_name=name, parameters=body)
# module equivalent: azure_rm_apimanagementproduct
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}
def create_apim_product(cmd, client,
                        resource_group,
                        service_name,
                        product_id,
                        display_name,
                        description=None,
                        terms=None,
                        subscription_required=None,
                        approval_required=None,
                        subscriptions_limit=None,
                        state=None):
    """Create or update a product on an API Management service."""
    parameters = {
        'description': description,                      # str
        'terms': terms,                                  # str
        'subscription_required': subscription_required,  # boolean
        'approval_required': approval_required,          # boolean
        'subscriptions_limit': subscriptions_limit,      # number
        'state': state,                                  # str
        'display_name': display_name,                    # str
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        product_id=product_id,
        parameters=parameters)
# module equivalent: azure_rm_apimanagementproduct
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}
def update_apim_product(cmd, client, body,
                        resource_group,
                        service_name,
                        product_id,
                        display_name,
                        description=None,
                        terms=None,
                        subscription_required=None,
                        approval_required=None,
                        subscriptions_limit=None,
                        state=None):
    """Update an existing product.

    The incoming ``body`` argument is ignored: the current resource is fetched
    and patched (parameter kept for signature compatibility).
    """
    # as_dict() returns a plain dict: item assignment, not attribute
    # assignment (the original code raised AttributeError here).
    body = client.get(resource_group_name=resource_group, service_name=service_name, product_id=product_id).as_dict()
    body['description'] = description  # str
    body['terms'] = terms  # str
    body['subscription_required'] = subscription_required  # boolean
    body['approval_required'] = approval_required  # boolean
    body['subscriptions_limit'] = subscriptions_limit  # number
    body['state'] = state  # str
    body['display_name'] = display_name  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, product_id=product_id, parameters=body)
# module equivalent: azure_rm_apimanagementproduct
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}
def list_apim_product(cmd, client,
                      resource_group,
                      service_name):
    """List products of an API Management service.

    When both identifiers are supplied the tag-grouped listing is used,
    otherwise the plain per-service listing.
    """
    if resource_group is None or service_name is None:
        return client.list_by_service(
            resource_group_name=resource_group,
            service_name=service_name)
    return client.list_by_tags(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementproductapi
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}/apis/{{ api_name }}
def create_apim_product_api(cmd, client,
                            resource_group,
                            service_name,
                            product_id,
                            api_id,
                            path,
                            description=None,
                            authentication_settings=None,
                            subscription_key_parameter_names=None,
                            _type=None,
                            api_revision=None,
                            api_version=None,
                            is_current=None,
                            is_online=None,
                            api_revision_description=None,
                            api_version_description=None,
                            api_version_set_id=None,
                            subscription_required=None,
                            source_api_id=None,
                            display_name=None,
                            service_url=None,
                            protocols=None,
                            api_version_set=None):
    """Attach an API to a product.

    Only the identifying arguments are forwarded; the remaining parameters
    are accepted for interface compatibility but are not sent.
    """
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        product_id=product_id,
        api_id=api_id)
# module equivalent: azure_rm_apimanagementproductapi
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}/apis/{{ api_name }}
def update_apim_product_api(cmd, client, body,
                            resource_group,
                            service_name,
                            product_id,
                            api_id,
                            path,
                            description=None,
                            authentication_settings=None,
                            subscription_key_parameter_names=None,
                            _type=None,
                            api_revision=None,
                            api_version=None,
                            is_current=None,
                            is_online=None,
                            api_revision_description=None,
                            api_version_description=None,
                            api_version_set_id=None,
                            subscription_required=None,
                            source_api_id=None,
                            display_name=None,
                            service_url=None,
                            protocols=None,
                            api_version_set=None):
    """Update the API attachment of a product.

    Only the identifying arguments are forwarded; the remaining parameters
    (including ``body``) are accepted for interface compatibility but unused.
    """
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        product_id=product_id,
        api_id=api_id)
# module equivalent: azure_rm_apimanagementproductapi
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}/apis/{{ api_name }}
def list_apim_product_api(cmd, client,
                          resource_group,
                          service_name,
                          product_id):
    """List the APIs attached to a product."""
    return client.list_by_product(
        resource_group_name=resource_group,
        service_name=service_name,
        product_id=product_id)
# module equivalent: azure_rm_apimanagementproductgroup
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}/groups/{{ group_name }}
def create_apim_product_group(cmd, client,
                              resource_group,
                              service_name,
                              product_id,
                              group_id,
                              display_name,
                              description=None,
                              built_in=None,
                              _type=None,
                              external_id=None):
    """Attach a group to a product.

    Only the identifying arguments are forwarded; the remaining parameters
    are accepted for interface compatibility but are not sent.
    """
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        product_id=product_id,
        group_id=group_id)
# module equivalent: azure_rm_apimanagementproductgroup
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}/groups/{{ group_name }}
def update_apim_product_group(cmd, client, body,
                              resource_group,
                              service_name,
                              product_id,
                              group_id,
                              display_name,
                              description=None,
                              built_in=None,
                              _type=None,
                              external_id=None):
    """Update the group attachment of a product.

    Only the identifying arguments are forwarded; the remaining parameters
    (including ``body``) are accepted for interface compatibility but unused.
    """
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        product_id=product_id,
        group_id=group_id)
# module equivalent: azure_rm_apimanagementproductgroup
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}/groups/{{ group_name }}
def list_apim_product_group(cmd, client,
                            resource_group,
                            service_name,
                            product_id):
    """List the groups attached to a product."""
    return client.list_by_product(
        resource_group_name=resource_group,
        service_name=service_name,
        product_id=product_id)
# module equivalent: azure_rm_apimanagementproductpolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}/policies/{{ policy_name }}
def create_apim_product_policy(cmd, client,
                               resource_group,
                               service_name,
                               product_id,
                               policy_id,
                               value,
                               _format=None):
    """Create or update a policy scoped to a product."""
    parameters = {
        'value': value,     # str
        'format': _format,  # str
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        product_id=product_id,
        policy_id=policy_id,
        parameters=parameters)
# module equivalent: azure_rm_apimanagementproductpolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}/policies/{{ policy_name }}
def update_apim_product_policy(cmd, client, body,
                               resource_group,
                               service_name,
                               product_id,
                               policy_id,
                               value,
                               _format=None):
    """Update an existing product-scoped policy.

    The incoming ``body`` argument is ignored: the current resource is fetched
    and patched (parameter kept for signature compatibility).
    """
    # as_dict() returns a plain dict: item assignment, not attribute
    # assignment (the original code raised AttributeError here).
    body = client.get(resource_group_name=resource_group, service_name=service_name, product_id=product_id, policy_id=policy_id, format=_format).as_dict()
    body['value'] = value  # str
    body['format'] = _format  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, product_id=product_id, policy_id=policy_id, parameters=body)
# module equivalent: azure_rm_apimanagementproductpolicy
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/products/{{ product_name }}/policies/{{ policy_name }}
def list_apim_product_policy(cmd, client,
                             resource_group,
                             service_name,
                             product_id):
    """List the policies scoped to a product."""
    return client.list_by_product(
        resource_group_name=resource_group,
        service_name=service_name,
        product_id=product_id)
# module equivalent: azure_rm_apimanagementproperty
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/properties/{{ property_name }}
def create_apim_property(cmd, client,
                         resource_group,
                         service_name,
                         prop_id,
                         display_name,
                         value,
                         tags=None,
                         secret=None):
    """Create or update a named property on an API Management service."""
    # tags may arrive as a JSON string from the CLI; decode it.
    if isinstance(tags, str):
        tags = json.loads(tags)
    parameters = {
        'tags': tags,
        'secret': secret,              # boolean
        'display_name': display_name,  # str
        'value': value,                # str
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        prop_id=prop_id,
        parameters=parameters)
# module equivalent: azure_rm_apimanagementproperty
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/properties/{{ property_name }}
def update_apim_property(cmd, client, body,
                         resource_group,
                         service_name,
                         prop_id,
                         display_name,
                         value,
                         tags=None,
                         secret=None):
    """Update an existing named property.

    The incoming ``body`` argument is ignored: the current resource is fetched
    and patched (parameter kept for signature compatibility).
    """
    # as_dict() returns a plain dict: item assignment, not attribute
    # assignment (the original code raised AttributeError here).
    body = client.get(resource_group_name=resource_group, service_name=service_name, prop_id=prop_id).as_dict()
    body['tags'] = json.loads(tags) if isinstance(tags, str) else tags
    body['secret'] = secret  # boolean
    body['display_name'] = display_name  # str
    body['value'] = value  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, prop_id=prop_id, parameters=body)
# module equivalent: azure_rm_apimanagementproperty
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/properties/{{ property_name }}
def list_apim_property(cmd, client,
                       resource_group,
                       service_name):
    """List the named properties of an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementsubscription
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/subscriptions/{{ subscription_id }}
def create_apim_subscription(cmd, client,
                             resource_group,
                             service_name,
                             sid,
                             scope,
                             display_name,
                             notify=None,
                             owner_id=None,
                             primary_key=None,
                             secondary_key=None,
                             state=None,
                             allow_tracing=None):
    """Create or update a subscription on an API Management service."""
    parameters = {
        'owner_id': owner_id,            # str
        'scope': scope,                  # str
        'display_name': display_name,    # str
        'primary_key': primary_key,      # str
        'secondary_key': secondary_key,  # str
        'state': state,                  # str
        'allow_tracing': allow_tracing,  # boolean
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        sid=sid,
        parameters=parameters,
        notify=notify)
# module equivalent: azure_rm_apimanagementsubscription
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/subscriptions/{{ subscription_id }}
def update_apim_subscription(cmd, client, body,
                             resource_group,
                             service_name,
                             sid,
                             scope,
                             display_name,
                             notify=None,
                             owner_id=None,
                             primary_key=None,
                             secondary_key=None,
                             state=None,
                             allow_tracing=None):
    """Update an existing subscription.

    The incoming ``body`` argument is ignored: the current resource is fetched
    and patched (parameter kept for signature compatibility).
    """
    # as_dict() returns a plain dict: item assignment, not attribute
    # assignment (the original code raised AttributeError here).
    body = client.get(resource_group_name=resource_group, service_name=service_name, sid=sid).as_dict()
    body['owner_id'] = owner_id  # str
    body['scope'] = scope  # str
    body['display_name'] = display_name  # str
    body['primary_key'] = primary_key  # str
    body['secondary_key'] = secondary_key  # str
    body['state'] = state  # str
    body['allow_tracing'] = allow_tracing  # boolean
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, sid=sid, parameters=body, notify=notify)
# module equivalent: azure_rm_apimanagementsubscription
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/subscriptions/{{ subscription_id }}
def list_apim_subscription(cmd, client,
                           resource_group,
                           service_name):
    """List the subscriptions of an API Management service."""
    return client.list(
        resource_group_name=resource_group,
        service_name=service_name)
# module equivalent: azure_rm_apimanagementuser
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/users/{{ user_name }}
def create_apim_user(cmd, client,
                     resource_group,
                     service_name,
                     user_id,
                     email,
                     first_name,
                     last_name,
                     state=None,
                     note=None,
                     identities=None,
                     password=None,
                     confirmation=None):
    """Create or update a user on an API Management service."""
    # identities may arrive as a JSON string from the CLI; decode it.
    if isinstance(identities, str):
        identities = json.loads(identities)
    parameters = {
        'state': state,                # str
        'note': note,                  # str
        'identities': identities,
        'email': email,                # str
        'first_name': first_name,      # str
        'last_name': last_name,        # str
        'password': password,          # str
        'confirmation': confirmation,  # str
    }
    return client.create_or_update(
        resource_group_name=resource_group,
        service_name=service_name,
        user_id=user_id,
        parameters=parameters)
# module equivalent: azure_rm_apimanagementuser
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/users/{{ user_name }}
def update_apim_user(cmd, client, body,
                     resource_group,
                     service_name,
                     user_id,
                     email,
                     first_name,
                     last_name,
                     state=None,
                     note=None,
                     identities=None,
                     password=None,
                     confirmation=None):
    """Update an existing user.

    The incoming ``body`` argument is ignored: the current resource is fetched
    and patched (parameter kept for signature compatibility).
    """
    # as_dict() returns a plain dict: item assignment, not attribute
    # assignment (the original code raised AttributeError here).
    body = client.get(resource_group_name=resource_group, service_name=service_name, user_id=user_id).as_dict()
    body['state'] = state  # str
    body['note'] = note  # str
    body['identities'] = json.loads(identities) if isinstance(identities, str) else identities
    body['email'] = email  # str
    body['first_name'] = first_name  # str
    body['last_name'] = last_name  # str
    body['password'] = password  # str
    body['confirmation'] = confirmation  # str
    return client.create_or_update(resource_group_name=resource_group, service_name=service_name, user_id=user_id, parameters=body)
# module equivalent: azure_rm_apimanagementuser
# URL: /subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group }}/providers/Microsoft.ApiManagement/service/{{ service_name }}/users/{{ user_name }}
def list_apim_user(cmd, client,
                   resource_group,
                   service_name):
    """List the users of an API Management service."""
    return client.list_by_service(
        resource_group_name=resource_group,
        service_name=service_name)
| 57.727225
| 222
| 0.620384
| 10,662
| 108,989
| 5.992778
| 0.027106
| 0.10885
| 0.079505
| 0.095407
| 0.988074
| 0.980139
| 0.973848
| 0.967963
| 0.955521
| 0.948259
| 0
| 0
| 0.295544
| 108,989
| 1,887
| 223
| 57.757817
| 0.832205
| 0.260347
| 0
| 0.74807
| 0
| 0
| 0.027082
| 0.007698
| 0
| 0
| 0
| 0
| 0
| 1
| 0.079298
| false
| 0.011228
| 0.000702
| 0.030175
| 0.163509
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
144afaefd28425f27f247f537449df2b9270822a
| 103
|
py
|
Python
|
lf3py/serialization/errors.py
|
rog-works/lf3py
|
e89937f7aa133ed54d85764f06101ab9abf6b960
|
[
"CNRI-Python"
] | null | null | null |
lf3py/serialization/errors.py
|
rog-works/lf3py
|
e89937f7aa133ed54d85764f06101ab9abf6b960
|
[
"CNRI-Python"
] | 48
|
2020-12-19T13:47:26.000Z
|
2021-01-07T22:27:56.000Z
|
lf3py/serialization/errors.py
|
rog-works/lambda-fw
|
715b36fc2d8d0ea0388aa4ac1336dc8cd5543778
|
[
"CNRI-Python"
] | null | null | null |
from lf3py.errors import Error
class SerializeError(Error):
    """Raised for failures during serialization."""
class DeserializeError(Error):
    """Raised for failures during deserialization."""
| 17.166667
| 35
| 0.815534
| 13
| 103
| 6.461538
| 0.692308
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010989
| 0.116505
| 103
| 5
| 36
| 20.6
| 0.912088
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.666667
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
1471d2d1bbc8908b29f8b27bf86e9cfe35687c59
| 570
|
py
|
Python
|
functions/render.py
|
mateusfg7/Jogo-da-velha
|
2c162a2599c56e4324d745e168115702969415b4
|
[
"MIT"
] | 2
|
2020-01-02T17:06:08.000Z
|
2020-01-20T13:19:07.000Z
|
functions/render.py
|
mateusfg7/Jogo-da-velha
|
2c162a2599c56e4324d745e168115702969415b4
|
[
"MIT"
] | 5
|
2020-01-03T20:49:35.000Z
|
2020-01-06T20:59:40.000Z
|
functions/render.py
|
mateusfg7/Jogo-da-velha
|
2c162a2599c56e4324d745e168115702969415b4
|
[
"MIT"
] | null | null | null |
import os
def render(player, casas):
os.system('clear')
print(f"\033[00;37mPlayer Atual: \033[00;31m{player}\033[00;37m\n")
print(
f"\033[00;31m{casas['1']} \033[00;37m| \033[00;31m{casas['2']} \033[00;37m| \033[00;31m{casas['3']}")
print(f"\033[00;37m---------")
print(
f"\033[00;31m{casas['4']} \033[00;37m| \033[00;31m{casas['5']} \033[00;37m| \033[00;31m{casas['6']}")
print(f"\033[00;37m---------")
print(
f"\033[00;31m{casas['7']} \033[00;37m| \033[00;31m{casas['8']} \033[00;37m| \033[00;31m{casas['9']}")
| 31.666667
| 109
| 0.542105
| 102
| 570
| 3.029412
| 0.254902
| 0.323625
| 0.2589
| 0.378641
| 0.682848
| 0.682848
| 0.621359
| 0.213592
| 0.213592
| 0.213592
| 0
| 0.307851
| 0.150877
| 570
| 17
| 110
| 33.529412
| 0.330579
| 0
| 0
| 0.416667
| 0
| 0.333333
| 0.689474
| 0.419298
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.083333
| 0
| 0.166667
| 0.5
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
147b61fdfa46c25ea833b989bad65a5a36ce164e
| 43,052
|
py
|
Python
|
tests/tests.py
|
fretbuzz/mimir_snakemake_t2
|
ece4f63ff02f35b7098fa3b77a9b49b6cd294d95
|
[
"Apache-2.0"
] | 1
|
2019-11-04T18:35:37.000Z
|
2019-11-04T18:35:37.000Z
|
tests/tests.py
|
fretbuzz/mimir_snakemake_t2
|
ece4f63ff02f35b7098fa3b77a9b49b6cd294d95
|
[
"Apache-2.0"
] | 1
|
2021-06-01T23:52:06.000Z
|
2021-06-01T23:52:06.000Z
|
tests/tests.py
|
fretbuzz/mimir_snakemake_t2
|
ece4f63ff02f35b7098fa3b77a9b49b6cd294d95
|
[
"Apache-2.0"
] | null | null | null |
import math
import unittest
import networkx as nx
import numpy as np
import pandas as pd
from analysis_pipeline.generate_graphs import get_points_to_plot
import analysis_pipeline.simplified_graph_metrics
import multiprocessing
from analysis_pipeline.pcap_to_edgelists import create_mappings
class testSyntheticAttackInjector(unittest.TestCase):
    """Tests for injecting synthetic exfiltration traffic into edgelist graphs.

    NOTE(review): Python 2 syntax (print statements). The tests read fixtures
    from ./tests/ and absolute /Volumes/... paths, so they presumably only run
    on the original author's machine -- confirm before relying on them in CI.
    """
    @classmethod
    def setUpClass(cls):
        # No shared fixtures; each test method builds its own inputs.
        pass
    def test_injector(self):
        """Inject one exfil path and check the resulting edge-weight deltas."""
        print "test_injector"
        file_paths = ['./tests/wordpress_thirteen_t1_default_bridge_0any_split_00036_20190220141725_edges.txt']
        counter_starting = 0
        svcs = ["my-release-pxc", "wwwppp-wordpress"]
        is_swarm = 0
        ms_s = svcs
        current_total_node_list = []
        svc_to_pod = {}
        node_attack_mapping = {}
        total_edgelist_nodes = []
        avg_dns_weight = 0
        avg_dns_pkts = 0
        container_info_path = "/Volumes/exM2/experimental_data/wordpress_info/wordpress_thirteen_t1/wordpress_thirteen_t1_docker_0_network_configs.txt"
        cilium_config_path = None # does NOT use cilium on reps 2-4
        kubernetes_svc_info = '/Volumes/exM2/experimental_data/wordpress_info/wordpress_thirteen_t1/wordpress_thirteen_t1_svc_config_0.txt'
        kubernetes_pod_info = '/Volumes/exM2/experimental_data/wordpress_info/wordpress_thirteen_t1/wordpress_thirteen_t1_pod_config_0.txt'
        container_to_ip, infra_service = create_mappings(is_swarm, container_info_path, kubernetes_svc_info,
                                                         kubernetes_pod_info, cilium_config_path, ms_s)
        initiator_info_for_paths = None # not actually need so no big deal
        name_of_dns_pod_node = None
        injected_file_path = './tests/injected_edgefiles/with_nodeAttribsavg_exfil_10000:0_avg_pkt_500:0_wordpress_thirteen_t1_default_bridge_0any_split_00036_20190220141725_edges.txt'
        pruned_without_injected = './tests/pruned_edgefiles/wordpress_thirteen_t1_default_bridge_0any_split_00036_20190220141725_edges.txt'
        last_attack_injected = None
        carryover = 0
        synthetic_exfil_paths = [['my_release_pxc_pod', 'wwwppp_wordpress_vip', 'wwwppp_wordpress_pod', 'internet']]
        attacks_to_times = [(0,1)]
        time_interval = 30
        out_q = multiprocessing.Queue()
        avg_exfil_per_min = 10000
        exfil_per_min_variance = 0
        avg_pkt_size = 500
        pkt_size_variance = 0
        end_of_training= 200
        pod_creation_log = None
        analysis_pipeline.simplified_graph_metrics.process_and_inject_single_graph(counter_starting, file_paths, svcs,
            is_swarm, ms_s, container_to_ip, infra_service, synthetic_exfil_paths, initiator_info_for_paths,
            attacks_to_times, time_interval, total_edgelist_nodes, svc_to_pod, avg_dns_weight, avg_dns_pkts,
            node_attack_mapping, out_q, current_total_node_list, name_of_dns_pod_node, last_attack_injected,
            carryover, avg_exfil_per_min, exfil_per_min_variance, avg_pkt_size, pkt_size_variance,
            end_of_training, pod_creation_log )
        # okay, now I actually need to see if it did the right thing...
        #G = nx.DiGraph()
        G= nx.read_gpickle( injected_file_path )
        print "ZZZZZZ"
        G_pruned_without_injected = nx.DiGraph()
        f = open(pruned_without_injected, 'r')
        lines = f.readlines()
        nx.parse_edgelist(lines, delimiter=' ', create_using=G_pruned_without_injected, data=[('frames',int),('weight', int)])
        edges_in_inject_but_not_pruned = []
        weight_differences = []
        different_edges = []
        # Compare the injected graph against the pruned (pre-injection) graph
        # edge by edge; missing edges land in the except branch.
        for (u,v,d) in G.edges(data=True):
            #print (u,v,d), d['weight'], G[u][v]['weight']
            try:
                weight_difference = d['weight'] - G_pruned_without_injected[u][v]['weight']
                if weight_difference != 0:
                    weight_differences.append(weight_difference)
                    different_edges.append((u,v,d))
            except:
                weight_differences.append(d['weight'])
                different_edges.append((u, v, d, 'wasnt_in_pruned'))
        print "weight_differences ", weight_differences
        print "different_edges",different_edges
        print "in_injected_but_not_pruned", edges_in_inject_but_not_pruned
        for edge in edges_in_inject_but_not_pruned:
            print "in_injected_but_not_pruned", edge
        #print G.nodes()
        self.assertEqual(len(weight_differences), 6)
        self.assertEqual(len([i for i in weight_differences if i == 400]), 3)
        self.assertEqual(len([i for i in weight_differences if i == 5000]), 3)
    def test_injector_not_doing_anything(self):
        """With counter past the attack window, injection should change nothing."""
        print "test_injector"
        file_paths = [
            './tests/wordpress_thirteen_t1_default_bridge_0any_split_00036_20190220141725_edges.txt']
        counter_starting = 2
        svcs = ["my-release-pxc", "wwwppp-wordpress"]
        is_swarm = 0
        ms_s = svcs
        current_total_node_list = []
        svc_to_pod = {}
        node_attack_mapping = {}
        total_edgelist_nodes = []
        avg_dns_weight = 0
        avg_dns_pkts = 0
        container_info_path = "/Volumes/exM2/experimental_data/wordpress_info/wordpress_thirteen_t1/wordpress_thirteen_t1_docker_0_network_configs.txt"
        cilium_config_path = None # does NOT use cilium on reps 2-4
        kubernetes_svc_info = '/Volumes/exM2/experimental_data/wordpress_info/wordpress_thirteen_t1/wordpress_thirteen_t1_svc_config_0.txt'
        kubernetes_pod_info = '/Volumes/exM2/experimental_data/wordpress_info/wordpress_thirteen_t1/wordpress_thirteen_t1_pod_config_0.txt'
        container_to_ip, infra_service = create_mappings(is_swarm, container_info_path, kubernetes_svc_info,
                                                         kubernetes_pod_info, cilium_config_path, ms_s)
        initiator_info_for_paths = None # not actually need so no big deal
        name_of_dns_pod_node = None
        injected_file_path = './tests/injected_edgefiles/with_nodeAttribsavg_exfil_10000:0_avg_pkt_500:0_wordpress_thirteen_t1_default_bridge_0any_split_00036_20190220141725_edges.txt'
        pruned_without_injected = './tests/pruned_edgefiles/wordpress_thirteen_t1_default_bridge_0any_split_00036_20190220141725_edges.txt'
        last_attack_injected = None
        carryover = 0
        synthetic_exfil_paths = [['my_release_pxc_pod', 'wwwppp_wordpress_vip', 'wwwppp_wordpress_pod', 'internet']]
        attacks_to_times = [(0, 1)]
        time_interval = 30
        out_q = multiprocessing.Queue()
        avg_exfil_per_min = 10000
        exfil_per_min_variance = 0
        avg_pkt_size = 500
        pkt_size_variance = 0
        end_of_training = 200
        pod_creation_log = None # this is GOOD b/c there's no pod_creation_log
        analysis_pipeline.simplified_graph_metrics.process_and_inject_single_graph(counter_starting, file_paths,
                                                                                   svcs,
                                                                                   is_swarm, ms_s, container_to_ip,
                                                                                   infra_service,
                                                                                   synthetic_exfil_paths,
                                                                                   initiator_info_for_paths,
                                                                                   attacks_to_times, time_interval,
                                                                                   total_edgelist_nodes, svc_to_pod,
                                                                                   avg_dns_weight, avg_dns_pkts,
                                                                                   node_attack_mapping, out_q,
                                                                                   current_total_node_list,
                                                                                   name_of_dns_pod_node,
                                                                                   last_attack_injected,
                                                                                   carryover, avg_exfil_per_min,
                                                                                   exfil_per_min_variance,
                                                                                   avg_pkt_size, pkt_size_variance,
                                                                                   end_of_training,
                                                                                   pod_creation_log)
        # okay, now I actually need to see if it did the right thing...
        # G = nx.DiGraph()
        G = nx.read_gpickle(injected_file_path)
        print "ZZZZZZ"
        G_pruned_without_injected = nx.DiGraph()
        f = open(pruned_without_injected, 'r')
        lines = f.readlines()
        nx.parse_edgelist(lines, delimiter=' ', create_using=G_pruned_without_injected,
                          data=[('frames', int), ('weight', int)])
        edges_in_inject_but_not_pruned = []
        weight_differences = []
        different_edges = []
        for (u, v, d) in G.edges(data=True):
            # print (u,v,d), d['weight'], G[u][v]['weight']
            try:
                weight_difference = d['weight'] - G_pruned_without_injected[u][v]['weight']
                if weight_difference != 0:
                    weight_differences.append(weight_difference)
                    different_edges.append((u, v, d))
            except:
                weight_differences.append(d['weight'])
                different_edges.append((u, v, d, 'wasnt_in_pruned'))
        print "weight_differences_nothing ", weight_differences
        print "different_edges", different_edges
        print "in_injected_but_not_pruned", edges_in_inject_but_not_pruned
        for edge in edges_in_inject_but_not_pruned:
            print "in_injected_but_not_pruned", edge
        # print G.nodes()
        self.assertEqual(len(weight_differences), 0)
        #self.assertEqual(len([i for i in weight_differences if i == 400]), 3)
        #self.assertEqual(len([i for i in weight_differences if i == 5000]), 3)
    def test_dns_injection(self):
        """Inject an exfil path that traverses kube-dns and check the deltas."""
        print "test_injector"
        file_paths = [
            './tests/wordpress_thirteen_t1_default_bridge_0any_split_00036_20190220141725_edges.txt']
        counter_starting = 0
        svcs = ["my-release-pxc", "wwwppp-wordpress"]
        is_swarm = 0
        ms_s = svcs
        current_total_node_list = []
        svc_to_pod = {}
        node_attack_mapping = {}
        total_edgelist_nodes = []
        avg_dns_weight = 0
        avg_dns_pkts = 0
        container_info_path = "/Volumes/exM2/experimental_data/wordpress_info/wordpress_thirteen_t1/wordpress_thirteen_t1_docker_0_network_configs.txt"
        cilium_config_path = None # does NOT use cilium on reps 2-4
        kubernetes_svc_info = '/Volumes/exM2/experimental_data/wordpress_info/wordpress_thirteen_t1/wordpress_thirteen_t1_svc_config_0.txt'
        kubernetes_pod_info = '/Volumes/exM2/experimental_data/wordpress_info/wordpress_thirteen_t1/wordpress_thirteen_t1_pod_config_0.txt'
        container_to_ip, infra_service = create_mappings(is_swarm, container_info_path, kubernetes_svc_info,
                                                         kubernetes_pod_info, cilium_config_path, ms_s)
        initiator_info_for_paths = None # not actually need so no big deal
        name_of_dns_pod_node = None
        injected_file_path = './tests/injected_edgefiles/with_nodeAttribsavg_exfil_10000:0_avg_pkt_500:0_wordpress_thirteen_t1_default_bridge_0any_split_00036_20190220141725_edges.txt'
        pruned_without_injected = './tests/pruned_edgefiles/wordpress_thirteen_t1_default_bridge_0any_split_00036_20190220141725_edges.txt'
        last_attack_injected = None
        carryover = 0
        synthetic_exfil_paths = [['my_release_pxc_pod', 'my_release_pxc_vip', 'wwwppp_wordpress_pod', 'kube_dns_vip', 'kube_dns_pod',
                                  'internet']]
        attacks_to_times = [(0, 1)]
        time_interval = 30
        out_q = multiprocessing.Queue()
        avg_exfil_per_min = 10000
        exfil_per_min_variance = 0
        avg_pkt_size = 500
        pkt_size_variance = 0
        end_of_training = 200
        pod_creation_log = None
        analysis_pipeline.simplified_graph_metrics.process_and_inject_single_graph(counter_starting, file_paths,
                                                                                   svcs,
                                                                                   is_swarm, ms_s, container_to_ip,
                                                                                   infra_service,
                                                                                   synthetic_exfil_paths,
                                                                                   initiator_info_for_paths,
                                                                                   attacks_to_times, time_interval,
                                                                                   total_edgelist_nodes, svc_to_pod,
                                                                                   avg_dns_weight, avg_dns_pkts,
                                                                                   node_attack_mapping, out_q,
                                                                                   current_total_node_list,
                                                                                   name_of_dns_pod_node,
                                                                                   last_attack_injected,
                                                                                   carryover, avg_exfil_per_min,
                                                                                   exfil_per_min_variance,
                                                                                   avg_pkt_size, pkt_size_variance,
                                                                                   end_of_training, pod_creation_log)
        # okay, now I actually need to see if it did the right thing...
        # G = nx.DiGraph()
        G = nx.read_gpickle(injected_file_path)
        print "ZZZZZZ"
        G_pruned_without_injected = nx.DiGraph()
        f = open(pruned_without_injected, 'r')
        lines = f.readlines()
        nx.parse_edgelist(lines, delimiter=' ', create_using=G_pruned_without_injected,
                          data=[('frames', int), ('weight', int)])
        edges_in_inject_but_not_pruned = []
        weight_differences = []
        different_edges = []
        for (u, v, d) in G.edges(data=True):
            # print (u,v,d), d['weight'], G[u][v]['weight']
            try:
                weight_difference = d['weight'] - G_pruned_without_injected[u][v]['weight']
                if weight_difference != 0:
                    weight_differences.append(weight_difference)
                    different_edges.append((u, v, d))
            except:
                weight_differences.append(d['weight'])
                different_edges.append((u, v, d, 'wasnt_in_pruned'))
                edges_in_inject_but_not_pruned.append((u, v, d))
        print "weight_differences_dns ", weight_differences
        print "different_edges", different_edges
        print "in_injected_but_not_pruned", edges_in_inject_but_not_pruned
        for edge in different_edges:
            print "different_edges_indiv", edge
        # print G.nodes()
        self.assertEqual(len(weight_differences), 10)
        self.assertEqual(len([i for i in weight_differences if i == 400]), 5)
        self.assertEqual(len([i for i in weight_differences if i == 5000]), 5)
        print "edges_in_inject_but_not_pruned",edges_in_inject_but_not_pruned
        self.assertEqual(len(edges_in_inject_but_not_pruned), 4)
class TestChangePoint(unittest.TestCase):
    """Unit tests for the change-point-detection helpers.

    Exercises find_angles, change_point_detection, ide_angles and
    get_points_to_plot (all defined elsewhere in this module).
    """
    # Show full diffs on assertion failures.
    maxDiff = None
    # okay, so what am I trying to do here?
    # well, I am going to try to test this the change-point detection
    # method.
    # okay, step (1): load data that can be used to test it
    # this can be done, via:
    # (a) identify edge files
    # (b) read in via analyze_edge functions
    # (c) call as needd
    # okay, I am going to take a quick detour to test the angles function
    # but I will return to work on this function eventually
@classmethod
def setUpClass(cls):
    """Build the shared fixtures once for the whole class.

    Reads edgelist files (currently none -- see the TODO below), derives
    per-graph out-strength dictionaries, and defines a small hand-written
    "tensor" (a list of node->strength dicts) used by the tests.
    """
    # TODO: should put back in so these tests actually work again...
    #filenames = ['./tests/seastore_swarm_0.00_0.10.txt', './tests/seastore_swarm_0.10_0.10.txt', './tests/seastore_swarm_0.20_0.10.txt',
    #             './tests/seastore_swarm_0.30_0.10.txt', './tests/seastore_swarm_0.40_0.10.txt', './tests/seastore_swarm_0.50_0.10.txt',
    #             './tests/seastore_swarm_0.60_0.10.txt', './tests/seastore_swarm_0.70_0.10.txt', './tests/seastore_swarm_0.80_0.10.txt',
    #             './tests/seastore_swarm_0.90_0.10.txt', './tests/seastore_swarm_1.00_0.10.txt']
    # With filenames empty, the two loops below are no-ops and
    # cls.outstrength_dicts ends up empty.
    filenames = []
    list_of_graphs = []
    for file_path in filenames:
        G = nx.DiGraph()
        print "path to file is ", file_path
        nx.read_edgelist(file_path,
                         create_using=G, delimiter=',', data=(('weight', float),))
        list_of_graphs.append(G)
    total_node_list = []
    for cur_g in list_of_graphs:
        for node in cur_g.nodes():
            total_node_list.append(node)
    # De-duplicate the accumulated node names.
    cls.total_node_list = list(set(total_node_list))
    # arbitrarily picking this value (also, networkx is purely calculating this,
    # so I'm only testing change-point detection)
    cls.outstrength_dicts = []
    for cur_G in list_of_graphs:
        # Out-strength = sum of outgoing edge weights per source node.
        outstrength_dict = {}
        for (u, v, data) in cur_G.edges(data=True):
            if u in outstrength_dict:
                outstrength_dict[u] += data['weight']
            else:
                outstrength_dict[u] = data['weight']
        cls.outstrength_dicts.append( outstrength_dict )
    #outstrength_degrees_eigenvector = change_point_detection(outstrength_dicts, 4, total_node_list)
    #print outstrength_degrees_eigenvector
    # Hand-written snapshots: dict1..7 drift linearly; dict8/dict9 break the trend.
    cls.dict1 = {'front-end.1': 15, 'user.1': 25, 'user-db.1': 14, 'user.2': 22}
    cls.dict2 = {'front-end.1': 17, 'user.1': 27, 'user-db.1': 16, 'user.2': 20}
    cls.dict3 = {'front-end.1': 19, 'user.1': 29, 'user-db.1': 18, 'user.2': 18}
    cls.dict4 = {'front-end.1': 21, 'user.1': 31, 'user-db.1': 20, 'user.2': 16}
    cls.dict5 = {'front-end.1': 23, 'user.1': 33, 'user-db.1': 22, 'user.2': 14}
    cls.dict6 = {'front-end.1': 25, 'user.1': 35, 'user-db.1': 24, 'user.2': 12}
    cls.dict7 = {'front-end.1': 27, 'user.1': 37, 'user-db.1': 26, 'user.2': 10}
    cls.dict8 = {'front-end.1': 27, 'user.1': 37, 'user-db.1': 26, 'user.2': 30}
    cls.dict9 = {'front-end.1': 52, 'user.1': 62, 'user-db.1': 10, 'user.2': 30}
    cls.tensor = [cls.dict1, cls.dict2, cls.dict3, cls.dict4, cls.dict5, cls.dict6, cls.dict7, cls.dict8, cls.dict9]
    # NOTE(review): this overwrites the graph-derived total_node_list above.
    cls.total_node_list = ['front-end.1', 'user.1', 'user-db.1', 'user.2']
#''' Not sure if this is needed
#def test_changepoint_with_outstrength(self):
# outstrength_changepoint_angles = change_point_detection(self.outstrength_dicts, 4, self.total_node_list)
# print "outstrength_changepoint_angles", outstrength_changepoint_angles
# # I'm going to need actual tests, but I think these pure edgefiles
# # are too much, maybe reduce them somehow?
#'''
### TODO: write some tests for change_point_detection(tensor, window_size, nodes_in_tensor):
def change_point_detection_zz(self):
    """Smoke-run change_point_detection on the class fixture tensor.

    NOTE(review): the name lacks the "test_" prefix, so unittest discovery
    will NOT run this method -- presumably disabled on purpose; confirm.
    The only assertion is a trivially-true placeholder.
    """
    ## note: tensor is really a *list of dictionaries*, with keys of nodes_in_tensor
    print "self_tensor",self.tensor
    change_point_angles = change_point_detection(self.tensor, 4, [])
    print "change_point_angles", change_point_angles
    print "ZZZZ"
    self.assertEqual(1,1)
def test_find_angles_simplest(self):
    """find_angles over four identical vectors with window 1.

    The first position has no predecessor window, so its angle is NaN;
    every later angle between identical vectors must be 0.
    """
    vectors = [np.array([1, 1]) for _ in range(4)]
    expected = [float('nan'), 0.0, 0.0, 0.0]
    actual = find_angles(vectors, window_size=1)
    for idx in range(len(expected)):
        want, got = expected[idx], actual[idx]
        if math.isnan(want):
            self.assertTrue(math.isnan(got))
        else:
            self.assertAlmostEqual(got, want)
def test_find_angles_simple_averaging(self):
# find_angles(list_of_vectors, window_size)
# okay, to test this I am going to need to do 2 things:
# (1) some lists of vectors, and
# (2) the angles between them
array1 = np.array([1, 1])
array2 = np.array([2, 2])
array3 = np.array([3, 3])
array4 = np.array([1, 1])
list_of_arrays = [array1, array2, array3, array4]
expected_angles = [float('nan'), float('nan'), 0.0, 0.0]
computed_angles = find_angles(list_of_arrays, window_size=2)
# self.assertEquals(computed_angles , [0.0, 0.0, 0.0])
for i in range(0, len(expected_angles)):
if math.isnan(computed_angles[i]):
self.assertTrue(math.isnan(computed_angles[i]))
else:
self.assertAlmostEqual(computed_angles[i], expected_angles[i])
print "averaging worked"
def test_find_angles_averaging(self):
    """find_angles with window_size=2 where the direction actually changes.

    Positions 0-1 lack a full window -> NaN. Position 2 compares [2,4]
    against the window average of [1,1] and [4,2] (expected angle
    ~0.5667292 rad -- value taken from the original author; confirm against
    find_angles' averaging convention). Position 3's [1000,1000] is parallel
    to the diagonal -> expected 0.
    """
    # find_angles(list_of_vectors, window_size)
    # okay, to test this I am going to need to do 2 things:
    # (1) some lists of vectors, and
    # (2) the angles between them
    array1 = np.array([1, 1])
    array2 = np.array([4, 2])
    array3 = np.array([2, 4])
    array4 = np.array([1000, 1000])
    list_of_arrays = [array1, array2, array3, array4]
    expected_angles = [float('nan'), float('nan'), 0.5667292, 0.0]
    computed_angles = find_angles(list_of_arrays, window_size=2)
    # self.assertEquals(computed_angles , [0.0, 0.0, 0.0])
    for i in range(0, len(expected_angles)):
        if math.isnan(expected_angles[i]):
            self.assertTrue(math.isnan(computed_angles[i]))
        else:
            self.assertAlmostEqual(computed_angles[i], expected_angles[i])
    print "averaging worked"
def test_find_angles_empty(self):
    """find_angles when some vectors are empty arrays.

    Empty vectors (positions 2-3) should poison their windows, yielding
    NaN until two clean vectors are available again (positions 5-6 -> 0).
    """
    # find_angles(list_of_vectors, window_size)
    # okay, to test this I am going to need to do 2 things:
    # (1) some lists of vectors, and
    # (2) the angles between them
    array1 = np.array([1, 1])
    array2 = np.array([4, 2])
    array3 = np.array([])
    array4 = np.array([])
    array5 = np.array([1000, 1000])
    array6 = np.array([1000, 1000])
    array7 = np.array([1000, 1000])
    list_of_arrays = [array1, array2, array3, array4, array5, array6, array7]
    expected_angles = [float('nan'), float('nan'), float('nan'), float('nan'), float('nan'), 0.0, 0.0] # 7 - 2 = 5
    computed_angles = find_angles(list_of_arrays, window_size=2)
    # self.assertEquals(computed_angles , [0.0, 0.0, 0.0])
    for i in range(0, len(expected_angles)):
        if math.isnan(expected_angles[i]):
            print "empty angles", expected_angles[i], computed_angles[i]
            self.assertTrue(math.isnan(computed_angles[i]))
        else:
            print computed_angles[i], expected_angles[i]
            self.assertAlmostEqual(computed_angles[i], expected_angles[i])
    print "averaging worked"
#'''
def test_find_angles_zero(self):
    """find_angles when some vectors are all-zero.

    Zero vectors have no direction, so positions whose window is dominated
    by them are NaN; the expected non-NaN values (.6435011, .32175055) are
    the original author's hand-computed angles -- confirm against
    find_angles' zero-vector handling.
    """
    # find_angles(list_of_vectors, window_size)
    # okay, to test this I am going to need to do 2 things:
    # (1) some lists of vectors, and
    # (2) the angles between them
    array1 = np.array([0, 0])
    array2 = np.array([4, 2])
    array3 = np.array([2,4])
    array4 = np.array([0, 0])
    array5 = np.array([1000, 1000])
    array6 = np.array([1000, 1000])
    array7 = np.array([1000, 1000])
    list_of_arrays = [array1, array2, array3, array4, array5, array6, array7]
    expected_angles = [float('nan'), float('nan'), .6435011, float('nan'), .32175055, 0.0, 0.0] # 7 - 5 = 2
    computed_angles = find_angles(list_of_arrays, window_size=2)
    # self.assertEquals(computed_angles , [0.0, 0.0, 0.0])
    for i in range(0, len(expected_angles)):
        if math.isnan(expected_angles[i]):
            print "zero angles", expected_angles[i], computed_angles[i]
            self.assertTrue(math.isnan(computed_angles[i]))
        else:
            print computed_angles[i], expected_angles[i]
            self.assertAlmostEqual(computed_angles[i], expected_angles[i])
    print "averaging worked"
#'''
def test_find_angles_different_sizes(self):
    """find_angles when vector dimensionality changes mid-stream (2-D vs 3-D).

    Expected values are the original author's hand-computed angles for how
    find_angles pads/compares mixed-size vectors (e.g. 1.5707963 ~= pi/2 for
    an orthogonal new axis) -- confirm against the implementation.
    """
    array1 = np.array([0, 0])
    array2 = np.array([4, 2])
    array3 = np.array([2,4])
    array4 = np.array([0, 0, 4])
    array5 = np.array([1000, 1000, 3])
    array6 = np.array([1000, 1000, 2])
    array7 = np.array([1000, 1000])
    list_of_arrays = [array1, array2, array3, array4, array5, array6, array7]
    expected_angles = [float('nan'), float('nan'), .6435011, 1.5707963, 0.7833984, 0.00353549, 0.0017677651] #7-5=2
    computed_angles = find_angles(list_of_arrays, window_size=2)
    # self.assertEquals(computed_angles , [0.0, 0.0, 0.0])
    for i in range(0, len(expected_angles)):
        if math.isnan(expected_angles[i]):
            print "zero angles", expected_angles[i], computed_angles[i]
            self.assertTrue(math.isnan(computed_angles[i]))
        else:
            print computed_angles[i], expected_angles[i]
            self.assertAlmostEqual(computed_angles[i], expected_angles[i])
    print "averaging worked"
def test_change_point_too_small_window_size(self):
    """A window of 2 is below the minimum: change_point_detection exits."""
    self.assertRaises(SystemExit, change_point_detection, self.tensor, 2, [])
def test_change_point_tensor_empty(self):
    """An empty tensor produces an empty list of angles."""
    self.assertEqual(change_point_detection([], 4, []), [])
''' put back in!!1
def test_change_point_tensor_none_dict(self):
# n * (n - 1) = 4 * 3 = 12
dict1 = {'front-end.1': 15, 'user.1': 25, 'user-db.1': 14, 'user.2': 22}
dict2 = {'front-end.1': 17, 'user.1': 27, 'user-db.1': 16, 'user.2': 20}
dict3 = {'front-end.1': 19, 'user.1': 29, 'user-db.1': 18, 'user.2': 18}
dict4 = {'front-end.1': 21, 'user.1': 31, 'user-db.1': 20, 'user.2': 16}
dict5 = {'front-end.1': 25, 'user.1': 35, 'user-db.1': 24, 'user.2': 12}
dict6 = {'front-end.1': 27, 'user.1': 37, 'user-db.1': 26, 'user.2': 10}
dict7 = {'front-end.1': 29, 'user.1': 39, 'user-db.1': 28, 'user.2': 8}
dict8 = {'front-end.1': 31, 'user.1': 41, 'user-db.1': 30, 'user.2': 6} # 1-7 compared to this initially
dict9 = {'front-end.1': 33, 'user.1': 43, 'user-db.1': 32, 'user.2': 4}
dict10 = None
dict11 = {'front-end.1': 37, 'user.1': 47, 'user-db.1': 36, 'user.2': 0}
dict12 = {'front-end.1': 39, 'user.1': 49, 'user-db.1': 38, 'user.2': -2}
tensor = [dict1, dict2, dict3, dict4, dict5, dict6, dict7, dict8, dict9, dict10, dict11, dict12]
angles = change_point_detection(tensor, 4, [])
# returns list of angles (of size len(tensor)). Note:
print "test_change_point_tensor_none_dict", angles
expected_angles = [float('nan'), float('nan'), float('nan'), float('nan'), float('nan'), float('nan'),
float('nan'), 0.0, 0.0, 0.0, 0.0, 0.0] # 12 - 5 = 7 nan's
print "None", angles
for i in range(0,7):
self.assertTrue(math.isnan(angles[i]))
self.assertEquals(angles[7:], expected_angles[7:])
'''
'''
def test_change_point_tensor_dict_val_nan(self):
dict1 = {'front-end.1': 15, 'user.1': 25, 'user-db.1': 14, 'user.2': 22}
dict2 = {'front-end.1': 17, 'user.1': 27, 'user-db.1': 16, 'user.2': 20}
dict3 = {'front-end.1': 19, 'user.1': 29, 'user-db.1': 18, 'user.2': 18}
dict4 = {'front-end.1': 21, 'user.1': 31, 'user-db.1': 20, 'user.2': 16}
dict5 = {'front-end.1': 25, 'user.1': 35, 'user-db.1': 24, 'user.2': 12}
dict6 = {'front-end.1': 27, 'user.1': 37, 'user-db.1': 26, 'user.2': 10}
dict7 = {'front-end.1': 21, 'user.1': float('nan'), 'user-db.1': 20, 'user.2': 16}
dict8 = {'front-end.1': 27, 'user.1': 37, 'user-db.1': 26, 'user.2': 10} # compare to this initially
dict9 = {'front-end.1': 52, 'user.1': 62, 'user-db.1': 10, 'user.2': 30}
tensor = [dict1, dict2, dict3, dict4, dict5, dict6, dict7, dict8, dict9]
angles = change_point_detection(tensor, 4, [])
print 'nan dictVal', angles
self.assertEqual(angles[0], 0.0)
self.assertNotEqual(angles[1], 0.0)
# todo: the eigenvector calculation function returns wierd answers and causes this test to fail... don't why why...
'''
''' ##TODO: fix
def test_change_point_nodes_disappear(self):
dict1 = {'front-end.1': 15, 'user.1': 25, 'user-db.1': 14, 'user.2': 22}
dict2 = {'front-end.1': 17, 'user.1': 27, 'user-db.1': 16, 'user.2': 20}
dict3 = {'front-end.1': 19, 'user.1': 29, 'user-db.1': 18, 'user.2': 18}
dict4 = {'front-end.1': 21, 'user.1': 31, 'user-db.1': 20, 'user.2': 16}
dict5 = {'front-end.1': 23, 'user.1': 33, 'user-db.1': 22, 'user.2': 14}
dict6 = {'front-end.1': 25, 'user.1': 35, 'user-db.1': 24, 'user.2': 12}
dict7 = {'front-end.1': 37, 'user.1': 47, 'user-db.1': 36, 'user.2': 0}
dict8 = {'front-end.1': 27, 'user.1': 37, 'user-db.1': 26, 'user.2': 10} # compares here initially
dict9 = {'front-end.1': 29, 'user.1': 39, 'user-db.1': 28}
dict10 = {'front-end.1': 31, 'user.1': 41, 'user-db.1': 30}
dict11 = {'front-end.1': 40, 'user.1': 50, 'user-db.1': 39}
tensor = [dict1, dict2, dict3, dict4, dict5, dict6, dict7, dict8, dict9, dict10, dict11]
angles = change_point_detection(tensor, 4, []) # 11 - 4 = 7 nan's
print "disappear", angles
for counter,angle in enumerate(angles):
print counter,angle
for i in range(0,7):
self.assertTrue(math.isnan(angles[i]))
self.assertEqual(angles[7], 0)
self.assertNotEqual(angles[8], 0)
self.assertNotEqual(angles[9], 0)
self.assertNotEqual(angles[10], 0)
'''
'''##TODO:FIX
def test_change_point_nodes_extra_appear(self):
dict1 = {'front-end.1': 15, 'user.1': 25, 'user.2': 22}
dict2 = {'front-end.1': 17, 'user.1': 27, 'user.2': 20}
dict3 = {'front-end.1': 19, 'user.1': 29, 'user.2': 18}
dict4 = {'front-end.1': 21, 'user.1': 31, 'user.2': 16}
dict5 = {'front-end.1': 23, 'user.1': 33, 'user.2': 14}
dict6 = {'front-end.1': 25, 'user.1': 35, 'user.2': 12}
dict7 = {'front-end.1': 25, 'user.1': 35, 'user.2': 12}
dict8 = {'front-end.1': 25, 'user.1': 35, 'user.2': 12} # start comparisons here
dict9 = {'front-end.1': 27, 'user.1': 37, 'user-db.1': 26, 'user.2': 10}
dict10 = {'front-end.1': 27, 'user.1': 37, 'user-db.1': 26, 'user.2': 30}
dict11 = {'front-end.1': 52, 'user.1': 62, 'user-db.1': 10, 'user.2': 30}
tensor = [dict1, dict2, dict3, dict4, dict5, dict6, dict7, dict8, dict9, dict10, dict11]
angles = change_point_detection(tensor, 4, [])
print "appear", angles
# 11 - 4 = 7
for i in range(0,7):
self.assertTrue(math.isnan(angles[i]))
self.assertEqual(angles[7], 0.0)
self.assertNotEqual(angles[8], 0.0)
self.assertNotEqual(angles[9], 0.0)
self.assertNotEqual(angles[10], 0.0)
'''
''' ### TODO: FIX!!
def test_change_point_nodes_perfect_corr(self):
dict1 = {'front-end.1': 15, 'user.1': 25, 'user.2': 22}
dict2 = {'front-end.1': 17, 'user.1': 27, 'user.2': 20}
dict3 = {'front-end.1': 19, 'user.1': 29, 'user.2': 18}
dict4 = {'front-end.1': 21, 'user.1': 31, 'user.2': 16}
dict5 = {'front-end.1': 23, 'user.1': 33, 'user.2': 14}
dict6 = {'front-end.1': 25, 'user.1': 35, 'user.2': 12}
dict7 = {'front-end.1': 27, 'user.1': 37, 'user.2': 10}
dict8 = {'front-end.1': 29, 'user.1': 39, 'user.2': 8}
dict9 = {'front-end.1': 35, 'user.1': 45, 'user.2': 2}
tensor = [dict1, dict2, dict3, dict4, dict5, dict6, dict7, dict8, dict9]
expected_angles = [float('nan'), float('nan'), float('nan'), float('nan'), float('nan'), 0.0, 0.0, 0.0, 0.0] # 9 - 4 = 5
angles = change_point_detection(tensor, 3, [])
print "change_point_detection_qqqq", angles, tensor
# self.assertEquals(computed_angles , [0.0, 0.0, 0.0])
for i in range(0, len(expected_angles)):
if math.isnan(expected_angles[i]):
self.assertTrue(math.isnan(angles[i]))
else:
self.assertEqual(expected_angles[i], angles[i])
'''
def test_change_point_tensor_no_angle_again(self):
    """Three nodes whose strengths all rise in lock-step: no change points.

    After the warm-up positions (NaN), every angle must be exactly 0.
    """
    front_end_vals = [15, 17, 19, 21, 25, 27, 29, 31, 35]
    # user.1 tracks front-end.1 at +10, user-db.1 at -1 -- all parallel.
    tensor = [{'front-end.1': f, 'user.1': f + 10, 'user-db.1': f - 1}
              for f in front_end_vals]
    angles = change_point_detection(tensor, 3, [])
    expected_angles = 5 * [float('nan')] + 4 * [0.0]  # 9 - 4 = 5 warm-up NaNs
    for i in range(0, len(expected_angles)):
        if math.isnan(expected_angles[i]):
            self.assertTrue(math.isnan(angles[i]))
        else:
            self.assertEqual(expected_angles[i], angles[i])
def test_change_point_tensor_one_decreasing(self):
    """Three nodes rise while user-db.2 falls at the same rate: no change points.

    A steady linear trend (even a negative one) must not register as a
    change; after the warm-up NaNs every angle is exactly 0.
    """
    base_vals = [15, 17, 19, 21, 25, 27, 29, 31, 35]
    # user.1 = base+10, user-db.1 = base-1, user-db.2 = 39-base (decreasing).
    tensor = [{'front-end.1': b, 'user.1': b + 10, 'user-db.1': b - 1, 'user-db.2': 39 - b}
              for b in base_vals]
    angles = change_point_detection(tensor, 3, [])
    expected_angles = 5 * [float('nan')] + 4 * [0.0]  # 9 - 4 = 5 warm-up NaNs
    for i in range(0, len(expected_angles)):
        if math.isnan(expected_angles[i]):
            self.assertTrue(math.isnan(angles[i]))
        else:
            self.assertEqual(expected_angles[i], angles[i])
def test_change_point_tensor_one_reordered(self):
    """Same linear trend as the decreasing test, but with the dict literals'
    key order deliberately scrambled from snapshot to snapshot.

    change_point_detection must be insensitive to key order; the angles
    should be identical to the ordered case (warm-up NaNs, then all 0).
    """
    dict1 = {'front-end.1': 15, 'user-db.2': 24, 'user.1': 25, 'user-db.1': 14}
    dict2 = {'user-db.2': 22, 'front-end.1': 17, 'user.1': 27, 'user-db.1': 16}
    dict3 = {'front-end.1': 19, 'user.1': 29, 'user-db.1': 18, 'user-db.2': 20}
    dict4 = {'front-end.1': 21, 'user.1': 31, 'user-db.2': 18, 'user-db.1': 20}
    dict5 = {'front-end.1': 25, 'user-db.2': 14, 'user.1': 35, 'user-db.1': 24}
    dict6 = {'user-db.2': 12, 'front-end.1': 27, 'user.1': 37, 'user-db.1': 26}
    dict7 = {'front-end.1': 29, 'user.1': 39, 'user-db.1': 28, 'user-db.2': 10}
    dict8 = {'front-end.1': 31, 'user.1': 41, 'user-db.2': 8, 'user-db.1': 30}
    dict9 = {'front-end.1': 35, 'user-db.2': 4, 'user.1': 45, 'user-db.1': 34}
    tensor = [dict1, dict2, dict3, dict4, dict5, dict6, dict7, dict8, dict9]
    angles = change_point_detection(tensor, 3, [])
    expected_angles = [float('nan'), float('nan'), float('nan'), float('nan'), float('nan'), 0.0, 0.0, 0.0, 0.0] # 9 - 4 = 5
    # self.assertEquals(computed_angles , [0.0, 0.0, 0.0])
    for i in range(0, len(expected_angles)):
        if math.isnan(expected_angles[i]):
            self.assertTrue(math.isnan(angles[i]))
        else:
            self.assertEqual(expected_angles[i], angles[i])
def test_change_point_tensor_one_reordered_spike(self):
    """Scrambled-key snapshots with a one-off spike in user-db.1 at dict7.

    The spike should produce non-zero angles while it is inside the
    comparison window, then decay back to 0 once it leaves (position 12).
    """
    dict1 = {'front-end.1': 15, 'user-db.2': 24, 'user.1': 25, 'user-db.1': 14}
    dict2 = {'user-db.2': 22, 'front-end.1': 17, 'user.1': 27, 'user-db.1': 16}
    dict3 = {'front-end.1': 19, 'user.1': 29, 'user-db.1': 18, 'user-db.2': 20}
    dict4 = {'front-end.1': 21, 'user.1': 31, 'user-db.2': 18, 'user-db.1': 20}
    dict5 = {'front-end.1': 25, 'user-db.2': 14, 'user.1': 35, 'user-db.1': 24}
    dict6 = {'user-db.2': 12, 'front-end.1': 27, 'user.1': 37, 'user-db.1': 26}
    dict7 = {'front-end.1': 29, 'user.1': 39, 'user-db.1': 38, 'user-db.2': 10} # user-db.1 spikes upwards
    dict8 = {'front-end.1': 31, 'user.1': 41, 'user-db.2': 8, 'user-db.1': 30}
    dict9 = {'front-end.1': 35, 'user-db.2': 4, 'user.1': 45, 'user-db.1': 34}
    dict10 = {'front-end.1': 37, 'user-db.2': 2, 'user.1': 47, 'user-db.1': 36}
    dict11 = {'front-end.1': 39, 'user-db.2': 0, 'user.1': 49, 'user-db.1': 38}
    dict12 = {'front-end.1': 41, 'user-db.2': -2, 'user.1': 51, 'user-db.1': 40}
    dict13 = {'front-end.1': 43, 'user-db.2': -4, 'user.1': 53, 'user-db.1': 42}
    tensor = [dict1, dict2, dict3, dict4, dict5, dict6, dict7, dict8, dict9, dict10, dict11, dict12, dict13]
    angles = change_point_detection(tensor, 3, [])
    print "reordered_spike", angles
    # used to have 13 - (3 + 2) = 8 vals. So 5 nan's in beginning are new...
    # self.assertEquals(computed_angles , [0.0, 0.0, 0.0])
    for i in range(0, len(angles)):
        if i < 5:
            # Warm-up positions: no full window yet.
            self.assertTrue(math.isnan(angles[i]))
        elif i == 5 or i == 12:
            # Before the spike enters the window / after it has fully left.
            self.assertEqual(0.0, angles[i])
        else:
            # Spike is influencing the window: some non-zero angle.
            print "i", i, angles[i]
            self.assertNotEqual(angles[i], 0)
    # hmm... I might need to think about this a little more....
def test_get_points_to_plot(self):
    """get_points_to_plot extracts the values around the exfil window.

    With 10 values at granularity 10s and an exfil window of [50, 60] plus
    2s of wiggle room, the call returns the value slice [19, 30, 20] along
    with the start/end indices (4, 6).
    """
    vals = [5, 9, 3, 4, 19, 30, 20, 67, 89, 32]
    time_grand = 10
    exfil_start = 50
    exfil_end = 60
    wiggle_room = 2 #seconds
    vals = get_points_to_plot(time_grand, vals, exfil_start, exfil_end, wiggle_room)
    # expect vals to be [30]
    print "vals for test_get_points_to_plot", vals
    self.assertEquals(vals, ([19, 30, 20],4,6))
## TODO tests for:: ide_angles_results = ide_angles(adjacency_matrixes, 6, total_edgelist_nodes)
def test_ide_angle(self):
    """ide_angles with window_size=1 on three 2x2 adjacency matrices.

    Checks the angles between successive principal eigenvectors; the
    expected radians come from the hand-worked eigenvectors in the
    comments below.
    """
    test_matrix = pd.DataFrame(np.array([[-7, 2],[8,-1]]))
    print "test_matrix", test_matrix
    test_matrix_two = pd.DataFrame(np.array([[1,1],[0,-1]]))
    print "test_matrix_two",test_matrix_two
    test_matrix_three = pd.DataFrame(np.array([[9,8],[1,2]]))
    angles = ide_angles([test_matrix, test_matrix_two, test_matrix_three], window_size=1, nodes_in_tensor=[]) # note: third attrib doesn't matter
    #print "adjacency_matrix_eigenvectors", adjacency_matrix_eigenvectors
    # okay, so the first principal eigenvector should be (-1,1), a.k.a. (-0.707107, 0.707107)
    # then the second principal eigenvector should be (1,0)
    # third principal eigenvector should be (8,1), a.k.a. (0.992278, 0.124035)
    # which means that the first angle should be 135 degrees, or 2.35619 radians
    # and the first angle should be 7.125 degrees, or 0.12435471 radians
    print "test_ide_angles", angles
    self.assertTrue(math.isnan(angles[0]))
    self.assertAlmostEqual(angles[1], 2.3561945)
    self.assertAlmostEqual(angles[2], 0.12435499)
def test_ide_angle_longer_window(self):
    """ide_angles with window_size=2: the first TWO positions are NaN and the
    third angle is measured against the averaged eigenvector of the window
    (hand-worked expected value in the comments below).
    """
    test_matrix = pd.DataFrame(np.array([[-7, 2],[8,-1]]))
    test_matrix_two = pd.DataFrame(np.array([[1,1],[0,-1]]))
    test_matrix_three = pd.DataFrame(np.array([[9,8],[1,2]]))
    angles = ide_angles([test_matrix, test_matrix_two, test_matrix_three], window_size=2, nodes_in_tensor=[]) # note: third attrib doesn't matter
    #print "adjacency_matrix_eigenvectors", adjacency_matrix_eigenvectors
    # okay, so the first principal eigenvector should be (-1,1), a.k.a. (-0.707107, 0.707107)
    # then the second principal eigenvector should be (1,0)
    # average of the first two eigenvectors is: (0.382683, 0.92388)
    # third principal eigenvector should be (8,1), a.k.a. (0.992278, 0.124035)
    # and the second angle should be 60.37497 degrees, or 1.0537420123 radians
    print "test_ide_angles_window_size_two", angles
    self.assertTrue(math.isnan(angles[0]))
    self.assertTrue(math.isnan(angles[1]))
    self.assertAlmostEqual(angles[2],1.05374225)
if __name__ == "__main__":
    # Discover and run every TestCase in this module; unittest.main() calls
    # sys.exit(), so nothing after it executes.
    unittest.main()
    #singletest = unittest.TestSuite()
    #singletest.addTest(testSyntheticAttackInjector())
    #unittest.TextTestRunner().run(singletest)
    #singletest.addTest(testSyntheticAttackInjector())
| 52.889435
| 184
| 0.574445
| 5,801
| 43,052
| 4.046027
| 0.084985
| 0.01014
| 0.038729
| 0.011248
| 0.820843
| 0.795279
| 0.774701
| 0.753866
| 0.746794
| 0.740701
| 0
| 0.081349
| 0.289882
| 43,052
| 813
| 185
| 52.95449
| 0.68638
| 0.122108
| 0
| 0.660679
| 1
| 0
| 0.158603
| 0.075196
| 0
| 0
| 0
| 0.00738
| 0.077844
| 0
| null | null | 0.001996
| 0.017964
| null | null | 0.081836
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
148b75babea36876a6ef19146a5495ebb3ceb099
| 27,871
|
py
|
Python
|
Contents/Libraries/Shared/PicartoClientAPI/apis/webhook_api.py
|
Sythelux/Picarto.bundle
|
f2e9e9e75421b15c562c961c8c31090c508166ff
|
[
"BSD-3-Clause"
] | null | null | null |
Contents/Libraries/Shared/PicartoClientAPI/apis/webhook_api.py
|
Sythelux/Picarto.bundle
|
f2e9e9e75421b15c562c961c8c31090c508166ff
|
[
"BSD-3-Clause"
] | 5
|
2018-01-29T23:18:20.000Z
|
2018-01-29T23:57:15.000Z
|
Contents/Libraries/Shared/PicartoClientAPI/apis/webhook_api.py
|
Sythelux/Picarto.bundle
|
f2e9e9e75421b15c562c961c8c31090c508166ff
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
"""
Picarto.TV API Documentation
The Picarto.TV API documentation Note, for fixed access tokens, the header that needs to be sent is of the format: `Authorization: Bearer yourTokenHere` This can be generated at https://oauth.picarto.tv/ For chat API, see https://docs.picarto.tv/chat/chat.proto - contact via the email below for implementation details
OpenAPI spec version: 1.2.5
Contact: api@picarto.tv
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class WebhookApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the webhook API wrapper.

    Uses the supplied api_client if given; otherwise falls back to the
    shared client on the global Configuration, creating one on demand.
    """
    # Configuration() is constructed unconditionally, matching the
    # generated-client convention.
    config = Configuration()
    if not api_client:
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def webhooks_get(self, client_id, client_secret, **kwargs):
    """
    Get all registered webhooks for your account.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronous, in which case the request thread is returned and
    the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str client_id: Your application's client ID (required)
    :param str client_secret: Your application's client secret (required)
    :param int channel_id: A channel ID to filter by
    :return: list[Webhook]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the decoded payload is wanted, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the async (callback present -> request thread) and sync paths
    # simply return the underlying call's result.
    return self.webhooks_get_with_http_info(client_id, client_secret, **kwargs)
def webhooks_get_with_http_info(self, client_id, client_secret, **kwargs):
    """
    Get all registered webhooks for your account
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.webhooks_get_with_http_info(client_id, client_secret, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str client_id: Your application's client ID (required)
    :param str client_secret: Your application's client secret (required)
    :param int channel_id: A channel ID to filter by
    :return: list[Webhook]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint, plus framework internals.
    all_params = ['client_id', 'client_secret', 'channel_id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() captures the explicit parameters (self, client_id,
    # client_secret, kwargs) AND all_params by name -- do not rename locals
    # above this line, the names become dict keys.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method webhooks_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'client_id' is set
    if ('client_id' not in params) or (params['client_id'] is None):
        raise ValueError("Missing the required parameter `client_id` when calling `webhooks_get`")
    # verify the required parameter 'client_secret' is set
    if ('client_secret' not in params) or (params['client_secret'] is None):
        raise ValueError("Missing the required parameter `client_secret` when calling `webhooks_get`")

    collection_formats = {}

    path_params = {}

    # Credentials and the optional channel filter travel as query parameters.
    query_params = []
    if 'client_id' in params:
        query_params.append(('client_id', params['client_id']))
    if 'client_secret' in params:
        query_params.append(('client_secret', params['client_secret']))
    if 'channel_id' in params:
        query_params.append(('channel_id', params['channel_id']))

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json; charset=utf-8', 'text/plain; charset=utf-8'])

    # Authentication setting: none -- this endpoint authenticates via the
    # client_id/client_secret query parameters instead.
    auth_settings = []

    return self.api_client.call_api('/webhooks', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[Webhook]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def webhooks_post(self, type, uri, **kwargs):
    """
    Register a webhook.

    Synchronous by default; pass a `callback` keyword function to make the
    request asynchronous, in which case the request thread is returned and
    the callback receives the response.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str type: The webhook type (required)
    :param str uri: Webhook destination URI (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Only the decoded payload is wanted, not the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the async (callback present -> request thread) and sync paths
    # simply return the underlying call's result.
    return self.webhooks_post_with_http_info(type, uri, **kwargs)
def webhooks_post_with_http_info(self, type, uri, **kwargs):
    """
    Register a webhook
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.webhooks_post_with_http_info(type, uri, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str type: The webhook type (required)
    :param str uri: Webhook destination URI (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint, plus framework internals.
    all_params = ['type', 'uri']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() captures the explicit parameters (self, type, uri,
    # kwargs) AND all_params by name -- do not rename locals above this
    # line, the names become dict keys.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method webhooks_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'type' is set
    if ('type' not in params) or (params['type'] is None):
        raise ValueError("Missing the required parameter `type` when calling `webhooks_post`")
    # verify the required parameter 'uri' is set
    if ('uri' not in params) or (params['uri'] is None):
        raise ValueError("Missing the required parameter `uri` when calling `webhooks_post`")

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    # type and uri are sent as form fields in the POST body.
    form_params = []
    local_var_files = {}
    if 'type' in params:
        form_params.append(('type', params['type']))
    if 'uri' in params:
        form_params.append(('uri', params['uri']))

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json; charset=utf-8', 'text/plain; charset=utf-8'])

    # Authentication setting: registering a webhook requires OAuth2.
    auth_settings = ['OAuth2Access', 'OAuth2Implicit']

    return self.api_client.call_api('/webhooks', 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def webhooks_webhook_id_delete(self, webhook_id, client_id, client_secret, **kwargs):
    """
    Delete a webhook

    Synchronous by default. Supply a `callback` keyword argument to make the
    request asynchronously; the request thread is then returned instead of
    the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int webhook_id: The webhook ID (required)
    :param str client_id: Your application's client ID (required)
    :param str client_secret: Your application's client secret (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only ever want the deserialized
    # data, never the full (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths simply forward the low-level call's
    # result, so no branching on `callback` is needed here.
    result = self.webhooks_webhook_id_delete_with_http_info(
        webhook_id, client_id, client_secret, **kwargs)
    return result
def webhooks_webhook_id_delete_with_http_info(self, webhook_id, client_id, client_secret, **kwargs):
    """
    Delete a webhook

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.webhooks_webhook_id_delete_with_http_info(webhook_id, client_id, client_secret, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int webhook_id: The webhook ID (required)
    :param str client_id: Your application's client ID (required)
    :param str client_secret: Your application's client secret (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if any required parameter is None.
    """
    # API parameters plus the transport-level keyword arguments this
    # method forwards to `call_api`.
    all_params = ['webhook_id', 'client_id', 'client_secret',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    # Build the params dict explicitly instead of via locals(): locals()
    # also captured `self`, and iterating kwargs with dict.items() removes
    # the previous dependency on six.iteritems (works on Python 2 and 3).
    params = {'webhook_id': webhook_id,
              'client_id': client_id,
              'client_secret': client_secret}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method webhooks_webhook_id_delete" % key
            )
        params[key] = val

    # All three API parameters are required and must not be None.
    for name in ('webhook_id', 'client_id', 'client_secret'):
        if params.get(name) is None:
            raise ValueError("Missing the required parameter `%s` "
                             "when calling `webhooks_webhook_id_delete`" % name)

    # The webhook ID travels in the URL path and the credentials as query
    # parameters; this endpoint uses no request body and no auth settings.
    path_params = {'webhook_id': params['webhook_id']}
    query_params = [('client_id', params['client_id']),
                    ('client_secret', params['client_secret'])]
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json; charset=utf-8', 'text/plain; charset=utf-8']),
    }

    return self.api_client.call_api(
        '/webhooks/{webhook_id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def webhooks_webhook_id_get(self, webhook_id, client_id, client_secret, **kwargs):
    """
    Get a webhook

    Synchronous by default. Supply a `callback` keyword argument to make the
    request asynchronously; the request thread is then returned instead of
    the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int webhook_id: The webhook ID (required)
    :param str client_id: Your application's client ID (required)
    :param str client_secret: Your application's client secret (required)
    :return: Webhook
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only ever want the deserialized
    # data, never the full (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths simply forward the low-level call's
    # result, so no branching on `callback` is needed here.
    result = self.webhooks_webhook_id_get_with_http_info(
        webhook_id, client_id, client_secret, **kwargs)
    return result
def webhooks_webhook_id_get_with_http_info(self, webhook_id, client_id, client_secret, **kwargs):
    """
    Get a webhook

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.webhooks_webhook_id_get_with_http_info(webhook_id, client_id, client_secret, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int webhook_id: The webhook ID (required)
    :param str client_id: Your application's client ID (required)
    :param str client_secret: Your application's client secret (required)
    :return: Webhook
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if any required parameter is None.
    """
    # API parameters plus the transport-level keyword arguments this
    # method forwards to `call_api`.
    all_params = ['webhook_id', 'client_id', 'client_secret',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    # Build the params dict explicitly instead of via locals(): locals()
    # also captured `self`, and iterating kwargs with dict.items() removes
    # the previous dependency on six.iteritems (works on Python 2 and 3).
    params = {'webhook_id': webhook_id,
              'client_id': client_id,
              'client_secret': client_secret}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method webhooks_webhook_id_get" % key
            )
        params[key] = val

    # All three API parameters are required and must not be None.
    for name in ('webhook_id', 'client_id', 'client_secret'):
        if params.get(name) is None:
            raise ValueError("Missing the required parameter `%s` "
                             "when calling `webhooks_webhook_id_get`" % name)

    # The webhook ID travels in the URL path and the credentials as query
    # parameters; this endpoint uses no request body and no auth settings.
    path_params = {'webhook_id': params['webhook_id']}
    query_params = [('client_id', params['client_id']),
                    ('client_secret', params['client_secret'])]
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json; charset=utf-8', 'text/plain; charset=utf-8']),
    }

    return self.api_client.call_api(
        '/webhooks/{webhook_id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Webhook',
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def webhooks_webhook_id_put(self, webhook_id, uri, client_id, client_secret, **kwargs):
    """
    Update a webhook's URI Note: You can not change the webhook type. To do that you must create a new webhook, authenticated by the user.

    Synchronous by default. Supply a `callback` keyword argument to make the
    request asynchronously; the request thread is then returned instead of
    the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int webhook_id: The webhook ID (required)
    :param str uri: Webhook destination URI (required)
    :param str client_id: Your application's client ID (required)
    :param str client_secret: Your application's client secret (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper only ever want the deserialized
    # data, never the full (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths simply forward the low-level call's
    # result, so no branching on `callback` is needed here.
    result = self.webhooks_webhook_id_put_with_http_info(
        webhook_id, uri, client_id, client_secret, **kwargs)
    return result
def webhooks_webhook_id_put_with_http_info(self, webhook_id, uri, client_id, client_secret, **kwargs):
    """
    Update a webhook's URI Note: You can not change the webhook type. To do that you must create a new webhook, authenticated by the user.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.webhooks_webhook_id_put_with_http_info(webhook_id, uri, client_id, client_secret, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int webhook_id: The webhook ID (required)
    :param str uri: Webhook destination URI (required)
    :param str client_id: Your application's client ID (required)
    :param str client_secret: Your application's client secret (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    :raises TypeError: if an unrecognized keyword argument is supplied.
    :raises ValueError: if any required parameter is None.
    """
    # API parameters plus the transport-level keyword arguments this
    # method forwards to `call_api`.
    all_params = ['webhook_id', 'uri', 'client_id', 'client_secret',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    # Build the params dict explicitly instead of via locals(): locals()
    # also captured `self`, and iterating kwargs with dict.items() removes
    # the previous dependency on six.iteritems (works on Python 2 and 3).
    params = {'webhook_id': webhook_id,
              'uri': uri,
              'client_id': client_id,
              'client_secret': client_secret}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method webhooks_webhook_id_put" % key
            )
        params[key] = val

    # All four API parameters are required and must not be None.
    for name in ('webhook_id', 'uri', 'client_id', 'client_secret'):
        if params.get(name) is None:
            raise ValueError("Missing the required parameter `%s` "
                             "when calling `webhooks_webhook_id_put`" % name)

    # The webhook ID travels in the URL path; the new URI and the
    # credentials are sent as form fields. No query string, no auth.
    path_params = {'webhook_id': params['webhook_id']}
    form_params = [('uri', params['uri']),
                   ('client_id', params['client_id']),
                   ('client_secret', params['client_secret'])]
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json; charset=utf-8', 'text/plain; charset=utf-8']),
    }

    return self.api_client.call_api(
        '/webhooks/{webhook_id}', 'PUT',
        path_params,
        [],
        header_params,
        body=None,
        post_params=form_params,
        files={},
        response_type=None,
        auth_settings=[],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 45.765189
| 326
| 0.592228
| 3,049
| 27,871
| 5.177763
| 0.064939
| 0.050168
| 0.036612
| 0.035472
| 0.940774
| 0.929056
| 0.922151
| 0.910053
| 0.907012
| 0.899981
| 0
| 0.000957
| 0.325248
| 27,871
| 608
| 327
| 45.840461
| 0.838509
| 0.328872
| 0
| 0.746711
| 0
| 0
| 0.206233
| 0.04227
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036184
| false
| 0
| 0.023026
| 0
| 0.111842
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
148c7e0b7e6638ac5921b25bb82940f44756ea4d
| 1,376
|
py
|
Python
|
PythonProjects/extras/aniversario.py
|
Luis12368/python
|
23352d75ad13bcfd09ea85ab422fdc6ae1fcc5e7
|
[
"MIT"
] | 1
|
2022-03-05T17:07:02.000Z
|
2022-03-05T17:07:02.000Z
|
PythonProjects/extras/aniversario.py
|
Luis12368/python
|
23352d75ad13bcfd09ea85ab422fdc6ae1fcc5e7
|
[
"MIT"
] | null | null | null |
PythonProjects/extras/aniversario.py
|
Luis12368/python
|
23352d75ad13bcfd09ea85ab422fdc6ae1fcc5e7
|
[
"MIT"
] | null | null | null |
# Birthday-greeting script: asks for the person's name, then prints
# ASCII-art banners and well-wishes. All user-facing text is Portuguese.
nome = input("Insira o seu nome: ")
print()
# Banner spelling "HAPPY" drawn with zeros and underscores.
print("____________00____00___00000___0000000__0000000__00____00___________")
print("____________00____00__00___00__00___00__00___00___00__00____________")
print("____________00000000__0000000__000000___000000______00______________")
print("____________00____00__00___00__00_______00__________00______________")
print("____________00____00__00___00__00_______00__________00______________")
print()
# Banner spelling "BIRTHDAY" in the same zero/underscore style.
print("0000000__00__0000000__00000000__00___00__00000_____000000___00____00")
print("00___00__00__00___00_____00_____00___00__00__00___00____00___00__00_")
print("000000___00__0000000_____00_____0000000__00___00__00000000_____00___")
print("00___00__00__00_000______00_____00___00__00__00___00____00_____00___")
print("0000000__00__00___00_____00_____00___00__00000____00____00_____00___")
print()
# Personalized greeting ("Happy birthday, <name>!") plus four wishes.
print(f"♥ . ♥ FELIZ ANIVERSÁRIO! {nome} ♥ . ♥")
print("FELICIDADE: ao máximo!")
print("ÊXITO: em cada fase da tua vida!")
print("BONS AMIGOS: para todas as horas!")
print("SONHOS: se convertendo em realidade!")
print()
# Closing line ("Kisses to the heart!") with a decorative divider.
print("♥ BEIJOS NO CORAÇÃO! ♥")
print("¸¸.•*´¨`*•.¸¸.•*´¨`*•.¸¸.•*´¨`*•.¸¸.•*´¨`*•.¸")
print()
# Block-character drawing used as a sign-off decoration.
print("───▐▀▄─────────▄▀▌")
print("───▐▓░▀▄▀▀▀▀▀▄▀░▓▌▄▀▀▄▀▀▄")
print("───▐░▓░▄▀░░░▀▄░▓░▌▀▄───▄▀")
print("────█░░▌█▐░▌█▐░░█───▀▄▀")
print("─▄▄▄▐▀░░░▀█▀░░░▀▌▄▄▄")
print("█▐▐▐▌▀▄░▀▄▀▄▀░▄▀▐▌▌▌█")
print("▀▀▀▀▀▀▀▀▀▄▄▄▀▀▀▀▀▀▀▀▀")
| 45.866667
| 77
| 0.713663
| 194
| 1,376
| 3.896907
| 0.257732
| 0.296296
| 0.34127
| 0.37037
| 0.284392
| 0.256614
| 0.220899
| 0.220899
| 0.220899
| 0.193122
| 0
| 0.205793
| 0.046512
| 1,376
| 30
| 78
| 45.866667
| 0.230183
| 0
| 0
| 0.233333
| 0
| 0
| 0.783588
| 0.610022
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.966667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
14b6e22de2a8e54def35b067d3fcb63b05b4dbe0
| 143
|
py
|
Python
|
Tools/parametric_model/src/tools/__init__.py
|
ethz-asl/data-driven-dynamics
|
decf4bec19c9fc4a1789f5eb4d6e6003774c75d6
|
[
"BSD-3-Clause"
] | 38
|
2021-12-20T22:52:25.000Z
|
2022-03-15T09:45:01.000Z
|
Tools/parametric_model/src/tools/__init__.py
|
ethz-asl/data-driven-dynamics
|
decf4bec19c9fc4a1789f5eb4d6e6003774c75d6
|
[
"BSD-3-Clause"
] | 18
|
2021-12-21T09:36:20.000Z
|
2022-03-20T13:03:27.000Z
|
Tools/parametric_model/src/tools/__init__.py
|
ethz-asl/data-driven-dynamics
|
decf4bec19c9fc4a1789f5eb4d6e6003774c75d6
|
[
"BSD-3-Clause"
] | 8
|
2021-12-20T22:52:34.000Z
|
2022-03-30T21:13:59.000Z
|
from . import ulog_tools
from . import dataframe_tools
from . import quat_utils
from . import math_tools
from .data_handler import DataHandler
| 23.833333
| 37
| 0.825175
| 21
| 143
| 5.380952
| 0.52381
| 0.353982
| 0.265487
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13986
| 143
| 5
| 38
| 28.6
| 0.918699
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2127df4d848e2d01828c4b33d9f80cac180f63d9
| 2,444
|
py
|
Python
|
Tasks/FileSystem.ScpUpload/test_task.py
|
esprengle/python-droppy-workspace
|
fd0aeb49c8d71187a9c0c7be2a0b55f1ad7cf60b
|
[
"MIT"
] | 2
|
2020-05-05T04:13:14.000Z
|
2020-08-02T21:46:57.000Z
|
Tasks/FileSystem.ScpUpload/test_task.py
|
geberl/python-droppy-workspace
|
fd0aeb49c8d71187a9c0c7be2a0b55f1ad7cf60b
|
[
"MIT"
] | 4
|
2021-03-19T16:11:47.000Z
|
2022-01-13T03:42:57.000Z
|
Tasks/FileSystem.ScpUpload/test_task.py
|
esprengle/python-droppy-workspace
|
fd0aeb49c8d71187a9c0c7be2a0b55f1ad7cf60b
|
[
"MIT"
] | 3
|
2020-01-03T07:25:41.000Z
|
2020-08-31T13:20:23.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import pytest
import task
def test_input_empty(tmpdir):
    """A Task constructs successfully when given empty (but existing)
    input/output directories and every required argument."""
    input_dir = tmpdir.join('0')
    os.makedirs('%s' % input_dir)
    output_dir = tmpdir.join('1')
    os.makedirs('%s' % output_dir)
    t = task.Task(input_dir='%s' % input_dir,
                  output_dir='%s' % output_dir,
                  username='myuser',
                  server_address='localhost',
                  remote_path='/')
    # isinstance(t, object) was a tautology (everything is an object);
    # assert the concrete type so the test can actually fail.
    assert isinstance(t, task.Task)
def test_unfilled_username_arg(tmpdir):
    """Omitting the required ``username`` argument makes Task exit."""
    input_dir = tmpdir.join('0')
    os.makedirs('%s' % input_dir)
    output_dir = tmpdir.join('1')
    os.makedirs('%s' % output_dir)
    with pytest.raises(SystemExit) as exc_info:
        # The instance was previously bound to an unused local `t`;
        # only the raised SystemExit matters here.
        task.Task(input_dir='%s' % input_dir,
                  output_dir='%s' % output_dir,
                  server_address='localhost',
                  remote_path='/')
    assert exc_info.type == SystemExit
def test_unfilled_server_address_arg(tmpdir):
    """Omitting the required ``server_address`` argument makes Task exit."""
    input_dir = tmpdir.join('0')
    os.makedirs('%s' % input_dir)
    output_dir = tmpdir.join('1')
    os.makedirs('%s' % output_dir)
    with pytest.raises(SystemExit) as exc_info:
        # The instance was previously bound to an unused local `t`;
        # only the raised SystemExit matters here.
        task.Task(input_dir='%s' % input_dir,
                  output_dir='%s' % output_dir,
                  username='myuser',
                  remote_path='/')
    assert exc_info.type == SystemExit
def test_unfilled_remote_path_arg(tmpdir):
    """Omitting the required ``remote_path`` argument makes Task exit."""
    input_dir = tmpdir.join('0')
    os.makedirs('%s' % input_dir)
    output_dir = tmpdir.join('1')
    os.makedirs('%s' % output_dir)
    with pytest.raises(SystemExit) as exc_info:
        # The instance was previously bound to an unused local `t`;
        # only the raised SystemExit matters here.
        task.Task(input_dir='%s' % input_dir,
                  output_dir='%s' % output_dir,
                  username='myuser',
                  server_address='localhost')
    assert exc_info.type == SystemExit
def test_external_executable_na(tmpdir):
    """Pointing ``scp_executable`` at a nonexistent path makes Task exit."""
    input_dir = tmpdir.join('0')
    os.makedirs('%s' % input_dir)
    output_dir = tmpdir.join('1')
    os.makedirs('%s' % output_dir)
    with pytest.raises(SystemExit) as exc_info:
        # The instance was previously bound to an unused local `t`;
        # only the raised SystemExit matters here.
        task.Task(input_dir='%s' % input_dir,
                  output_dir='%s' % output_dir,
                  server_address='localhost',
                  scp_executable='/this/path/does/not/exist/scp',
                  remote_path='/')
    assert exc_info.type == SystemExit
| 27.460674
| 69
| 0.577741
| 299
| 2,444
| 4.461538
| 0.180602
| 0.11994
| 0.097451
| 0.112444
| 0.835832
| 0.835832
| 0.823838
| 0.770615
| 0.770615
| 0.770615
| 0
| 0.00634
| 0.290098
| 2,444
| 88
| 70
| 27.772727
| 0.762536
| 0.015548
| 0
| 0.8
| 0
| 0
| 0.056988
| 0.012063
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.083333
| false
| 0
| 0.066667
| 0
| 0.15
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d11c4d3cf0b62a39c53807ad85b5ccf281f733f
| 18,941
|
py
|
Python
|
django_tctip/migrations/0001_initial.py
|
Hopetree/django-tctip
|
d15a47aab0f9f3e18d407df38fbd81f296cc4bfe
|
[
"MIT"
] | 6
|
2020-07-30T14:44:45.000Z
|
2021-11-16T13:53:32.000Z
|
django_tctip/migrations/0001_initial.py
|
Hopetree/django-tctip
|
d15a47aab0f9f3e18d407df38fbd81f296cc4bfe
|
[
"MIT"
] | 1
|
2020-07-15T02:11:55.000Z
|
2020-07-15T05:06:16.000Z
|
django_tctip/migrations/0001_initial.py
|
Hopetree/django-tctip
|
d15a47aab0f9f3e18d407df38fbd81f296cc4bfe
|
[
"MIT"
] | 1
|
2021-11-16T13:53:42.000Z
|
2021-11-16T13:53:42.000Z
|
# Generated by Django 2.2.8 on 2020-07-15 04:28
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Tip',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='默认提示栏', max_length=20, verbose_name='名称')),
('create_date', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('is_on', models.BooleanField(default=True, verbose_name='是否使用')),
('minScreenSize', models.IntegerField(default=1200, verbose_name='最小显示屏幕尺寸')),
('headText', models.CharField(default='欢迎打赏支持本站 ^_^', max_length=20, verbose_name='最上面的文字')),
('siderText', models.CharField(default='公告&打赏&微信群', max_length=20, verbose_name='侧边栏文本')),
('siderTextTop', models.CharField(default='-72px', max_length=10, verbose_name='侧边栏文本高度调整')),
('siderBgcolor', models.CharField(default='#3a4c5b', max_length=30, verbose_name='侧边栏背景颜色')),
('siderTop', models.CharField(default='12%', max_length=10, verbose_name='侧边栏高度,可以px,em,或百分比')),
('notice_name', models.CharField(default='公告栏', max_length=10, verbose_name='公告栏名称')),
('notice_title', models.CharField(default='最新公告', max_length=20, verbose_name='公告栏标题')),
('notice_text', models.CharField(default='1. 最新公告<br>2. 公告换行', max_length=100, verbose_name='公告栏内容')),
('notice_flag', models.BooleanField(default=True, verbose_name='公告栏是否显示')),
('alipay_name', models.CharField(default='支付宝', max_length=10, verbose_name='支付宝栏名称')),
('alipay_title', models.CharField(default='扫描二维码打赏', max_length=20, verbose_name='支付宝栏标题')),
('alipay_desc', models.CharField(default='谢谢支持\n请用支付宝打赏', max_length=20, verbose_name='支付宝栏描述')),
('alipay_flag', models.BooleanField(default=True, verbose_name='支付宝栏是否显示')),
('alipay_qrimg', models.TextField(default='data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAAMrklEQVR4Xu1db4hcVxX/nTezTaBiZoJ+qtJNs7OxRXQjfhCsZCMUg1SToGBFsKnG1p0purFIFSrZUP9EGkwimVmCLU1AJJWWbNBC9UtSTYpCJZv4Kfu23fWD0A+yMxv6IZvM3iP37cxmd+a9efe+93bmvs6dr3Puufec83vn3D/nnkuwv77WAPW19FZ4WAD0OQgsACwA+lwDfS6+9QAWAH2ugT4X33oACwB9DeQnZx9j5geJsR2E+/Q5BLRgzhHRiA4/Fvg3ZehfzLglQFXi5T/WSsPTOjxUaXPlmRFynE8A2E7AkGq7MDpmfBTge8Po1v3PlAVxnRj/E07mHwS+UR3b/mctHoDeMjB/6p0vk8PPAPxF3Y66Rc+MRZDzN9TrL1R/sOPvSfSbn3znUbD4Lhi7ibAlCZ4bwYOBN1AXv9SRWzkE5CvuUTC+b7IC1iqVgZsCeGaxWHgxjrJTJzdjUUA8tVja8YqK3EoAyFVmn3PAz6swNIlGegMGjtRKheNRxrWl7P4qQ/hJlLa9bCPlVgVBKAByx969n+5dvkyMj/VSqKh9M/MNdgb21Ma2zevw8Ny+EL9Pi8drk43xtridfaR2aFutk9yhAJAukIBndZRnGu0y4+hiqfBTnXHlyu5LDuE7Om1Mo10GvhcWAhUAMHuZwJ/3FY7ppHB4ygjBGSPEGCfC/a3jYeBKtVh4WHWcucm5QeL66wQ81MaL8R8GT8AhLY+i2rcunSNoHxMfIPhOTl9bKBa+Hs8DlN2anxsUoP214pAZxm9ImDs+l6N76tOtIFiZC/Co6vIwN+mOOoyLvsa/nR0Jc6u6RoxLHzxevlEtDctla+Av1ANsrbjs13qhWAhtG1ewKO3zFfcSAbta2wrC7tpY4ZIKzy0V92AG+F17XKWTC6WhcRUe3aaJaqdQI0Zl3G0FNPtLAgC50+64s4z2lQPjyEKpMNEr2Tr1G9VOFgA+WrUAWKOUqMjq1VdiPcB6zYeFausBrAfo/K1aD7BGP3YOcFcZYa7FhoDuaiDqh2pDgA0BNgS0aiDJVYDcnMKm+qd1/EGtWHhTh17SpsIDyC1WcL1tqzZI2CiKMGkVkCvPHHCIXtY1JoPnmbFfdefSeAB4W7Sb6ucJGNVRBjNqDD5UKw2fUW1nCgCkzM6melV13K10zDxdLQ3vVG1vtAfYWpk5A9DjqsK00gnKblM9zjUGAAHnCTo60JloGw2AfNmtEiGnI/xaWp19fAsAAzeCgtCpCggLgHBNGe0BLADCDehH8YEJARYAFgC+OQWqarEhIFxTNgQ0dGQngXYSGDsjKImdwKA0rfBv+S6FnQM0dGFDQDhsbAiwISBS7mZXTgPtKiD8C7bLwA46SmMI6HRAowIHBharxYLy7qnRISBfdqeIsFdFcD+atAIgV3HHHfhkFysoQjA/oXMIZjQA4syIGXytWhxWrhlgyjKwaWNZU8AB7VOwuUciCDVQdkr18KvJ12gAyEHKXABH1A+oKsJTBnhe5yuQbUwDgI68cWiNB0Ac4XTaWgAYuBGkY8C4tBYAFgBG7ATGBbJuexsCDNsI0jVgXHoLAAsAc3cC46Jbp72dA9g5QOw5wEo5PPF6G/D68mpY2f0rCI+0KkMsZfOmVcrw9gHKM1f9ik3q7CZ2qBByoVoqKG/q6HiuOLTeHgvX51p5MPDfarHQsbiXwmHQ7BmA21K6GbjES9n9poCgkYd/GEBbBQ9m3GTwLtWLFo17DG8R8GAb8JnHa6Xhk3EMlmRbr56RuHPeD/Qs8Ifq04VvdeovFACB5VIaXOUFBhB1LEWWpMC+vEJKzOoWiZJ9bK24rwL4WtDYjZAbPEigwaAxCsaPwmokhgKgUXjpOhE+vuGG3KAORAaHak8VTuiw31K
ZfTIDPq3TxiRa6f6Zsg+HnSmEAkAKFeYFTBK8LQ4y/4lvD3xbN1R5wN9cP0eML5ksX+DXD/pZrTj087CxKwFAMklj4UQG/glknqwWH7gepgi///OVdz8FLJ/zmwtE4detNoLFS7XSjoMq/SkDwJthrxSMHiPCh1WY94pGJlMQaEpQZiLMBYaNsQGCXxDwaBhtr//33H4Gx3TCnRYAPBD89sYXkKWvAvRZMD5ChE8aIPgigFkC5gVwneriok7JdJXxyzcSiLGHIUYIpHXfX4V/FBq5ugHxHIFmBeMKnOx5XcBrA6B1oDLhAQ4ppy5FETSsjWoByDA+qv97dQ5QD5x9q/KJT5ed1zV4a5+xARBfCMuhlxqwAOil9g3o2wLAACP0cggWAL3UvgF9WwAYYIReDsECoJfaN6BvCwADjNDLIVgA9FL7BvRtAWCAEXo5BKMBkKu4bU+/rCqLeVE1waOXCja970gAkG/qMXinAzwAwR8CMMiE98OFpc0Abw6kY3qfiJVf92KmyyCvf/l7j4jeW2a+QkvZV3WPf8PHvkLh3fVzaK8QXMPtgbNx+8mdcvc6GYyIZUzXni5cUB2HNxZZh3igvstrL/hClA9CCwDyQATMj4PxuTiFH3WEjETLeHuZcDrszTxd3lvLsydA/MNmu0Yp291RFC95bC3PvAyi1fuSMsuIbw/sVgHVStranavrMoKYzyyUhp/QkUsZAGl7Q1cqgYFf81L2qIpCVZTmf/mCzy4Uh7Uuva56EqKrrf2qXgsPunoumHfqAFIJADIjyGG8YPRXH2BBoZgZEwaAwExh4M1qsaBVBHvl63cnQJBJrOt/iqnnQfWXdbKfZcehAOj0imaY0kz433tOfll8JW5+gGkASOICjBIA0pwPuBqrwceqxeEfxwGkcQAIqLqSuAfIV2bPEfgbbZ5KvqFLNA7KTMdNSohjmGZbOTsn0IRfKRomXKuOFZSrjPiNxzgAJPBCqpIHyAe9Haw52UjCyCo88mV33u8Bad0vo7WvvgVA1GvHKsbaCJqkYqMFQEMDFgArirAeoOWT0KljuxFfehDPvvEA3ZoEWg9wF2r5ilsjYEvLwj3SRlDcfQA/jyTvQ/BSdlBn4yt0H8AC4K655TNwRHSiCQJZw5CXBkZ1FN7kFhcAXliqzO4j5hNy0suMCwye0NkFVFoFWACs/97X3Qm4lZ2OYnzJMQkAJBFarQdIQosReFgARFCaSpONmgSq9K1DYwGgoy0N2rQAILCQtOJhkIZKOpJ2LQQ0Zq17GVDakiVgWoCma8WhszrCpgYAMrEkxnGwjk460XYFAFEfUZYDZ2CqWizsVxV4IwGwUvD6zmEm+QYyzTPzVJx6Qa1HurqripVVgDgM0CCBp4QzcET3XKYrAIj9dKzGucNGAsCvAplqAkcQgJvl5AV53m5KFejS+A74/Fp6+ep4tTi8TZVH15aBaXsyxk+BHUqxRUoI0TGSH21relqTRvfQqyse4IMBAHfUYVxsNQZHzAiKC4CkPJ0FgKIlkj4MUuw2kMwCIEA1SSmmlb0FQItGdE4DbQgI/t5X7wUIrumsKJICug0Bir54IzxAvuKeJ2C19rDOvQALgJSHgCBACeBQrRhe1dQCIO0ACHpTUHEr2AIg5QCIexhkAWABEPthjK7tBAalaivOv+RrmrtVi0Em9WVs9DKwzzzAzBmA2h6dUAGAbp6bBYD6x9I1D9B4gWOKgOCCDz5o4MbtI51DEgsAAwHge7jiW2M4fu3bfgFAqg6DVFx9UjT9AwD/6+U686WuhYCkjKvCxwIgJSFAxZhRaCwALAASWR+bvgwMWkbaEJDQvXkLgIYG7M2gFUUkfRoYdyPIeoA+3wruGgByZfeGQxhu1bdurIkyodNt41s7r8Ek7nj72QP4PqHqFUmUNYKWMheiXpDUNXAneplZQ8QTvm/oKr6i2ZH/ZLJJoakJAbnK7HMO+PkkjdVtXkz4S3WssCdOv33rARp1Ai8TcF8
cBfayLRN9szo2dC7OGPoWAN4MuOwecgi/iaPAXrVl0CvV4tBjcfv3bvD43OWLei8g8LqcYkZQ1yaBHgCOz+WcTfUXOz2nHlfBG9T+NbGUPZjUHMW3RIyiwdr3FeYGHa7PRZ1cBwFIUHabzv3A0Kzg5gAlCPie+rMOUDT+7WDCNWZ6FUuZU0kZv7kXQAx5rO3VCZJfPy9l90Xto7XkDJhOLpSGxlU/hrUngl7eBPN4rTR8RrW9pFMGwCoQJt1R+YYuMT7DhIeacwN5s1Wn40RpiUBM0wLiJpHzFt/KvBHVKCrjkvMina8sjKecXyBiuZlmyRrVjKnWsWgDIEwY+3+6NGABkC57JT5aC4DEVZouhhYA6bJX4qO1AEhcpeliaAGQLnslPloLgMRVmi6GFgDpslfio7UASFyl6WJoAZAueyU+WguAxFWaLob/B/rgQSaBDuTUAAAAAElFTkSuQmCC', verbose_name='支付宝栏二维码图片地址')),
('weixin_name', models.CharField(default='微信', max_length=10, verbose_name='微信栏名称')),
('weixin_title', models.CharField(default='扫描二维码打赏', max_length=20, verbose_name='微信栏标题')),
('weixin_desc', models.CharField(default='谢谢支持\n请用微信打赏', max_length=20, verbose_name='微信栏描述')),
('weixin_flag', models.BooleanField(default=True, verbose_name='微信栏是否显示')),
('weixin_qrimg', models.TextField(default='data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAAMv0lEQVR4Xu1dX4hcVxn/fXfuJBsstoI+VWlqRWoRTcUHwUoSae38W5OoYEWwrcb658WNRdruTOiWzKyRFk3Al2JLUxBJJSUbdv5sS6WpJkVB6SY+BQrdPgh9EJINYtPszP3kzM4kk5l755xz752Zc52zjzvf+c7353e/8/f7DsH+TbUFaKq1t8rDAmDKQWABYAEw5RaYcvVtBLAAmHILTLn6NgJYAOhbIN+YfwBEnwHjDgLdqs/BvwUz30JEO3T4MfBPB/QPJu8KmC56Ke+P9a8trurwUKXNvTq/g1q4E45zh+PRp1TbyegY+BjAH5LR9f3uAmgyvH+D8FeHnAvL95ermjz0loGZV5/Iuc3UoyB8VbejMdKvg/nPvMV5unbvob/E0e/sK6UCM34Axm4AN8fBcxQ8mLAClxZ19FYeAvIrpcPE+LHJBug1KgOXibxHq5nF56IYO2l6A1hvOfyjxv2Vl1T0VgJAoVEsAXRIhaFhNOsEPLWcLf8mjFyztYO/ZIcfD9N2wm2UQSAFwDf+VL7tg6tXzhDw8QkrFbJ7vtBEK7OSPbymw6Ad9j38PikRr183Bv6ennHvW9q9cGmY3lIAdELgYzrGM42WPDq8nD/0hI5chXrxeRB9X6eNabQeeT+sS4ZAOQDqpTNE+LKvcoyjICyZoDgzi9XDHBHd5vM1nK1ly/eoyplpPL7dQarmgO4a4MX8LhMvOHC0Iopq39p0jL1M/BCBBianHnkv1zOL34oUAQqNkgghA8yJsW85VzbC+V0F976+cMvG+xurPiBY91LeLtXlYaFR2gXgdT/np7eld8jCqrYTIzYIkhfgC9Vs5c6oAGA/BtVsWRo9IuoVqnmhXjoNwk6fxrur2fJpFaa5lfn9Dju/G6BlHK3mynMqPMZNU2iUQvlJ6sSwjMdtgG5/cQAgv1KcIya/lcNT1Wx5YVK6Des3rJ8sAHysagHQY5SwyJrUV2IjwI2Wlw3VNgLYCDD8W7UR4Ab72DlA1xyy0GKHgPFaIOyHaocAOwTYIaDfAnGuAsTmVOv91ud14sFy7tAbOvSCNhERQGyxpjk9sFUbpGwoQ8SwERQXAHKN+YccOC/oOpPBa5zifao7l8YDQHwFzSvNkwDEFqv6H+OSR96BenbxmGojU5aBHZ0vqsrdT8fMq7Vc5W7V9kZHgHyjeIxAD6oq00/XRPN21eNcUwAQvD+vbgWdibbRACjUSxdBuEVd9QFK5X18CwADN4KC0KkBCAsAibHMjgABJ1UWAMMt8P8zBFgAaGD9OqkFwHVb2CHADgFqlznsJNBOAiPfCIpjI8guA3uAaFcBoaYAsHMAOweQHtZ1TWSXgR1L2DmAnQMYMQcYdkCjMiAweL2WrSjvnhodAfKN4hKB9qgoHkCTuGWg0CNfL4pElVB5iR68h7UOwUy+Fh5xRnyumi0r1wwwZQjoAlnUFHBazl5V8DPzpRa1llQPvxIxBxBCirsALtyHVA0h6Dx4azpfQTvsGnQfQEfXqLRGDwFRldNpbwFg4CRQx4FRaS0ALACMWQVEBbNOezsEGLYPoOO8OGgtACwAbHawXQUMxhLZeYJ0rzlsaIkjrIXhEccksF0Or5Wq+fQ/falh+fr8q0TOff3GcGfcj5hWKaOz+/ZWQLFJ9d3EoAoh4FO1bEV5UycMgMO06eyxvNPflpn/VctVhhb3kkaA2UbpGAN+V7pPuzPuPlNA0CkP8yQR+VXwuOylvJ2qiRYdXm+SqIba/0eYq2bKR8M4ahRthPNTnDrpC3rCH6qZ8neH9SsFQGFlfj/8yqV0uIoEBiIaWopsFIr38pSVmGVAq0jU5lzi4AkQfzNIdiP0Bm8n0PYgGQn4uaxGohQAmxkuG+cB+sSoHTkq/kx8oJa
pHNHhn6/PP0LkPKvTxiRaEf5b1LpHdqYgBYBQKrBokkkaB8jigZe3zKS/pztUtYeBKxvHCXR/AtT0EZEPVrOVskx2JQBshsTkFU5k9v7W2oJHVu5dPC8zhN/vmdfmP5e6Ssd95wJhGI6pDYOfr2Ur+1W6UwZAe4a9WTD6JwA+rMJ8UjTiMoUDWtpAc0EWAmUyChCkrzoVJhRktJP+XYR9OHhGZ7jTAkAbBK8d/Ao2vK8D+CIRfRSMz05ccfA6mN5mx1sj8Hm47us6JdNV5BdvJDhwMuxhBxxo5fur8A9Jcxke3gHR20R8dgPNk7qA1wZAv6CdCw/KV5dCKjq0mWoByLj67qy7A2ffcfUj49NEc03X4f08IwNAJqT93WwLWACY7Z+RS2cBMHITm92BBYDZ/hm5dBYAIzex2R1YAJjtn5FLZwEwchOb3YEFgNn+Gbl0FgAjN7HZHRgNgNn6Qb+nX9oWbbmtddULHma7YLLShQKAeFPPY+9uh1Of9NC6CXC2E/AfuSo8A6KZQDpu81B+3QvAGRBuEvwYeM9heo+pddbduuWE7vGvXPZNis7W9x4QLrlb3Rej9pNbKe5xmHZ4xKv1TOWUqhyCThxZX/1gY2e7fco7FeaD0ALA5qPRzoPk4UsRCz/q6KlNKx5NJPKejfpsbH/HhXrpCAg/u/Z/UcrW9XaHMbzgkW8UXyDQtXxJccsovS29WwVUnfsKb/XeCGLwsVq28rCOwZQBkMA3dMGEX6W3uodVDKpiNL8b0gx+sZataCW99kSSt/r7VU0LD0o991Le3TqAVAJA+16g5zxt8lcf7EC1mzEyAASmuDPeqObKekWwN8u7i9fHnvTpV+nq+ZD6y8q3n0XfUgAMe0VTZjRDfl/nNM1GvR9gGgDiyH9QAoDsVrAhTh4qBoOfqWUrv4giq2kAGFJ1Jd4IUKiXjoPw7X7jMfO7Dmhug5qrUS8lRHFMt62YnVOLFgJK0WhVGfGTxzQAjC8CBLwdrDvZiMPJKjzy9eKa3wPSALS+jIEVQEC2ECY0BxgnAEJlnao4axQ0cRnGAqBjgWlMDrVDQI8FLAA2jWHaHGB8k8CQ9edGEd5VeI5qCBB95xvFSwS6uVeOsBtBUfcB/AAp8iHSM+ntOhtf0n0AGwGuu1s8A0egIz0gOOfOuLt0DN7lFhUAgs9svbTXAx8Rk14Gn+IUL+jsAqrtA9gIcEPg6c0JcGfc1TDO7wwpkXYCVaKhCo2NACpWGgFNHBEgDrEsAOKwYggeFgAhjKbSZJSTQJX+VWmGFJJWOgxS7UdGN7YIIGatzLwnoH7PgJzibByE1Vq28qJMid7fkwKAzsWS0MfBOjYZRjsWAIR9RLktOGOpmivvU1V4lABo1+NBShzh7iLQGghLUeoF+Rzpaq0qOquAJ8WlECZeaqH1lO65zFgAEPXpWJ1zh1ECIF8vDlQgU73AEQTgbjl5Yqwu58pLqkAXzmeCeJD72p94dbyWrdyuymNsy8CkPRrlZ8CgUmxhD4N0nORHO3A97TqR1qHXeCJAwl4O9TV4zKeBMQAgclFsGwE0vBD3WYBG176kcQ11NgIoesICoM9QsiLENyzN7BAQCLNuXoDIM9BZUdgIEGDSuAzTz34UEaBQL50E4VrtYZ28gLj0tEPAhIaAIEAx84FaTl7V1AIg4REg6lawBUDCARD1MMgCwAIgOfsAQ65qK47A6le64/oyRj0JnKoIMCSPTQoA3XtuFgDqH8vYdgLbbw6831wCIbDggx8SutlHOockFgAGAsDPuX41huOofTtFALixVoHJh0HSOB8jwdQAIDi93LzTwBj9K2VlAZCQIUDqyZAEFgAWALGsj41fBtohwD9E2AhgI4CNANnyadURdCyngarCxEFnI0DMESDfKF4g0Kd9nKO13IjDuTIefrXzetpEkjfu+wCRt4LHNQfIrcyfcNgZfEKVcYmJ59Iz6VNhEyRlDtX5XdysIQ8Lfoknqq9oDutvagFQaBR
LAB3ScYZptAx+pZatZKLINbUA6LxOfYaIbo1iwEm2ZXjfqWUXj0eRYWoBIIw22ygdYODXUQw4sbaMl6q58gNR+w/K5QubGDIkXU4pOXTIHEJrriNdBQjDbZ7mtZ4b9px6VAOPpD3Ty+621P645ih+JWIAKDmsX7/ATCPFcnZBAGqiebtOfqASALogaP239Rg7/FPT3w4GcA7gE+5M+rdxOV/YoJ3hDF66ViKG8Ya7zd0bto+BkjOMo9VceU71Y+hNDxP3Jhg8V88uHlNtL+iUAdBl2jYCIQPP+wJAd12bG3jC6BP6c9qKrHqMyyDvzfTMlpWwTlHRQHy9Ol+ZjKewadhyM92SNWGfz9UGgEwZ+3uyLGABkCx/xS6tBUDsJk0WQwuAZPkrdmktAGI3abIYWgAky1+xS2sBELtJk8XQAiBZ/opdWguA2E2aLIYWAMnyV+zSWgDEbtJkMfwfO0cuF3rAnEoAAAAASUVORK5CYII=', verbose_name='微信栏二维码图片地址')),
('wechat_name', models.CharField(default='微信群', max_length=10, verbose_name='微信群栏名称')),
('wechat_title', models.CharField(default='扫描二维码进群', max_length=20, verbose_name='微信群栏标题')),
('wechat_desc', models.CharField(default='加入微信群\n获取更多信息', max_length=20, verbose_name='微信群栏描述')),
('wechat_flag', models.BooleanField(default=True, verbose_name='微信群栏是否显示')),
('wechat_icon', models.TextField(default='data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB4AAAAeCAYAAAA7MK6iAAAFx0lEQVRIS62WeWxUVRSHv/teZ6btzNhKxUKpUChK2MsikWhCWERlDTE0wQTBSIwGIRCUJWAkxg0x7BJwASXEP4wBIVCWsoMsLmCLbAKlgE5Liy3tzJTO8t41d950yhRoB8P96+Wdc+93z7nnnvsTNB1nncPQxWzCYhAadsy7PBL7IaJuknLsrMMrl9HXV9kwucEMv6Xl8ohcgZAjIrAwIBNjNOulAQ7AMP8loC2ip3ex8rfApx256LZdpGm51Ej+d5TN7UDBUwRUG4vp6Z9tgYtchWRow6huLkRla7CrMP7HSFLRC/DL0YIi51CStT2EuEekJpi3QUYPWnNY31I5A5odhArlAYZbgFfuFBQ/sgebHEqwyWTTByKJvNQBDHcNpmtyV1rp6YRkiLJwOYd9Ryn07ac6eB30ZBB2kAkUhYo6TIXglKsecee2JRg+eqb2Z2GbBYxJG0mSUN53jyuBq6y6uZYVlSsIEwCRmmhFGoIit4wrJsPL2EfzWdd+La2S0hPK4daaAiaWvkatUQ1aSstwoar6pLsxP4aPge7B7HtyO8lacgRaHrhBG0fmXRuoN+rxGXU8Zm8VsW2+tY3xJfkYQtWr3uKGBb9HwTKMDRtHuuxjgKtfZOJXZeuZf2kOCzp9wPR2b8YWM6RB/rlXOVJziL29dtHD2S1im1z6Ft9VrgHdnQD4tyjY8DEsbTSFXbbEJq0u/5JpV6bzWYfFzMqaFgce/Vc+R2sPcqrX73R0dIjYCmv2M/z8ENB0iGTs/tdO8GsUHPbyXvaHfJA9P26310Mesm1tEdFe02A0MfEEyynxX+G0/ywBM4ApTDb7tnEpcJGK4N8gw6C7Yn3qzoUFvyiwhJCPJTkrmZn1dotpUg7F3jNsu1lAa3tr+qf1IcuehUt3kqw5qDZqOFh7mLU3vqLw1nbQU0DY4opOcNxlFVfIx9zs9/kkZ2GL4IPVhyms2s9rWRPJTel4X3+18GLPUuZcmweRomuEC45FwYafZ13DONJ7d7Pgy3UlfOPZwLyO7+DWXZTUlWJKk87OTrF5ZfXlVIWq6O62iu7jfz5nfum7oDtjaRccjYIx0EKSHd23MzxjyH3hS66uZGjGIHq7eiGl5KkTeQSMABefKcKhW+1zXPEEfqrYxN/PldIuuS2mlAwsHsIv3kOgqyaj7vHPDWD1dNXRzZbHgT67ae3IuAteEazkW89GZufMjNnWlH1NSIaZ2vYNNGFV8aabWyn2n2FO9gxS1PkCy/9ZzYxLU8FmRS04cgdYeQR9DHQOYmOP9XRKjT+/Iu9pjt06wZtPTGmxDpo67Kray4t/jgJNtS0dwaEmYCHhtp/13TcwOWti3HxPoIwdlbt5PXvSA4N33NzNiDNjQFNZUeCDTcBmgMcd2Vx8+g9K/df46PIiTAzy3HkYGJSHKljdbekDgz8tXcK8S7PAru61SvaBpuB6eqT25eX0sSy9vpxas8LSKYbFSrVncHLAcbq4OicMv23U0+/Ec5yr/8O60xHw/iZg9Ve9+yEf2OzWYx8bqtH4eT59FNv6bsKuq3vZ/FCVP+PcXFZc/wwcVrQWeN89wDGJ06gF4+ABP6NajWNV92V0cLZvlrykZBWzzk+D5FSIVn0jOAHhEL+6hKCfNkk5TMmaxMjMEXR25uK2uVDPpWoqF7wXeCnzBX6s2MKUM5PA1hgtJmFBocuDRttIeu8VYHPxGAEIhRDCRbajHU4tlYAM4gl6CASr6Zk2gDGZI1nmWYU/IqV0sKSPR7DH+Sl2MUcplwfW0THRboAZtDSX6smaLaLXCNeBKRA2JzGZ41JizywQFLhaYzMuY9fdEfhDH+ocoztUV1hpa58cb/3Z6ZxFmvY5fiX0Hjq5oYwhXUCV/IHjvgmNp7rT
uYgUbXZELt1+iBtQUaobqYR8LQVoSa/wfHVNfDntdI8lyZyCKfphkIloRrskmhhp+kjSz2Oa31Pj/4J8S8H/Bxh7K0IQhCeAAAAAAElFTkSuQmCC', verbose_name='微信群栏icon')),
('wechat_qrimg', models.TextField(default='data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAIAAAACACAYAAADDPmHLAAAMv0lEQVR4Xu1dX4hcVxn/fXfuJBsstoI+VWlqRWoRTcUHwUoSae38W5OoYEWwrcb658WNRdruTOiWzKyRFk3Al2JLUxBJJSUbdv5sS6WpJkVB6SY+BQrdPgh9EJINYtPszP3kzM4kk5l755xz752Zc52zjzvf+c7353e/8/f7DsH+TbUFaKq1t8rDAmDKQWABYAEw5RaYcvVtBLAAmHILTLn6NgJYAOhbIN+YfwBEnwHjDgLdqs/BvwUz30JEO3T4MfBPB/QPJu8KmC56Ke+P9a8trurwUKXNvTq/g1q4E45zh+PRp1TbyegY+BjAH5LR9f3uAmgyvH+D8FeHnAvL95ermjz0loGZV5/Iuc3UoyB8VbejMdKvg/nPvMV5unbvob/E0e/sK6UCM34Axm4AN8fBcxQ8mLAClxZ19FYeAvIrpcPE+LHJBug1KgOXibxHq5nF56IYO2l6A1hvOfyjxv2Vl1T0VgJAoVEsAXRIhaFhNOsEPLWcLf8mjFyztYO/ZIcfD9N2wm2UQSAFwDf+VL7tg6tXzhDw8QkrFbJ7vtBEK7OSPbymw6Ad9j38PikRr183Bv6ennHvW9q9cGmY3lIAdELgYzrGM42WPDq8nD/0hI5chXrxeRB9X6eNabQeeT+sS4ZAOQDqpTNE+LKvcoyjICyZoDgzi9XDHBHd5vM1nK1ly/eoyplpPL7dQarmgO4a4MX8LhMvOHC0Iopq39p0jL1M/BCBBianHnkv1zOL34oUAQqNkgghA8yJsW85VzbC+V0F976+cMvG+xurPiBY91LeLtXlYaFR2gXgdT/np7eld8jCqrYTIzYIkhfgC9Vs5c6oAGA/BtVsWRo9IuoVqnmhXjoNwk6fxrur2fJpFaa5lfn9Dju/G6BlHK3mynMqPMZNU2iUQvlJ6sSwjMdtgG5/cQAgv1KcIya/lcNT1Wx5YVK6Des3rJ8sAHysagHQY5SwyJrUV2IjwI2Wlw3VNgLYCDD8W7UR4Ab72DlA1xyy0GKHgPFaIOyHaocAOwTYIaDfAnGuAsTmVOv91ud14sFy7tAbOvSCNhERQGyxpjk9sFUbpGwoQ8SwERQXAHKN+YccOC/oOpPBa5zifao7l8YDQHwFzSvNkwDEFqv6H+OSR96BenbxmGojU5aBHZ0vqsrdT8fMq7Vc5W7V9kZHgHyjeIxAD6oq00/XRPN21eNcUwAQvD+vbgWdibbRACjUSxdBuEVd9QFK5X18CwADN4KC0KkBCAsAibHMjgABJ1UWAMMt8P8zBFgAaGD9OqkFwHVb2CHADgFqlznsJNBOAiPfCIpjI8guA3uAaFcBoaYAsHMAOweQHtZ1TWSXgR1L2DmAnQMYMQcYdkCjMiAweL2WrSjvnhodAfKN4hKB9qgoHkCTuGWg0CNfL4pElVB5iR68h7UOwUy+Fh5xRnyumi0r1wwwZQjoAlnUFHBazl5V8DPzpRa1llQPvxIxBxBCirsALtyHVA0h6Dx4azpfQTvsGnQfQEfXqLRGDwFRldNpbwFg4CRQx4FRaS0ALACMWQVEBbNOezsEGLYPoOO8OGgtACwAbHawXQUMxhLZeYJ0rzlsaIkjrIXhEccksF0Or5Wq+fQ/falh+fr8q0TOff3GcGfcj5hWKaOz+/ZWQLFJ9d3EoAoh4FO1bEV5UycMgMO06eyxvNPflpn/VctVhhb3kkaA2UbpGAN+V7pPuzPuPlNA0CkP8yQR+VXwuOylvJ2qiRYdXm+SqIba/0eYq2bKR8M4ahRthPNTnDrpC3rCH6qZ8neH9SsFQGFlfj/8yqV0uIoEBiIaWopsFIr38pSVmGVAq0jU5lzi4AkQfzNIdiP0Bm8n0PYgGQn4uaxGohQAmxkuG+cB+sSoHTkq/kx8oJa
pHNHhn6/PP0LkPKvTxiRaEf5b1LpHdqYgBYBQKrBokkkaB8jigZe3zKS/pztUtYeBKxvHCXR/AtT0EZEPVrOVskx2JQBshsTkFU5k9v7W2oJHVu5dPC8zhN/vmdfmP5e6Ssd95wJhGI6pDYOfr2Ur+1W6UwZAe4a9WTD6JwA+rMJ8UjTiMoUDWtpAc0EWAmUyChCkrzoVJhRktJP+XYR9OHhGZ7jTAkAbBK8d/Ao2vK8D+CIRfRSMz05ccfA6mN5mx1sj8Hm47us6JdNV5BdvJDhwMuxhBxxo5fur8A9Jcxke3gHR20R8dgPNk7qA1wZAv6CdCw/KV5dCKjq0mWoByLj67qy7A2ffcfUj49NEc03X4f08IwNAJqT93WwLWACY7Z+RS2cBMHITm92BBYDZ/hm5dBYAIzex2R1YAJjtn5FLZwEwchOb3YEFgNn+Gbl0FgAjN7HZHRgNgNn6Qb+nX9oWbbmtddULHma7YLLShQKAeFPPY+9uh1Of9NC6CXC2E/AfuSo8A6KZQDpu81B+3QvAGRBuEvwYeM9heo+pddbduuWE7vGvXPZNis7W9x4QLrlb3Rej9pNbKe5xmHZ4xKv1TOWUqhyCThxZX/1gY2e7fco7FeaD0ALA5qPRzoPk4UsRCz/q6KlNKx5NJPKejfpsbH/HhXrpCAg/u/Z/UcrW9XaHMbzgkW8UXyDQtXxJccsovS29WwVUnfsKb/XeCGLwsVq28rCOwZQBkMA3dMGEX6W3uodVDKpiNL8b0gx+sZataCW99kSSt/r7VU0LD0o991Le3TqAVAJA+16g5zxt8lcf7EC1mzEyAASmuDPeqObKekWwN8u7i9fHnvTpV+nq+ZD6y8q3n0XfUgAMe0VTZjRDfl/nNM1GvR9gGgDiyH9QAoDsVrAhTh4qBoOfqWUrv4giq2kAGFJ1Jd4IUKiXjoPw7X7jMfO7Dmhug5qrUS8lRHFMt62YnVOLFgJK0WhVGfGTxzQAjC8CBLwdrDvZiMPJKjzy9eKa3wPSALS+jIEVQEC2ECY0BxgnAEJlnao4axQ0cRnGAqBjgWlMDrVDQI8FLAA2jWHaHGB8k8CQ9edGEd5VeI5qCBB95xvFSwS6uVeOsBtBUfcB/AAp8iHSM+ntOhtf0n0AGwGuu1s8A0egIz0gOOfOuLt0DN7lFhUAgs9svbTXAx8Rk14Gn+IUL+jsAqrtA9gIcEPg6c0JcGfc1TDO7wwpkXYCVaKhCo2NACpWGgFNHBEgDrEsAOKwYggeFgAhjKbSZJSTQJX+VWmGFJJWOgxS7UdGN7YIIGatzLwnoH7PgJzibByE1Vq28qJMid7fkwKAzsWS0MfBOjYZRjsWAIR9RLktOGOpmivvU1V4lABo1+NBShzh7iLQGghLUeoF+Rzpaq0qOquAJ8WlECZeaqH1lO65zFgAEPXpWJ1zh1ECIF8vDlQgU73AEQTgbjl5Yqwu58pLqkAXzmeCeJD72p94dbyWrdyuymNsy8CkPRrlZ8CgUmxhD4N0nORHO3A97TqR1qHXeCJAwl4O9TV4zKeBMQAgclFsGwE0vBD3WYBG176kcQ11NgIoesICoM9QsiLENyzN7BAQCLNuXoDIM9BZUdgIEGDSuAzTz34UEaBQL50E4VrtYZ28gLj0tEPAhIaAIEAx84FaTl7V1AIg4REg6lawBUDCARD1MMgCwAIgOfsAQ65qK47A6le64/oyRj0JnKoIMCSPTQoA3XtuFgDqH8vYdgLbbw6831wCIbDggx8SutlHOockFgAGAsDPuX41huOofTtFALixVoHJh0HSOB8jwdQAIDi93LzTwBj9K2VlAZCQIUDqyZAEFgAWALGsj41fBtohwD9E2AhgI4CNANnyadURdCyngarCxEFnI0DMESDfKF4g0Kd9nKO13IjDuTIefrXzetpEkjfu+wCRt4LHNQfIrcyfcNgZfEKVcYmJ59Iz6VNhEyRlDtX5XdysIQ8Lfoknqq9oDutvagFQaBR
LAB3ScYZptAx+pZatZKLINbUA6LxOfYaIbo1iwEm2ZXjfqWUXj0eRYWoBIIw22ygdYODXUQw4sbaMl6q58gNR+w/K5QubGDIkXU4pOXTIHEJrriNdBQjDbZ7mtZ4b9px6VAOPpD3Ty+621P645ih+JWIAKDmsX7/ATCPFcnZBAGqiebtOfqASALogaP239Rg7/FPT3w4GcA7gE+5M+rdxOV/YoJ3hDF66ViKG8Ya7zd0bto+BkjOMo9VceU71Y+hNDxP3Jhg8V88uHlNtL+iUAdBl2jYCIQPP+wJAd12bG3jC6BP6c9qKrHqMyyDvzfTMlpWwTlHRQHy9Ol+ZjKewadhyM92SNWGfz9UGgEwZ+3uyLGABkCx/xS6tBUDsJk0WQwuAZPkrdmktAGI3abIYWgAky1+xS2sBELtJk8XQAiBZ/opdWguA2E2aLIYWAMnyV+zSWgDEbtJkMfwfO0cuF3rAnEoAAAAASUVORK5CYII=', verbose_name='微信群栏二维码图片地址')),
],
options={
'ordering': ['-create_date'],
'verbose_name': '公告栏',
'verbose_name_plural': '公告栏',
},
),
]
| 344.381818
| 4,541
| 0.906394
| 781
| 18,941
| 21.880922
| 0.517286
| 0.020598
| 0.023173
| 0.010533
| 0.556791
| 0.535257
| 0.532916
| 0.520276
| 0.517643
| 0.517643
| 0
| 0.125503
| 0.041286
| 18,941
| 54
| 4,542
| 350.759259
| 0.815574
| 0.002376
| 0
| 0
| 1
| 0.085106
| 0.852175
| 0.814862
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021277
| 0
| 0.106383
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0d2c89d5016f2b6e30fe284fd3088725e793ccbe
| 79,412
|
py
|
Python
|
pycatia/space_analyses_interfaces/sectioning_setting_att.py
|
evereux/catia_python
|
08948585899b12587b0415ce3c9191a408b34897
|
[
"MIT"
] | 90
|
2019-02-21T10:05:28.000Z
|
2022-03-19T01:53:41.000Z
|
pycatia/space_analyses_interfaces/sectioning_setting_att.py
|
Luanee/pycatia
|
ea5eef8178f73de12404561c00baf7a7ca30da59
|
[
"MIT"
] | 99
|
2019-05-21T08:29:12.000Z
|
2022-03-25T09:55:15.000Z
|
pycatia/space_analyses_interfaces/sectioning_setting_att.py
|
Luanee/pycatia
|
ea5eef8178f73de12404561c00baf7a7ca30da59
|
[
"MIT"
] | 26
|
2019-04-04T06:31:36.000Z
|
2022-03-30T07:24:47.000Z
|
#! usr/bin/python3.6
"""
Module initially auto generated using V5Automation files from CATIA V5 R28 on 2020-06-11 12:40:47.360445
.. warning::
The notes denoted "CAA V5 Visual Basic Help" are to be used as reference only.
They are there as a guide as to how the visual basic / catscript functions work
and thus help debugging in pycatia.
"""
from pycatia.system_interfaces.setting_controller import SettingController
class SectioningSettingAtt(SettingController):
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
| System.IUnknown
| System.IDispatch
| System.CATBaseUnknown
| System.CATBaseDispatch
| System.AnyObject
| System.SettingController
| SectioningSettingAtt
|
| The interface to access a CATIASectioningSettingAtt.
"""
def __init__(self, com_object):
super().__init__(com_object)
self.sectioning_setting_att = com_object
@property
def clipping_mode(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property ClippingMode() As CatSectionClippingMode
|
| Returns or sets the ClippingMode parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: int
:rtype: int
"""
return self.sectioning_setting_att.ClippingMode
@clipping_mode.setter
def clipping_mode(self, value: int):
"""
:param int value:
"""
self.sectioning_setting_att.ClippingMode = value
@property
def display_cut_in_wireframe(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property DisplayCutInWireframe() As boolean
|
| Returns or sets the DisplayCutInWireframe parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.DisplayCutInWireframe
@display_cut_in_wireframe.setter
def display_cut_in_wireframe(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.DisplayCutInWireframe = value
@property
def grid_auto_filtering(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property GridAutoFiltering() As boolean
|
| Returns or sets the GridAutoFiltering parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.GridAutoFiltering
@grid_auto_filtering.setter
def grid_auto_filtering(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.GridAutoFiltering = value
@property
def grid_auto_resize(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property GridAutoResize() As boolean
|
| Returns or sets the GridAutoResize parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.GridAutoResize
@grid_auto_resize.setter
def grid_auto_resize(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.GridAutoResize = value
@property
def grid_height_step(self) -> float:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property GridHeightStep() As float
|
| Returns or sets the GridHeightStep parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: float
:rtype: float
"""
return self.sectioning_setting_att.GridHeightStep
@grid_height_step.setter
def grid_height_step(self, value: float):
"""
:param float value:
"""
self.sectioning_setting_att.GridHeightStep = value
@property
def grid_position_mode(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property GridPositionMode() As CatGridPositionMode
|
| Returns or sets the GridPositionMode parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: int
:rtype: int
"""
return self.sectioning_setting_att.GridPositionMode
@grid_position_mode.setter
def grid_position_mode(self, value: int):
"""
:param int value:
"""
self.sectioning_setting_att.GridPositionMode = value
@property
def grid_style(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property GridStyle() As CatSectionGridStyle
|
| Returns or sets the GridStyle parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: int
:rtype: int
"""
return self.sectioning_setting_att.GridStyle
@grid_style.setter
def grid_style(self, value: int):
"""
:param int value:
"""
self.sectioning_setting_att.GridStyle = value
@property
def grid_width_step(self) -> float:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property GridWidthStep() As float
|
| Returns or sets the GridWidthStep parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: float
:rtype: float
"""
return self.sectioning_setting_att.GridWidthStep
@grid_width_step.setter
def grid_width_step(self, value: float):
"""
:param float value:
"""
self.sectioning_setting_att.GridWidthStep = value
@property
def hide_plane(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property HidePlane() As boolean
|
| Returns or sets the HidePlane parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.HidePlane
@hide_plane.setter
def hide_plane(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.HidePlane = value
@property
def hide_result(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property HideResult() As boolean
|
| Returns or sets the HideResult parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.HideResult
@hide_result.setter
def hide_result(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.HideResult = value
@property
def plane_normal(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property PlaneNormal() As CatSectionPlaneNormal
|
| Returns or sets the PlaneNormal parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: int
:rtype: int
"""
return self.sectioning_setting_att.PlaneNormal
@plane_normal.setter
def plane_normal(self, value: int):
"""
:param int value:
"""
self.sectioning_setting_att.PlaneNormal = value
@property
def plane_origin(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property PlaneOrigin() As CatSectionPlaneOrigin
|
| Returns or sets the PlaneOrigin parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: int
:rtype: int
"""
return self.sectioning_setting_att.PlaneOrigin
@plane_origin.setter
def plane_origin(self, value: int):
"""
:param int value:
"""
self.sectioning_setting_att.PlaneOrigin = value
@property
def section_export_type(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property SectionExportType() As boolean
|
| Returns or sets the SectionExportType parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.SectionExportType
@section_export_type.setter
def section_export_type(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.SectionExportType = value
@property
def section_fill(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property SectionFill() As boolean
|
| Returns or sets the SectionFill parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.SectionFill
@section_fill.setter
def section_fill(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.SectionFill = value
@property
def update_result(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property UpdateResult() As boolean
|
| Returns or sets the UpdateResult parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.UpdateResult
@update_result.setter
def update_result(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.UpdateResult = value
@property
def viewer_auto_open(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property ViewerAutoOpen() As boolean
|
| Returns or sets the ViewerAutoOpen parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.ViewerAutoOpen
@viewer_auto_open.setter
def viewer_auto_open(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.ViewerAutoOpen = value
@property
def viewer_auto_reframe(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property ViewerAutoReframe() As boolean
|
| Returns or sets the ViewerAutoReframe parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.ViewerAutoReframe
@viewer_auto_reframe.setter
def viewer_auto_reframe(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.ViewerAutoReframe = value
@property
def viewer_lock_2d(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property ViewerLock2D() As boolean
|
| Returns or sets the ViewerLock2D parameter.
|
| Ensure consistency with the C++ interface to which the work is delegated.
:return: bool
:rtype: bool
"""
return self.sectioning_setting_att.ViewerLock2D
@viewer_lock_2d.setter
def viewer_lock_2d(self, value: bool):
"""
:param bool value:
"""
self.sectioning_setting_att.ViewerLock2D = value
@property
def window_default_height(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property WindowDefaultHeight() As long
|
|
| Role:Retrieve section window default height if the window open mode is
| catSecWindow_DefaultSize
|
| Parameters:
|
| oWindowDefaultHeight
|
| Returns:
| S_OK Successfully retieved the window open mode E_FAIL Failed to
| retrieved the window open mode
:return: int
:rtype: int
"""
return self.sectioning_setting_att.WindowDefaultHeight
@window_default_height.setter
def window_default_height(self, value: int):
"""
:param int value:
"""
self.sectioning_setting_att.WindowDefaultHeight = value
@property
def window_default_width(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property WindowDefaultWidth() As long
|
|
| Role:Retrieve section window default width if the window open mode is
| catSecWindow_DefaultSize
|
| Parameters:
|
| oWindowDefaultWidth
|
| Returns:
| S_OK Successfully retieved the window open mode E_FAIL Failed to
| retrieved the window open mode
:return: int
:rtype: int
"""
return self.sectioning_setting_att.WindowDefaultWidth
@window_default_width.setter
def window_default_width(self, value: int):
"""
:param int value:
"""
self.sectioning_setting_att.WindowDefaultWidth = value
@property
def window_open_mode(self) -> int:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property WindowOpenMode() As CatSecWindowOpenMode
|
|
| Role:Retrieve section window open mode
|
| Parameters:
|
| oWindowOpenMode
| Legal values:
| catSecWindow_DefaultSize :Opens the sectioning window(s) with the
| default size specified in the Tools->Options.
| catSecWindow_TileVertically :Tiles the sectioning window(s)
| vertically in the viewer
|
| Returns:
| S_OK Successfully retieved the window open mode E_FAIL Failed to
| retrieved the window open mode
:return: int
:rtype: int
"""
return self.sectioning_setting_att.WindowOpenMode
@window_open_mode.setter
def window_open_mode(self, value: int):
"""
:param int value:
"""
self.sectioning_setting_att.WindowOpenMode = value
def get_clipping_mode_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the ClippingMode setting was explicitly modified.

    Delegates to ``GetClippingModeInfo`` on the underlying setting
    controller; ``io_admin_level`` carries the administration level
    governing the parameter and ``io_locked`` its lock state.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetClippingModeInfo(io_admin_level, io_locked)
def get_display_cut_in_wireframe_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the DisplayCutInWireframe setting was explicitly modified.

    Delegates to ``GetDisplayCutInWireframeInfo`` on the underlying
    setting controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetDisplayCutInWireframeInfo(io_admin_level, io_locked)
def get_grid_auto_filtering_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the GridAutoFiltering setting was explicitly modified.

    Delegates to ``GetGridAutoFilteringInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetGridAutoFilteringInfo(io_admin_level, io_locked)
def get_grid_auto_resize_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the GridAutoResize setting was explicitly modified.

    Delegates to ``GetGridAutoResizeInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetGridAutoResizeInfo(io_admin_level, io_locked)
def get_grid_height_step_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the GridHeightStep setting was explicitly modified.

    Delegates to ``GetGridHeightStepInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetGridHeightStepInfo(io_admin_level, io_locked)
def get_grid_position_mode_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the GridPositionMode setting was explicitly modified.

    Delegates to ``GetGridPositionModeInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetGridPositionModeInfo(io_admin_level, io_locked)
def get_grid_style_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the GridStyle setting was explicitly modified.

    Delegates to ``GetGridStyleInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetGridStyleInfo(io_admin_level, io_locked)
def get_grid_width_step_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the GridWidthStep setting was explicitly modified.

    Delegates to ``GetGridWidthStepInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetGridWidthStepInfo(io_admin_level, io_locked)
def get_hide_plane_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the HidePlane setting was explicitly modified.

    Delegates to ``GetHidePlaneInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetHidePlaneInfo(io_admin_level, io_locked)
def get_hide_result_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the HideResult setting was explicitly modified.

    Delegates to ``GetHideResultInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetHideResultInfo(io_admin_level, io_locked)
def get_plane_color(self, o_r: int, o_g: int, o_b: int) -> None:
    """
    Read the PlaneColor setting (RGB components).

    Delegates to ``GetPlaneColor`` on the underlying setting controller.

    :param int o_r: red component of the color.
    :param int o_g: green component of the color.
    :param int o_b: blue component of the color.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.GetPlaneColor(o_r, o_g, o_b)
def get_plane_color_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the PlaneColor setting was explicitly modified.

    Delegates to ``GetPlaneColorInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetPlaneColorInfo(io_admin_level, io_locked)
def get_plane_normal_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the PlaneNormal setting was explicitly modified.

    Delegates to ``GetPlaneNormalInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetPlaneNormalInfo(io_admin_level, io_locked)
def get_plane_origin_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the PlaneOrigin setting was explicitly modified.

    Delegates to ``GetPlaneOriginInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetPlaneOriginInfo(io_admin_level, io_locked)
def get_section_export_type_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the SectionExportType setting was explicitly modified.

    Delegates to ``GetSectionExportTypeInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetSectionExportTypeInfo(io_admin_level, io_locked)
def get_section_fill_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the SectionFill setting was explicitly modified.

    Delegates to ``GetSectionFillInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetSectionFillInfo(io_admin_level, io_locked)
def get_update_result_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the UpdateResult setting was explicitly modified.

    Delegates to ``GetUpdateResultInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetUpdateResultInfo(io_admin_level, io_locked)
def get_viewer_auto_open_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the ViewerAutoOpen setting was explicitly modified.

    Delegates to ``GetViewerAutoOpenInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetViewerAutoOpenInfo(io_admin_level, io_locked)
def get_viewer_auto_reframe_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the ViewerAutoReframe setting was explicitly modified.

    Delegates to ``GetViewerAutoReframeInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetViewerAutoReframeInfo(io_admin_level, io_locked)
def get_viewer_lock2_d_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the ViewerLock2D setting was explicitly modified.

    Delegates to ``GetViewerLock2DInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetViewerLock2DInfo(io_admin_level, io_locked)
def get_window_default_height_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the WindowDefaultHeight setting was explicitly modified.

    Delegates to ``GetWindowDefaultHeightInfo`` on the underlying
    setting controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetWindowDefaultHeightInfo(io_admin_level, io_locked)
def get_window_default_width_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the WindowDefaultWidth setting was explicitly modified.

    Delegates to ``GetWindowDefaultWidthInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetWindowDefaultWidthInfo(io_admin_level, io_locked)
def get_window_open_mode_info(self, io_admin_level: str, io_locked: str) -> bool:
    """
    Report whether the WindowOpenMode setting was explicitly modified.

    Delegates to ``GetWindowOpenModeInfo`` on the underlying setting
    controller.

    :param str io_admin_level: administration-level information.
    :param str io_locked: lock-state information.
    :rtype: bool
    """
    setting = self.sectioning_setting_att
    return setting.GetWindowOpenModeInfo(io_admin_level, io_locked)
def set_clipping_mode_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ClippingMode setting.

    Delegates to ``SetClippingModeLock``; per the underlying API this is
    only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetClippingModeLock(i_locked)
def set_display_cut_in_wireframe_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the DisplayCutInWireframe setting.

    Delegates to ``SetDisplayCutInWireframeLock``; per the underlying
    API this is only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetDisplayCutInWireframeLock(i_locked)
def set_grid_auto_filtering_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the GridAutoFiltering setting.

    Delegates to ``SetGridAutoFilteringLock``; per the underlying API
    this is only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetGridAutoFilteringLock(i_locked)
def set_grid_auto_resize_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the GridAutoResize setting.

    Delegates to ``SetGridAutoResizeLock``; per the underlying API this
    is only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetGridAutoResizeLock(i_locked)
def set_grid_height_step_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the GridHeightStep setting.

    Delegates to ``SetGridHeightStepLock``; per the underlying API this
    is only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetGridHeightStepLock(i_locked)
def set_grid_position_mode_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the GridPositionMode setting.

    Delegates to ``SetGridPositionModeLock``; per the underlying API
    this is only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetGridPositionModeLock(i_locked)
def set_grid_style_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the GridStyle setting.

    Delegates to ``SetGridStyleLock``; per the underlying API this is
    only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetGridStyleLock(i_locked)
def set_grid_width_step_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the GridWidthStep setting.

    Delegates to ``SetGridWidthStepLock``; per the underlying API this
    is only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetGridWidthStepLock(i_locked)
def set_hide_plane_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the HidePlane setting.

    Delegates to ``SetHidePlaneLock``; per the underlying API this is
    only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetHidePlaneLock(i_locked)
def set_hide_result_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the HideResult setting.

    Delegates to ``SetHideResultLock``; per the underlying API this is
    only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetHideResultLock(i_locked)
def set_plane_color(self, i_r: int, i_g: int, i_b: int) -> None:
    """
    Set the PlaneColor setting from RGB components.

    Delegates to ``SetPlaneColor`` on the underlying setting controller.

    :param int i_r: red component of the color.
    :param int i_g: green component of the color.
    :param int i_b: blue component of the color.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetPlaneColor(i_r, i_g, i_b)
def set_plane_color_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the PlaneColor setting.

    Delegates to ``SetPlaneColorLock``; per the underlying API this is
    only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetPlaneColorLock(i_locked)
def set_plane_normal_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the PlaneNormal setting.

    Delegates to ``SetPlaneNormalLock``; per the underlying API this is
    only effective in an administrative context.

    :param bool i_locked: ``True`` to lock, ``False`` to unlock.
    :rtype: None
    """
    setting = self.sectioning_setting_att
    return setting.SetPlaneNormalLock(i_locked)
def set_plane_origin_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``PlaneOrigin`` setting parameter.

    Delegates to ``SetPlaneOriginLock`` on the wrapped setting
    controller.  Per the CAA V5 help, locking only succeeds in an
    administrative context; in user mode the native call always
    returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    att = self.sectioning_setting_att
    return att.SetPlaneOriginLock(i_locked)
def set_section_export_type_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``SectionExportType`` setting parameter.

    Delegates to ``SetSectionExportTypeLock`` on the wrapped setting
    controller.  Per the CAA V5 help, locking only succeeds in an
    administrative context; in user mode the native call always
    returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    att = self.sectioning_setting_att
    return att.SetSectionExportTypeLock(i_locked)
def set_section_fill_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``SectionFill`` setting parameter.

    Delegates to ``SetSectionFillLock`` on the wrapped setting
    controller.  Per the CAA V5 help, locking only succeeds in an
    administrative context; in user mode the native call always
    returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    att = self.sectioning_setting_att
    return att.SetSectionFillLock(i_locked)
def set_update_result_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``UpdateResult`` setting parameter.

    Delegates to ``SetUpdateResultLock`` on the wrapped setting
    controller.  Per the CAA V5 help, locking only succeeds in an
    administrative context; in user mode the native call always
    returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    att = self.sectioning_setting_att
    return att.SetUpdateResultLock(i_locked)
def set_viewer_auto_open_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``ViewerAutoOpen`` setting parameter.

    Delegates to ``SetViewerAutoOpenLock`` on the wrapped setting
    controller.  Per the CAA V5 help, locking only succeeds in an
    administrative context; in user mode the native call always
    returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    att = self.sectioning_setting_att
    return att.SetViewerAutoOpenLock(i_locked)
def set_viewer_auto_reframe_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``ViewerAutoReframe`` setting parameter.

    Delegates to ``SetViewerAutoReframeLock`` on the wrapped setting
    controller.  Per the CAA V5 help, locking only succeeds in an
    administrative context; in user mode the native call always
    returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    att = self.sectioning_setting_att
    return att.SetViewerAutoReframeLock(i_locked)
def set_viewer_lock2_d_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``ViewerLock2D`` setting parameter.

    Delegates to ``SetViewerLock2DLock`` on the wrapped setting
    controller.  Per the CAA V5 help, locking only succeeds in an
    administrative context; in user mode the native call always
    returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    att = self.sectioning_setting_att
    return att.SetViewerLock2DLock(i_locked)
def set_window_default_height_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``WindowDefaultHeight`` setting parameter.

    Delegates to ``SetWindowDefaultHeightLock`` on the wrapped setting
    controller.  Per the CAA V5 help, locking only succeeds in an
    administrative context; in user mode the native call always
    returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    att = self.sectioning_setting_att
    return att.SetWindowDefaultHeightLock(i_locked)
def set_window_default_width_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``WindowDefaultWidth`` setting parameter.

    Delegates to ``SetWindowDefaultWidthLock`` on the wrapped setting
    controller.  Per the CAA V5 help, locking only succeeds in an
    administrative context; in user mode the native call always
    returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    att = self.sectioning_setting_att
    return att.SetWindowDefaultWidthLock(i_locked)
def set_window_open_mode_lock(self, i_locked: bool) -> None:
    """
    Lock or unlock the ``WindowOpenMode`` setting parameter.

    Delegates to ``SetWindowOpenModeLock`` on the wrapped setting
    controller.  Per the CAA V5 help, locking only succeeds in an
    administrative context; in user mode the native call always
    returns E_FAIL.

    :param bool i_locked: True to lock the parameter, False to unlock it.
    :return: None
    :rtype: None
    """
    att = self.sectioning_setting_att
    return att.SetWindowOpenModeLock(i_locked)
def __repr__(self):
    """Return a debug representation carrying the setting name."""
    return 'SectioningSettingAtt(name="{}")'.format(self.name)
| 37.689606
| 108
| 0.49665
| 7,336
| 79,412
| 5.270174
| 0.042257
| 0.054627
| 0.048342
| 0.055248
| 0.859681
| 0.825436
| 0.763851
| 0.739693
| 0.729269
| 0.719828
| 0
| 0.033379
| 0.444668
| 79,412
| 2,106
| 109
| 37.707502
| 0.843311
| 0.668463
| 0
| 0.093333
| 1
| 0
| 0.002943
| 0.002943
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.004444
| 0.004444
| 0.711111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.