hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | 
qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
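The schema above is one logical record layout: repository, licence and star/issue/fork metadata for a file, the raw file `content`, and a battery of `qsc_*` quality signals (duplication, whitespace, comment fraction, Python AST checks, and so on). A minimal sketch of how such a table could be loaded and filtered; the Parquet path and the filter thresholds are placeholder assumptions, only the column names come from the schema:

```python
# Minimal sketch, assuming the rows ship as a Parquet table; the path and the
# thresholds below are placeholders, only the column names come from the schema.
import pandas as pd

df = pd.read_parquet("code_quality_sample.parquet")

mask = (
    (df["lang"] == "Python")
    & (df["max_line_length"] <= 120)
    & (df["qsc_code_frac_chars_dupe_10grams_quality_signal"] < 0.1)
)
print(df.loc[mask, ["max_stars_repo_name", "size", "alphanum_fraction"]].head())
```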
43fe87382f808bfcf01cbb71193792d56c8a1c73 | 656 | py | Python | airtng_flask/models/__init__.py | satchkat/airtng-masked-numbers-flask | 8f9ffad7fb7716bdf96cca06854b5ebbb5228fda | [
"MIT"
] | 5 | 2019-08-06T13:02:14.000Z | 2021-08-23T07:15:17.000Z | airtng_flask/models/__init__.py | satchkat/airtng-masked-numbers-flask | 8f9ffad7fb7716bdf96cca06854b5ebbb5228fda | [
"MIT"
] | 84 | 2019-08-20T07:41:19.000Z | 2022-03-31T07:13:06.000Z | airtng_flask/models/__init__.py | satchkat/airtng-masked-numbers-flask | 8f9ffad7fb7716bdf96cca06854b5ebbb5228fda | [
"MIT"
] | 3 | 2020-06-18T11:50:57.000Z | 2022-03-11T17:45:10.000Z | model_settings = {
'db': None,
'bcrypt': None,
'app': None,
}
def init_models_module(db, bcrypt, flask_app):
model_settings['db'] = db
model_settings['bcrypt'] = bcrypt
model_settings['app'] = flask_app
def app_db():
return model_settings['db']
def bcrypt():
return model_settings['bcrypt']
def auth_token():
return model_settings['app'].config['TWILIO_AUTH_TOKEN']
def phone_number():
return model_settings['app'].config['TWILIO_NUMBER']
def account_sid():
return model_settings['app'].config['TWILIO_ACCOUNT_SID']
def application_sid():
return model_settings['app'].config['APPLICATION_SID']
| 18.222222 | 61 | 0.685976 | 84 | 656 | 5.059524 | 0.25 | 0.305882 | 0.268235 | 0.207059 | 0.32 | 0.32 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166159 | 656 | 35 | 62 | 18.742857 | 0.776965 | 0 | 0 | 0 | 0 | 0 | 0.160061 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.285714 | 0.619048 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
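The `airtng_flask/models/__init__.py` record above is a small service locator: `init_models_module` stores the database handle, bcrypt instance and Flask app in a module-level dict, and the remaining functions read Twilio settings back out of the stored app's config. A hypothetical wiring sketch (assuming the `airtng_flask` package from this record is importable; the stand-in app object and config values are invented, only the function names come from the file):

```python
# Hypothetical wiring; a real app would pass its Flask app, SQLAlchemy db and
# Bcrypt instance.  A stand-in object with a .config dict keeps this runnable.
from airtng_flask.models import (
    init_models_module, app_db, auth_token, phone_number, account_sid,
)

class FakeApp:
    config = {
        "TWILIO_AUTH_TOKEN": "token",
        "TWILIO_NUMBER": "+15005550006",
        "TWILIO_ACCOUNT_SID": "ACxxxxxxxx",
        "APPLICATION_SID": "APxxxxxxxx",
    }

init_models_module(db="db-handle", bcrypt="bcrypt-handle", flask_app=FakeApp())

assert app_db() == "db-handle"
assert auth_token() == "token"
assert phone_number() == "+15005550006"
assert account_sid() == "ACxxxxxxxx"
```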
a138cf4912f40e2315ddb4b40cb941417ed92a2a | 62 | py | Python | sample/sample.py | eaybek/skive | 6e6aa45bc91a27649c654edfb3fb773c8e9e456e | [
"MIT"
] | null | null | null | sample/sample.py | eaybek/skive | 6e6aa45bc91a27649c654edfb3fb773c8e9e456e | [
"MIT"
] | null | null | null | sample/sample.py | eaybek/skive | 6e6aa45bc91a27649c654edfb3fb773c8e9e456e | [
"MIT"
] | null | null | null | from skive.skive import Skive
class Skive(object):
pass
| 10.333333 | 29 | 0.725806 | 9 | 62 | 5 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.209677 | 62 | 5 | 30 | 12.4 | 0.918367 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
a1613e61e69f58e4f7a42ad075ebdcce0df1a151 | 158 | py | Python | util/test/tests/Vulkan/VK_Buffer_Truncation.py | PLohrmannAMD/renderdoc | ea16d31aa340581f5e505e0c734a8468e5d3d47f | [
"MIT"
] | 6,181 | 2015-01-07T11:49:11.000Z | 2022-03-31T21:46:55.000Z | util/test/tests/Vulkan/VK_Buffer_Truncation.py | PLohrmannAMD/renderdoc | ea16d31aa340581f5e505e0c734a8468e5d3d47f | [
"MIT"
] | 2,015 | 2015-01-16T01:45:25.000Z | 2022-03-25T12:01:06.000Z | util/test/tests/Vulkan/VK_Buffer_Truncation.py | PLohrmannAMD/renderdoc | ea16d31aa340581f5e505e0c734a8468e5d3d47f | [
"MIT"
] | 1,088 | 2015-01-06T08:36:25.000Z | 2022-03-30T03:31:21.000Z | import rdtest
import renderdoc as rd
class VK_Buffer_Truncation(rdtest.Buffer_Truncation):
demos_test_name = 'VK_Buffer_Truncation'
internal = False | 22.571429 | 53 | 0.803797 | 21 | 158 | 5.714286 | 0.666667 | 0.4 | 0.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.14557 | 158 | 7 | 54 | 22.571429 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0.125786 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
a191131485c7f04812287ee64b6add2ac0c47de9 | 10,652 | py | Python | tests/metrics/test_primitives.py | NunoEdgarGFlowHub/torchbearer | d2b21b8ffcabde5b505cb1c736e05af6ee4276ca | [
"MIT"
] | 358 | 2018-07-23T13:30:38.000Z | 2019-06-02T07:18:35.000Z | tests/metrics/test_primitives.py | Jayaudaykmar26589/torchbearer | 940e75ec88acd59d5a97aa8c721f7cfa30a5c4d0 | [
"MIT"
] | 307 | 2018-07-18T12:07:23.000Z | 2019-06-03T18:00:27.000Z | tests/metrics/test_primitives.py | Jayaudaykmar26589/torchbearer | 940e75ec88acd59d5a97aa8c721f7cfa30a5c4d0 | [
"MIT"
] | 42 | 2018-07-23T22:49:23.000Z | 2019-05-20T07:22:55.000Z | import unittest
import torch
from torch.autograd import Variable
import torchbearer
from torchbearer.metrics import Loss, Epoch, CategoricalAccuracy, TopKCategoricalAccuracy, BinaryAccuracy, MeanSquaredError
class TestLoss(unittest.TestCase):
def setUp(self):
with torch.no_grad():
self._state = {
torchbearer.LOSS: torch.FloatTensor([2.35])
}
self._metric = Loss().root # Get root node of Tree for testing
def test_train_process(self):
self._metric.train()
result = self._metric.process(self._state)
self.assertAlmostEqual(2.35, result[0], 3, 0.002)
def test_validate_process(self):
self._metric.eval()
result = self._metric.process(self._state)
self.assertAlmostEqual(2.35, result[0], 3, 0.002)
class TestEpoch(unittest.TestCase):
def setUp(self):
self._state = {
torchbearer.EPOCH: 101
}
self._metric = Epoch().metric # Get wrapped metric for testing
def test_process(self):
result = self._metric.process(self._state)
self.assertEqual(101, result)
def test_process_final(self):
result = self._metric.process_final(self._state)
self.assertEqual(101, result)
class TestBinaryAccuracy(unittest.TestCase):
def setUp(self):
self._state = {
torchbearer.Y_TRUE: torch.LongTensor([
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 1, 0]
]),
torchbearer.Y_PRED: torch.FloatTensor([
[0.9, 0.1, 0.1], # Correct
[0.1, 0.9, 0.1], # Correct
[0.1, 0.1, 0.9], # Correct
[0.9, 0.1, 0.1], # Incorrect
[0.9, 0.1, 0.1] # Incorrect
])
}
self._targets = [1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1]
self._metric = BinaryAccuracy().root # Get root node of Tree for testing
def test_train_process(self):
self._metric.train()
result = self._metric.process(self._state)
for i in range(0, len(self._targets)):
self.assertEqual(result[i], self._targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result))
def test_validate_process(self):
self._metric.eval()
result = self._metric.process(self._state)
for i in range(0, len(self._targets)):
self.assertEqual(result[i], self._targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result))
def test_weird_types(self):
state = {
torchbearer.Y_TRUE: torch.LongTensor([
[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 1, 0]
]).byte(),
torchbearer.Y_PRED: torch.FloatTensor([
[0.9, 0.1, 0.1], # Correct
[0.1, 0.9, 0.1], # Correct
[0.1, 0.1, 0.9], # Correct
[0.9, 0.1, 0.1], # Incorrect
[0.9, 0.1, 0.1] # Incorrect
]).double()
}
self._metric.train()
result = self._metric.process(state)
for i in range(0, len(self._targets)):
self.assertEqual(result[i], self._targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result))
def test_threshold(self):
state = {
torchbearer.Y_TRUE: torch.FloatTensor([
[0.9, 0, 0],
[0, 1, 0],
[0, 0, 1],
[0, 0.3, 1],
[0, 0.6, 0]
]),
torchbearer.Y_PRED: torch.FloatTensor([
[0.9, 0.1, 0.1], # Correct
[0.1, 0.9, 0.1], # Correct
[0.1, 0.1, 0.9], # Correct
[0.9, 0.1, 0.1], # Incorrect
[0.9, 0.1, 0.1] # Incorrect
])
}
metric = BinaryAccuracy(threshold=0.4).root # Get root node of Tree for testing
metric.train()
result = metric.process(state)
for i in range(0, len(self._targets)):
self.assertEqual(result[i], self._targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result))
class TestCategoricalAccuracy(unittest.TestCase):
def setUp(self):
self._state = {
torchbearer.Y_TRUE: Variable(torch.LongTensor([0, 1, 2, 2, 1])),
torchbearer.Y_PRED: Variable(torch.FloatTensor([
[0.9, 0.1, 0.1], # Correct
[0.1, 0.9, 0.1], # Correct
[0.1, 0.1, 0.9], # Correct
[0.9, 0.1, 0.1], # Incorrect
[0.9, 0.1, 0.1], # Incorrect
])),
}
self._targets = [1, 1, 1, 0, 0]
self._metric = CategoricalAccuracy().root # Get root node of Tree for testing
def test_ignore_index(self):
metric = CategoricalAccuracy(ignore_index=1).root # Get root node of Tree for testing
targets = [1, 1, 0]
metric.train()
result = metric.process(self._state)
for i in range(0, len(targets)):
self.assertEqual(result[i], targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(targets[i])
+ ' in: ' + str(result))
def test_train_process(self):
self._metric.train()
result = self._metric.process(self._state)
for i in range(0, len(self._targets)):
self.assertEqual(result[i], self._targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result))
def test_train_process_soft(self):
self._metric.train()
soft_targets = torch.FloatTensor([
[0.98, 0.01, 0.01], # Correct
[0.01, 0.98, 0.01], # Correct
[0.01, 0.01, 0.98], # Correct
[0.01, 0.01, 0.98], # Incorrect
[0.01, 0.98, 0.01], # Incorrect
])
state = self._state.copy()
state[torchbearer.Y_TRUE] = soft_targets
result = self._metric.process(state)
for i in range(0, len(self._targets)):
self.assertEqual(result[i], self._targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result))
def test_validate_process(self):
self._metric.eval()
result = self._metric.process(self._state)
for i in range(0, len(self._targets)):
self.assertEqual(result[i], self._targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result))
class TestTopKCategoricalAccuracy(unittest.TestCase):
def setUp(self):
self._state = {
torchbearer.Y_TRUE: Variable(torch.LongTensor([0, 5, 2, 3, 1])),
torchbearer.Y_PRED: Variable(torch.FloatTensor([
[0.9, 0.8, 0.7, 0.6, 0.5, 0.4], # Correct
[0.4, 0.5, 0.6, 0.7, 0.8, 0.9], # Correct
[0.6, 0.5, 0.4, 0.7, 0.8, 0.9], # Incorrect
[0.6, 0.5, 0.7, 0.4, 0.8, 0.9], # Incorrect
[0.4, 0.5, 0.6, 0.7, 0.8, 0.9] # Correct
]))
}
self._targets = [1, 1, 0, 0, 1]
self._metric = TopKCategoricalAccuracy(k=5).root # Get root node of Tree for testing
def test_ignore_index(self):
metric = TopKCategoricalAccuracy(ignore_index=1).root # Get root node of Tree for testing
targets = [1, 1, 0, 0]
metric.train()
result = metric.process(self._state)
for i in range(0, len(targets)):
self.assertEqual(result[i], targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(targets[i])
+ ' in: ' + str(result))
def test_train_process(self):
self._metric.train()
result = self._metric.process(self._state)
for i in range(0, len(self._targets)):
self.assertEqual(result[i], self._targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result))
def test_validate_process(self):
self._metric.eval()
result = self._metric.process(self._state)
for i in range(0, len(self._targets)):
self.assertEqual(result[i], self._targets[i],
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result))
def test_top_ten_default(self):
metric = torchbearer.metrics.get_default('top_10_acc').root
self.assertEqual(metric.k, 10)
class TestMeanSquaredError(unittest.TestCase):
def setUp(self):
self._state = {
torchbearer.Y_TRUE: Variable(torch.FloatTensor(
[0.8, 0.2, 0.0, 0.4, 0.3, 0.7]
)),
torchbearer.Y_PRED: Variable(torch.FloatTensor(
[0.9, 0.1, 0.1, 0.7, 0.5, 0.6]
))
}
self._targets = [0.01, 0.01, 0.01, 0.09, 0.04, 0.01]
self._metric = MeanSquaredError().root # Get root node of Tree for testing
def test_train_process(self):
self._metric.train()
result = self._metric.process(self._state)
for i in range(0, len(self._targets)):
self.assertAlmostEqual(result[i].item(), self._targets[i], places=3,
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result))
def test_validate_process(self):
self._metric.eval()
result = self._metric.process(self._state)
for i in range(0, len(self._targets)):
self.assertAlmostEqual(result[i].item(), self._targets[i], places=3,
msg='returned: ' + str(result[i]) + ' expected: ' + str(self._targets[i])
+ ' in: ' + str(result)) | 39.746269 | 123 | 0.498592 | 1,286 | 10,652 | 4.014774 | 0.075428 | 0.021693 | 0.021499 | 0.01472 | 0.814836 | 0.788495 | 0.757893 | 0.741817 | 0.725353 | 0.723029 | 0 | 0.059947 | 0.356365 | 10,652 | 268 | 124 | 39.746269 | 0.693116 | 0.053136 | 0 | 0.689655 | 0 | 0 | 0.034637 | 0 | 0 | 0 | 0 | 0 | 0.077586 | 1 | 0.103448 | false | 0 | 0.021552 | 0 | 0.150862 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
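The torchbearer test file above pins down per-sample metric outputs; `TestCategoricalAccuracy`, for example, expects `[1, 1, 1, 0, 0]` for its inputs. A plain-PyTorch restatement of that expectation (an illustration of the arithmetic being asserted, not the library's implementation):

```python
# Not torchbearer itself: just the per-sample 0/1 vector the test expects,
# i.e. 1 exactly where argmax(y_pred) equals y_true.
import torch

y_true = torch.LongTensor([0, 1, 2, 2, 1])
y_pred = torch.FloatTensor([
    [0.9, 0.1, 0.1],  # correct
    [0.1, 0.9, 0.1],  # correct
    [0.1, 0.1, 0.9],  # correct
    [0.9, 0.1, 0.1],  # incorrect
    [0.9, 0.1, 0.1],  # incorrect
])

per_sample = (y_pred.argmax(dim=1) == y_true).long()
assert per_sample.tolist() == [1, 1, 1, 0, 0]  # matches self._targets in the test
```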
a1a364a2e72fec3d62af863178ca96d6dfef37b5 | 136 | py | Python | pelican_debugger.py | anselmos/debug_pelican_signals | a12190843bdcd6feaa9b2b61797c0b83c4357ec5 | [
"MIT"
] | null | null | null | pelican_debugger.py | anselmos/debug_pelican_signals | a12190843bdcd6feaa9b2b61797c0b83c4357ec5 | [
"MIT"
] | null | null | null | pelican_debugger.py | anselmos/debug_pelican_signals | a12190843bdcd6feaa9b2b61797c0b83c4357ec5 | [
"MIT"
] | null | null | null | from pelican import parse_arguments
from pelican import get_instance
pelican, settings = get_instance(parse_arguments())
pelican.run()
| 22.666667 | 51 | 0.830882 | 18 | 136 | 6.055556 | 0.5 | 0.201835 | 0.311927 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102941 | 136 | 5 | 52 | 27.2 | 0.893443 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
a1ac3dcf99833da59956337147de32f446e4cabe | 9,031 | py | Python | irctest/server_tests/test_sasl.py | delthas/irctest | c12c44b9938986608a8114cc21f1b5719cd110cb | [
"MIT"
] | 8 | 2017-11-01T17:43:13.000Z | 2022-01-30T08:21:50.000Z | irctest/server_tests/test_sasl.py | delthas/irctest | c12c44b9938986608a8114cc21f1b5719cd110cb | [
"MIT"
] | 32 | 2016-12-01T09:23:58.000Z | 2020-09-23T05:48:01.000Z | irctest/server_tests/test_sasl.py | delthas/irctest | c12c44b9938986608a8114cc21f1b5719cd110cb | [
"MIT"
] | 3 | 2017-11-14T03:54:39.000Z | 2020-09-09T06:47:57.000Z | import base64
from irctest import cases
class RegistrationTestCase(cases.BaseServerTestCase):
def testRegistration(self):
self.controller.registerUser(self, 'testuser', 'mypassword')
class SaslTestCase(cases.BaseServerTestCase, cases.OptionalityHelper):
@cases.SpecificationSelector.requiredBySpecification('IRCv3.1')
@cases.OptionalityHelper.skipUnlessHasMechanism('PLAIN')
def testPlain(self):
"""PLAIN authentication with correct username/password."""
self.controller.registerUser(self, 'foo', 'sesame')
self.controller.registerUser(self, 'jilles', 'sesame')
self.controller.registerUser(self, 'bar', 'sesame')
self.addClient()
self.sendLine(1, 'CAP LS 302')
capabilities = self.getCapLs(1)
self.assertIn('sasl', capabilities,
fail_msg='Does not have SASL as the controller claims.')
if capabilities['sasl'] is not None:
self.assertIn('PLAIN', capabilities['sasl'],
fail_msg='Does not have PLAIN mechanism as the controller '
'claims')
self.sendLine(1, 'AUTHENTICATE PLAIN')
m = self.getMessage(1, filter_pred=lambda m:m.command != 'NOTICE')
self.assertMessageEqual(m, command='AUTHENTICATE', params=['+'],
fail_msg='Sent “AUTHENTICATE PLAIN”, server should have '
'replied with “AUTHENTICATE +”, but instead sent: {msg}')
self.sendLine(1, 'AUTHENTICATE amlsbGVzAGppbGxlcwBzZXNhbWU=')
m = self.getMessage(1, filter_pred=lambda m:m.command != 'NOTICE')
self.assertMessageEqual(m, command='900',
fail_msg='Did not send 900 after correct SASL authentication.')
self.assertEqual(m.params[2], 'jilles', m,
fail_msg='900 should contain the account name as 3rd argument '
'({expects}), not {got}: {msg}')
@cases.SpecificationSelector.requiredBySpecification('IRCv3.1')
@cases.OptionalityHelper.skipUnlessHasMechanism('PLAIN')
def testPlainNoAuthzid(self):
"""“message = [authzid] UTF8NUL authcid UTF8NUL passwd
[…]
Upon receipt of the message, the server will verify the presented (in
the message) authentication identity (authcid) and password (passwd)
with the system authentication database, and it will verify that the
authentication credentials permit the client to act as the (presented
or derived) authorization identity (authzid). If both steps succeed,
the user is authenticated.
[…]
When no authorization identity is provided, the server derives an
authorization identity from the prepared representation of the
provided authentication identity string. This ensures that the
derivation of different representations of the authentication
identity produces the same authorization identity.”
-- <https://tools.ietf.org/html/rfc4616#section-2>
"""
self.controller.registerUser(self, 'foo', 'sesame')
self.controller.registerUser(self, 'jilles', 'sesame')
self.controller.registerUser(self, 'bar', 'sesame')
self.addClient()
self.sendLine(1, 'CAP LS 302')
capabilities = self.getCapLs(1)
self.assertIn('sasl', capabilities,
fail_msg='Does not have SASL as the controller claims.')
if capabilities['sasl'] is not None:
self.assertIn('PLAIN', capabilities['sasl'],
fail_msg='Does not have PLAIN mechanism as the controller '
'claims')
self.sendLine(1, 'AUTHENTICATE PLAIN')
m = self.getMessage(1, filter_pred=lambda m:m.command != 'NOTICE')
self.assertMessageEqual(m, command='AUTHENTICATE', params=['+'],
fail_msg='Sent “AUTHENTICATE PLAIN”, server should have '
'replied with “AUTHENTICATE +”, but instead sent: {msg}')
self.sendLine(1, 'AUTHENTICATE AGppbGxlcwBzZXNhbWU=')
m = self.getMessage(1, filter_pred=lambda m:m.command != 'NOTICE')
self.assertMessageEqual(m, command='900',
fail_msg='Did not send 900 after correct SASL authentication.')
self.assertEqual(m.params[2], 'jilles', m,
fail_msg='900 should contain the account name as 3rd argument '
'({expects}), not {got}: {msg}')
@cases.SpecificationSelector.requiredBySpecification('IRCv3.1')
def testMechanismNotAvailable(self):
"""“If authentication fails, a 904 or 905 numeric will be sent”
-- <http://ircv3.net/specs/extensions/sasl-3.1.html#the-authenticate-command>
"""
self.controller.registerUser(self, 'jilles', 'sesame')
self.addClient()
self.sendLine(1, 'CAP LS 302')
capabilities = self.getCapLs(1)
self.assertIn('sasl', capabilities,
fail_msg='Does not have SASL as the controller claims.')
self.sendLine(1, 'AUTHENTICATE FOO')
m = self.getRegistrationMessage(1)
self.assertMessageEqual(m, command='904',
fail_msg='Did not reply with 904 to “AUTHENTICATE FOO”: {msg}')
@cases.SpecificationSelector.requiredBySpecification('IRCv3.1')
@cases.OptionalityHelper.skipUnlessHasMechanism('PLAIN')
def testPlainLarge(self):
"""Test the client splits large AUTHENTICATE messages whose payload
is not a multiple of 400.
<http://ircv3.net/specs/extensions/sasl-3.1.html#the-authenticate-command>
"""
self.controller.registerUser(self, 'foo', 'bar'*100)
authstring = base64.b64encode(b'\x00'.join(
[b'foo', b'foo', b'bar'*100])).decode()
self.addClient()
self.sendLine(1, 'CAP LS 302')
capabilities = self.getCapLs(1)
self.assertIn('sasl', capabilities,
fail_msg='Does not have SASL as the controller claims.')
if capabilities['sasl'] is not None:
self.assertIn('PLAIN', capabilities['sasl'],
fail_msg='Does not have PLAIN mechanism as the controller '
'claims')
self.sendLine(1, 'AUTHENTICATE PLAIN')
m = self.getRegistrationMessage(1)
self.assertMessageEqual(m, command='AUTHENTICATE', params=['+'],
fail_msg='Sent “AUTHENTICATE PLAIN”, expected '
'“AUTHENTICATE +” as a response, but got: {msg}')
self.sendLine(1, 'AUTHENTICATE {}'.format(authstring[0:400]))
self.sendLine(1, 'AUTHENTICATE {}'.format(authstring[400:]))
self.confirmSuccessfulAuth()
def confirmSuccessfulAuth(self):
# TODO: check username/etc in this as well, so we can apply it to other tests
# TODO: may be in the other order
m = self.getRegistrationMessage(1)
self.assertMessageEqual(m, command='900',
fail_msg='Expected 900 (RPL_LOGGEDIN) after successful '
'login, but got: {msg}')
m = self.getRegistrationMessage(1)
self.assertMessageEqual(m, command='903',
fail_msg='Expected 903 (RPL_SASLSUCCESS) after successful '
'login, but got: {msg}')
# TODO: add a test for when the length of the authstring is greater than 800.
# I don't know how to do it, because it would make the registration
# message's length too big for it to be valid.
@cases.SpecificationSelector.requiredBySpecification('IRCv3.1')
@cases.OptionalityHelper.skipUnlessHasMechanism('PLAIN')
def testPlainLargeEquals400(self):
"""Test the client splits large AUTHENTICATE messages whose payload
is not a multiple of 400.
<http://ircv3.net/specs/extensions/sasl-3.1.html#the-authenticate-command>
"""
self.controller.registerUser(self, 'foo', 'bar'*97)
authstring = base64.b64encode(b'\x00'.join(
[b'foo', b'foo', b'bar'*97])).decode()
assert len(authstring) == 400, 'Bad test'
self.addClient()
self.sendLine(1, 'CAP LS 302')
capabilities = self.getCapLs(1)
self.assertIn('sasl', capabilities,
fail_msg='Does not have SASL as the controller claims.')
if capabilities['sasl'] is not None:
self.assertIn('PLAIN', capabilities['sasl'],
fail_msg='Does not have PLAIN mechanism as the controller '
'claims')
self.sendLine(1, 'AUTHENTICATE PLAIN')
m = self.getRegistrationMessage(1)
self.assertMessageEqual(m, command='AUTHENTICATE', params=['+'],
fail_msg='Sent “AUTHENTICATE PLAIN”, expected '
'“AUTHENTICATE +” as a response, but got: {msg}')
self.sendLine(1, 'AUTHENTICATE {}'.format(authstring))
self.sendLine(1, 'AUTHENTICATE +')
self.confirmSuccessfulAuth()
# TODO: add a test for when the length of the authstring is 800.
# I don't know how to do it, because it would make the registration
# message's length too big for it to be valid.
| 49.620879 | 85 | 0.639907 | 1,015 | 9,031 | 5.673892 | 0.214778 | 0.02431 | 0.036117 | 0.047751 | 0.745095 | 0.745095 | 0.727904 | 0.725126 | 0.690571 | 0.690571 | 0 | 0.024401 | 0.246706 | 9,031 | 181 | 86 | 49.895028 | 0.821255 | 0.198538 | 0 | 0.773438 | 0 | 0 | 0.268918 | 0.00399 | 0 | 0 | 0 | 0.01105 | 0.164063 | 1 | 0.054688 | false | 0.007813 | 0.015625 | 0 | 0.085938 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
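The base64 literals hard-coded in the SASL tests above are RFC 4616 PLAIN payloads, `base64(authzid NUL authcid NUL password)`, with an empty authzid in `testPlainNoAuthzid`. A quick check of where `AGppbGxlcwBzZXNhbWU=` comes from:

```python
# Rebuild the AUTHENTICATE payload used by testPlainNoAuthzid: empty authzid,
# then NUL, authcid, NUL, password, all base64-encoded.
import base64

authzid, authcid, password = b"", b"jilles", b"sesame"
payload = base64.b64encode(authzid + b"\x00" + authcid + b"\x00" + password).decode()
assert payload == "AGppbGxlcwBzZXNhbWU="  # the literal the test sends after 'AUTHENTICATE PLAIN'
```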
a1d1bfe26472ce13355c0d722502c1b931da3ce5 | 22,165 | py | Python | distantrs/proto/google/devtools/resultstore/v2/test_suite_pb2.py | antmicro/distant-rs | bb51b3594a5c6651a8091da6047e709c612ac6de | [
"Apache-2.0"
] | null | null | null | distantrs/proto/google/devtools/resultstore/v2/test_suite_pb2.py | antmicro/distant-rs | bb51b3594a5c6651a8091da6047e709c612ac6de | [
"Apache-2.0"
] | null | null | null | distantrs/proto/google/devtools/resultstore/v2/test_suite_pb2.py | antmicro/distant-rs | bb51b3594a5c6651a8091da6047e709c612ac6de | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/devtools/resultstore/v2/test_suite.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from distantrs.proto.google.devtools.resultstore.v2 import common_pb2 as google_dot_devtools_dot_resultstore_dot_v2_dot_common__pb2
from distantrs.proto.google.devtools.resultstore.v2 import file_pb2 as google_dot_devtools_dot_resultstore_dot_v2_dot_file__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/devtools/resultstore/v2/test_suite.proto',
package='google.devtools.resultstore.v2',
syntax='proto3',
serialized_options=b'\n\"com.google.devtools.resultstore.v2P\001ZIgoogle.golang.org/genproto/googleapis/devtools/resultstore/v2;resultstore',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n/google/devtools/resultstore/v2/test_suite.proto\x12\x1egoogle.devtools.resultstore.v2\x1a+google/devtools/resultstore/v2/common.proto\x1a)google/devtools/resultstore/v2/file.proto\"\xf9\x02\n\tTestSuite\x12\x12\n\nsuite_name\x18\x01 \x01(\t\x12\x33\n\x05tests\x18\x02 \x03(\x0b\x32$.google.devtools.resultstore.v2.Test\x12=\n\x08\x66\x61ilures\x18\x03 \x03(\x0b\x32+.google.devtools.resultstore.v2.TestFailure\x12\x39\n\x06\x65rrors\x18\x04 \x03(\x0b\x32).google.devtools.resultstore.v2.TestError\x12\x36\n\x06timing\x18\x06 \x01(\x0b\x32&.google.devtools.resultstore.v2.Timing\x12<\n\nproperties\x18\x07 \x03(\x0b\x32(.google.devtools.resultstore.v2.Property\x12\x33\n\x05\x66iles\x18\x08 \x03(\x0b\x32$.google.devtools.resultstore.v2.File\"\x93\x01\n\x04Test\x12=\n\ttest_case\x18\x01 \x01(\x0b\x32(.google.devtools.resultstore.v2.TestCaseH\x00\x12?\n\ntest_suite\x18\x02 \x01(\x0b\x32).google.devtools.resultstore.v2.TestSuiteH\x00\x42\x0b\n\ttest_type\"\x93\x04\n\x08TestCase\x12\x11\n\tcase_name\x18\x01 \x01(\t\x12\x12\n\nclass_name\x18\x02 \x01(\t\x12?\n\x06result\x18\x03 \x01(\x0e\x32/.google.devtools.resultstore.v2.TestCase.Result\x12=\n\x08\x66\x61ilures\x18\x04 \x03(\x0b\x32+.google.devtools.resultstore.v2.TestFailure\x12\x39\n\x06\x65rrors\x18\x05 \x03(\x0b\x32).google.devtools.resultstore.v2.TestError\x12\x36\n\x06timing\x18\x07 \x01(\x0b\x32&.google.devtools.resultstore.v2.Timing\x12<\n\nproperties\x18\x08 \x03(\x0b\x32(.google.devtools.resultstore.v2.Property\x12\x33\n\x05\x66iles\x18\t \x03(\x0b\x32$.google.devtools.resultstore.v2.File\"z\n\x06Result\x12\x16\n\x12RESULT_UNSPECIFIED\x10\x00\x12\r\n\tCOMPLETED\x10\x01\x12\x0f\n\x0bINTERRUPTED\x10\x02\x12\r\n\tCANCELLED\x10\x03\x12\x0c\n\x08\x46ILTERED\x10\x04\x12\x0b\n\x07SKIPPED\x10\x05\x12\x0e\n\nSUPPRESSED\x10\x06\"u\n\x0bTestFailure\x12\x17\n\x0f\x66\x61ilure_message\x18\x01 \x01(\t\x12\x16\n\x0e\x65xception_type\x18\x02 \x01(\t\x12\x13\n\x0bstack_trace\x18\x03 \x01(\t\x12\x10\n\x08\x65xpected\x18\x04 \x03(\t\x12\x0e\n\x06\x61\x63tual\x18\x05 \x03(\t\"O\n\tTestError\x12\x15\n\rerror_message\x18\x01 \x01(\t\x12\x16\n\x0e\x65xception_type\x18\x02 \x01(\t\x12\x13\n\x0bstack_trace\x18\x03 \x01(\tBq\n\"com.google.devtools.resultstore.v2P\x01ZIgoogle.golang.org/genproto/googleapis/devtools/resultstore/v2;resultstoreb\x06proto3'
,
dependencies=[google_dot_devtools_dot_resultstore_dot_v2_dot_common__pb2.DESCRIPTOR,google_dot_devtools_dot_resultstore_dot_v2_dot_file__pb2.DESCRIPTOR,])
_TESTCASE_RESULT = _descriptor.EnumDescriptor(
name='Result',
full_name='google.devtools.resultstore.v2.TestCase.Result',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='RESULT_UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='COMPLETED', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INTERRUPTED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CANCELLED', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FILTERED', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SKIPPED', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SUPPRESSED', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1111,
serialized_end=1233,
)
_sym_db.RegisterEnumDescriptor(_TESTCASE_RESULT)
_TESTSUITE = _descriptor.Descriptor(
name='TestSuite',
full_name='google.devtools.resultstore.v2.TestSuite',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='suite_name', full_name='google.devtools.resultstore.v2.TestSuite.suite_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tests', full_name='google.devtools.resultstore.v2.TestSuite.tests', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='failures', full_name='google.devtools.resultstore.v2.TestSuite.failures', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='errors', full_name='google.devtools.resultstore.v2.TestSuite.errors', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timing', full_name='google.devtools.resultstore.v2.TestSuite.timing', index=4,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='properties', full_name='google.devtools.resultstore.v2.TestSuite.properties', index=5,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='files', full_name='google.devtools.resultstore.v2.TestSuite.files', index=6,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=172,
serialized_end=549,
)
_TEST = _descriptor.Descriptor(
name='Test',
full_name='google.devtools.resultstore.v2.Test',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='test_case', full_name='google.devtools.resultstore.v2.Test.test_case', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='test_suite', full_name='google.devtools.resultstore.v2.Test.test_suite', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='test_type', full_name='google.devtools.resultstore.v2.Test.test_type',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=552,
serialized_end=699,
)
_TESTCASE = _descriptor.Descriptor(
name='TestCase',
full_name='google.devtools.resultstore.v2.TestCase',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='case_name', full_name='google.devtools.resultstore.v2.TestCase.case_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='class_name', full_name='google.devtools.resultstore.v2.TestCase.class_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result', full_name='google.devtools.resultstore.v2.TestCase.result', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='failures', full_name='google.devtools.resultstore.v2.TestCase.failures', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='errors', full_name='google.devtools.resultstore.v2.TestCase.errors', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timing', full_name='google.devtools.resultstore.v2.TestCase.timing', index=5,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='properties', full_name='google.devtools.resultstore.v2.TestCase.properties', index=6,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='files', full_name='google.devtools.resultstore.v2.TestCase.files', index=7,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_TESTCASE_RESULT,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=702,
serialized_end=1233,
)
_TESTFAILURE = _descriptor.Descriptor(
name='TestFailure',
full_name='google.devtools.resultstore.v2.TestFailure',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='failure_message', full_name='google.devtools.resultstore.v2.TestFailure.failure_message', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='exception_type', full_name='google.devtools.resultstore.v2.TestFailure.exception_type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='stack_trace', full_name='google.devtools.resultstore.v2.TestFailure.stack_trace', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expected', full_name='google.devtools.resultstore.v2.TestFailure.expected', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='actual', full_name='google.devtools.resultstore.v2.TestFailure.actual', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1235,
serialized_end=1352,
)
_TESTERROR = _descriptor.Descriptor(
name='TestError',
full_name='google.devtools.resultstore.v2.TestError',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='error_message', full_name='google.devtools.resultstore.v2.TestError.error_message', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='exception_type', full_name='google.devtools.resultstore.v2.TestError.exception_type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='stack_trace', full_name='google.devtools.resultstore.v2.TestError.stack_trace', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1354,
serialized_end=1433,
)
_TESTSUITE.fields_by_name['tests'].message_type = _TEST
_TESTSUITE.fields_by_name['failures'].message_type = _TESTFAILURE
_TESTSUITE.fields_by_name['errors'].message_type = _TESTERROR
_TESTSUITE.fields_by_name['timing'].message_type = google_dot_devtools_dot_resultstore_dot_v2_dot_common__pb2._TIMING
_TESTSUITE.fields_by_name['properties'].message_type = google_dot_devtools_dot_resultstore_dot_v2_dot_common__pb2._PROPERTY
_TESTSUITE.fields_by_name['files'].message_type = google_dot_devtools_dot_resultstore_dot_v2_dot_file__pb2._FILE
_TEST.fields_by_name['test_case'].message_type = _TESTCASE
_TEST.fields_by_name['test_suite'].message_type = _TESTSUITE
_TEST.oneofs_by_name['test_type'].fields.append(
_TEST.fields_by_name['test_case'])
_TEST.fields_by_name['test_case'].containing_oneof = _TEST.oneofs_by_name['test_type']
_TEST.oneofs_by_name['test_type'].fields.append(
_TEST.fields_by_name['test_suite'])
_TEST.fields_by_name['test_suite'].containing_oneof = _TEST.oneofs_by_name['test_type']
_TESTCASE.fields_by_name['result'].enum_type = _TESTCASE_RESULT
_TESTCASE.fields_by_name['failures'].message_type = _TESTFAILURE
_TESTCASE.fields_by_name['errors'].message_type = _TESTERROR
_TESTCASE.fields_by_name['timing'].message_type = google_dot_devtools_dot_resultstore_dot_v2_dot_common__pb2._TIMING
_TESTCASE.fields_by_name['properties'].message_type = google_dot_devtools_dot_resultstore_dot_v2_dot_common__pb2._PROPERTY
_TESTCASE.fields_by_name['files'].message_type = google_dot_devtools_dot_resultstore_dot_v2_dot_file__pb2._FILE
_TESTCASE_RESULT.containing_type = _TESTCASE
DESCRIPTOR.message_types_by_name['TestSuite'] = _TESTSUITE
DESCRIPTOR.message_types_by_name['Test'] = _TEST
DESCRIPTOR.message_types_by_name['TestCase'] = _TESTCASE
DESCRIPTOR.message_types_by_name['TestFailure'] = _TESTFAILURE
DESCRIPTOR.message_types_by_name['TestError'] = _TESTERROR
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestSuite = _reflection.GeneratedProtocolMessageType('TestSuite', (_message.Message,), {
'DESCRIPTOR' : _TESTSUITE,
'__module__' : 'google.devtools.resultstore.v2.test_suite_pb2'
# @@protoc_insertion_point(class_scope:google.devtools.resultstore.v2.TestSuite)
})
_sym_db.RegisterMessage(TestSuite)
Test = _reflection.GeneratedProtocolMessageType('Test', (_message.Message,), {
'DESCRIPTOR' : _TEST,
'__module__' : 'google.devtools.resultstore.v2.test_suite_pb2'
# @@protoc_insertion_point(class_scope:google.devtools.resultstore.v2.Test)
})
_sym_db.RegisterMessage(Test)
TestCase = _reflection.GeneratedProtocolMessageType('TestCase', (_message.Message,), {
'DESCRIPTOR' : _TESTCASE,
'__module__' : 'google.devtools.resultstore.v2.test_suite_pb2'
# @@protoc_insertion_point(class_scope:google.devtools.resultstore.v2.TestCase)
})
_sym_db.RegisterMessage(TestCase)
TestFailure = _reflection.GeneratedProtocolMessageType('TestFailure', (_message.Message,), {
'DESCRIPTOR' : _TESTFAILURE,
'__module__' : 'google.devtools.resultstore.v2.test_suite_pb2'
# @@protoc_insertion_point(class_scope:google.devtools.resultstore.v2.TestFailure)
})
_sym_db.RegisterMessage(TestFailure)
TestError = _reflection.GeneratedProtocolMessageType('TestError', (_message.Message,), {
'DESCRIPTOR' : _TESTERROR,
'__module__' : 'google.devtools.resultstore.v2.test_suite_pb2'
# @@protoc_insertion_point(class_scope:google.devtools.resultstore.v2.TestError)
})
_sym_db.RegisterMessage(TestError)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 49.037611 | 2,335 | 0.765351 | 2,907 | 22,165 | 5.508428 | 0.077399 | 0.044464 | 0.087866 | 0.107912 | 0.810029 | 0.791857 | 0.766939 | 0.696996 | 0.666708 | 0.660151 | 0 | 0.038178 | 0.111347 | 22,165 | 451 | 2,336 | 49.146341 | 0.774788 | 0.028468 | 0 | 0.65942 | 1 | 0.004831 | 0.234537 | 0.196849 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.014493 | 0 | 0.014493 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
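The generated module above exposes the `TestSuite`, `Test`, `TestCase`, `TestFailure` and `TestError` message classes. An illustrative round-trip using the standard protobuf Python API (assuming the `distantrs` package from this record is importable; the field values are made up):

```python
from distantrs.proto.google.devtools.resultstore.v2 import test_suite_pb2

# Build a TestCase and attach a failure; field names mirror the descriptors above.
case = test_suite_pb2.TestCase(
    case_name="testPlain",
    class_name="SaslTestCase",
    result=test_suite_pb2.TestCase.COMPLETED,
)
case.failures.add(failure_message="expected 900", exception_type="AssertionError")

# Wrap it in a suite via the Test oneof and round-trip through the wire format.
suite = test_suite_pb2.TestSuite(suite_name="irctest")
suite.tests.add(test_case=case)

data = suite.SerializeToString()
again = test_suite_pb2.TestSuite.FromString(data)
assert again.tests[0].test_case.case_name == "testPlain"
```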
6293567e2eac6d1c53a488afe6a997e61b1429f8 | 3,502 | py | Python | src/tests/mapping/rows/test_weighted_linear_model.py | zaxmks/demo-data-compliance-service | 372e612c570aaf5b512bec17627f825e880add67 | [
"CNRI-Python",
"CECILL-B"
] | null | null | null | src/tests/mapping/rows/test_weighted_linear_model.py | zaxmks/demo-data-compliance-service | 372e612c570aaf5b512bec17627f825e880add67 | [
"CNRI-Python",
"CECILL-B"
] | null | null | null | src/tests/mapping/rows/test_weighted_linear_model.py | zaxmks/demo-data-compliance-service | 372e612c570aaf5b512bec17627f825e880add67 | [
"CNRI-Python",
"CECILL-B"
] | null | null | null | from mock import Mock
from src.mapping.values.value_match import ValueMatch
from src.mapping.rows.weighted_linear_model import WeightedLinearModel
config = {"weights": {"email": 1, "name": 3, "ssn": 6}, "null_confidence": 0.5}
def test_init():
wlm = WeightedLinearModel(**config)
assert wlm.weights["email"] == 1
assert wlm.null_confidence == 0.5
def test_predict_perfect():
wlm = WeightedLinearModel(**config)
column_relations = [Mock(), Mock(), Mock()]
column_relations[0].target_column_name = "email"
column_relations[1].target_column_name = "name"
column_relations[2].target_column_name = "ssn"
value_match_group = [
ValueMatch(
target_index=1,
confidence=1,
target_text="reed.coke@enron.com",
source_column="email_address",
target_column="email",
),
ValueMatch(
target_index=1,
confidence=1,
target_text="Reed A. Coke",
source_column="fullname",
target_column="name",
),
ValueMatch(
target_index=1,
confidence=1,
target_text="123-45-6789",
source_column="social",
target_column="ssn",
),
]
confidence = wlm.predict(column_relations, value_match_group)
assert confidence > 0.99
def test_predict_with_null_match():
wlm = WeightedLinearModel(**config)
column_relations = [Mock(), Mock(), Mock()]
column_relations[0].target_column_name = "email"
column_relations[1].target_column_name = "name"
column_relations[2].target_column_name = "ssn"
value_match_group = [
ValueMatch(
target_index=1,
confidence=1,
target_text="reed.coke@enron.com",
source_column="email_address",
target_column="email",
),
ValueMatch(
target_index=1,
confidence=1,
target_text="Reed A. Coke",
source_column="fullname",
target_column="name",
),
]
confidence = wlm.predict(column_relations, value_match_group)
assert confidence == 0.7
def test_predict_with_unweighted_column():
wlm = WeightedLinearModel(**config)
column_relations = [Mock(), Mock(), Mock(), Mock()]
column_relations[0].target_column_name = "email"
column_relations[1].target_column_name = "name"
column_relations[2].target_column_name = "ssn"
column_relations[3].target_column_name = "unweighted_column"
value_match_group = [
ValueMatch(
target_index=1,
confidence=1,
target_text="reed.coke@enron.com",
source_column="email_address",
target_column="email",
),
ValueMatch(
target_index=1,
confidence=1,
target_text="Reed A. Coke",
source_column="fullname",
target_column="name",
),
ValueMatch(
target_index=1,
confidence=1,
target_text="123-45-6789",
source_column="social",
target_column="ssn",
),
ValueMatch(
target_index=1,
confidence=1,
target_text="text from unweighted column",
source_column="unweughted_column",
target_column="unweighted_column",
),
]
confidence = wlm.predict(column_relations, value_match_group)
assert confidence > 0.99 | 31.267857 | 79 | 0.597658 | 369 | 3,502 | 5.384824 | 0.154472 | 0.114746 | 0.10468 | 0.099648 | 0.788123 | 0.788123 | 0.764972 | 0.764972 | 0.719678 | 0.719678 | 0 | 0.025 | 0.291833 | 3,502 | 112 | 80 | 31.267857 | 0.77621 | 0 | 0 | 0.813725 | 0 | 0 | 0.107337 | 0 | 0 | 0 | 0 | 0 | 0.04902 | 1 | 0.039216 | false | 0 | 0.029412 | 0 | 0.068627 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
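Taken together, the three tests above pin down the scoring rule closely enough to restate it: a weighted mean of per-column match confidences, with `null_confidence` substituted for weighted columns that produced no `ValueMatch`, and columns outside the weight map ignored. This is an inference from the expected values, not the actual `WeightedLinearModel` code:

```python
# Inferred scoring rule; reproduces the expected confidences from the tests.
weights = {"email": 1, "name": 3, "ssn": 6}
null_confidence = 0.5

def implied_confidence(matched):
    """matched maps target column -> match confidence for columns that matched."""
    total = sum(weights.values())
    score = sum(w * matched.get(col, null_confidence) for col, w in weights.items())
    return score / total

assert implied_confidence({"email": 1, "name": 1, "ssn": 1}) > 0.99  # test_predict_perfect
assert implied_confidence({"email": 1, "name": 1}) == 0.7            # test_predict_with_null_match
```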
62f3422f98860b5538d2299657828d6c8161f716 | 59 | py | Python | rdf_io/protocols/__init__.py | Ircam-Web/django-rdf-io | 5854fb5e57e92103595c95914c0adb092059dd89 | [
"CC0-1.0"
] | null | null | null | rdf_io/protocols/__init__.py | Ircam-Web/django-rdf-io | 5854fb5e57e92103595c95914c0adb092059dd89 | [
"CC0-1.0"
] | null | null | null | rdf_io/protocols/__init__.py | Ircam-Web/django-rdf-io | 5854fb5e57e92103595c95914c0adb092059dd89 | [
"CC0-1.0"
] | null | null | null | from .api import *
from .rdf4j import *
from .ldp import *
| 14.75 | 20 | 0.694915 | 9 | 59 | 4.555556 | 0.555556 | 0.487805 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021277 | 0.20339 | 59 | 3 | 21 | 19.666667 | 0.851064 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
1a0f3b5591829c58223921a0a6aeaa5ec89a9e49 | 9,883 | py | Python | test/test_model_gradient.py | Argonne-National-Laboratory/pyoptmat | a6e5e8d0b93c77374d4ccbc65a86262eec5df77b | [
"MIT"
] | null | null | null | test/test_model_gradient.py | Argonne-National-Laboratory/pyoptmat | a6e5e8d0b93c77374d4ccbc65a86262eec5df77b | [
"MIT"
] | 1 | 2022-03-30T22:20:38.000Z | 2022-03-31T15:02:22.000Z | test/test_model_gradient.py | Argonne-National-Laboratory/pyoptmat | a6e5e8d0b93c77374d4ccbc65a86262eec5df77b | [
"MIT"
] | 2 | 2021-11-16T15:13:54.000Z | 2022-01-06T21:35:42.000Z | import unittest
import numpy as np
import torch
from torch.autograd import Variable
import torch.nn
from pyoptmat import ode, models, flowrules, hardening, utility, damage
from pyoptmat.temperature import ConstantParameter as CP
torch.set_default_tensor_type(torch.DoubleTensor)
torch.autograd.set_detect_anomaly(True)
def differ(mfn, p0, eps = 1.0e-6):
v0 = mfn(p0).numpy()
puse = p0.numpy()
result = np.zeros(puse.shape)
for ind, val in np.ndenumerate(puse):
dp = np.abs(val) * eps
if dp < eps:
dp = eps
pcurr = np.copy(puse)
pcurr[ind] += dp
v1 = mfn(torch.tensor(pcurr)).numpy()
result[ind] = (v1 - v0) / dp
return result
def simple_diff(fn, p0):
res = []
for i in range(len(p0)):
def mfn(pi):
ps = [pp for pp in p0]
ps[i] = pi
return fn(ps)
res.append(differ(mfn, p0[i]))
return res
class CommonGradient:
def test_gradient_strain(self):
bmodel = self.model_fn([Variable(pi,
requires_grad = True) for pi in self.p])
res = torch.norm(bmodel.solve_strain(self.times, self.strains, self.temperatures))
res.backward()
grad = self.extract_grad(bmodel)
ngrad = simple_diff(
lambda p: torch.norm(self.model_fn(p).solve_strain(self.times, self.strains, self.temperatures)),
self.p)
for i,(p1, p2) in enumerate(zip(grad, ngrad)):
print(i,p1, p2)
self.assertTrue(np.allclose(p1, p2, rtol = 1e-4))
def test_gradient_stress(self):
bmodel = self.model_fn([Variable(pi,
requires_grad = True) for pi in self.p])
res = torch.norm(bmodel.solve_stress(self.times, self.stresses, self.temperatures))
res.backward()
grad = self.extract_grad(bmodel)
ngrad = simple_diff(
lambda p: torch.norm(self.model_fn(p).solve_stress(self.times, self.stresses, self.temperatures)),
self.p)
for i,(p1, p2) in enumerate(zip(grad, ngrad)):
print(i,p1, p2)
self.assertTrue(np.allclose(p1, p2, rtol = 1e-4))
class TestPerfectViscoplasticity(unittest.TestCase, CommonGradient):
def setUp(self):
self.ntime = 10
self.nbatch = 10
self.E = torch.tensor(100000.0)
self.n = torch.tensor(5.2)
self.eta = torch.tensor(110.0)
self.p = [self.E, self.n, self.eta]
self.model_fn = lambda p: models.ModelIntegrator(models.InelasticModel(CP(p[0]),
flowrules.PerfectViscoplasticity(CP(p[1]), CP(p[2]))),
use_adjoint = False)
self.extract_grad = lambda m: np.array(
[m.model.E.pvalue.grad.numpy(), m.model.flowrule.n.pvalue.grad.numpy(), m.model.flowrule.eta.pvalue.grad.numpy()])
self.times = torch.transpose(
torch.tensor([np.linspace(0,1,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.strains = torch.transpose(
torch.tensor([np.linspace(0,0.003,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.stresses = torch.transpose(
torch.tensor([np.linspace(0,100.0,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.temperatures = torch.zeros_like(self.strains)
class TestIsotropicOnly(unittest.TestCase, CommonGradient):
def setUp(self):
self.ntime = 10
self.nbatch = 10
self.E = torch.tensor(100000.0)
self.n = torch.tensor(5.2)
self.eta = torch.tensor(110.0)
self.R = torch.tensor(100.0)
self.d = torch.tensor(5.1)
self.s0 = torch.tensor(10.0)
self.p = [self.E, self.n, self.eta, self.s0, self.R, self.d]
self.model_fn = lambda p: models.ModelIntegrator(models.InelasticModel(CP(p[0]),
flowrules.IsoKinViscoplasticity(CP(p[1]), CP(p[2]), CP(p[3]),
hardening.VoceIsotropicHardeningModel(CP(p[4]),CP(p[5])),
hardening.NoKinematicHardeningModel())),
use_adjoint = False)
self.extract_grad = lambda m: np.array(
[m.model.E.pvalue.grad.numpy(), m.model.flowrule.n.pvalue.grad.numpy(), m.model.flowrule.eta.pvalue.grad.numpy(),
m.model.flowrule.s0.pvalue.grad.numpy(), m.model.flowrule.isotropic.R.pvalue.grad.numpy(),
m.model.flowrule.isotropic.d.pvalue.grad.numpy()])
self.times = torch.transpose(
torch.tensor([np.linspace(0,1,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.strains = torch.transpose(
torch.tensor([np.linspace(0,0.003,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.stresses = torch.transpose(
torch.tensor([np.linspace(0,200.0,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.temperatures = torch.zeros_like(self.strains)
class TestHardeningViscoplasticity(unittest.TestCase, CommonGradient):
def setUp(self):
self.ntime = 10
self.nbatch = 10
self.E = torch.tensor(100000.0)
self.n = torch.tensor(5.2)
self.eta = torch.tensor(110.0)
self.R = torch.tensor(100.0)
self.d = torch.tensor(5.1)
self.C = torch.tensor(1000.0)
self.g = torch.tensor(10.0)
self.s0 = torch.tensor(10.0)
self.p = [self.E, self.n, self.eta, self.s0, self.R, self.d, self.C, self.g]
self.model_fn = lambda p: models.ModelIntegrator(models.InelasticModel(CP(p[0]),
flowrules.IsoKinViscoplasticity(CP(p[1]), CP(p[2]), CP(p[3]),
hardening.VoceIsotropicHardeningModel(CP(p[4]),CP(p[5])),
hardening.FAKinematicHardeningModel(CP(p[6]),CP(p[7])))),
use_adjoint = False)
self.extract_grad = lambda m: np.array(
[m.model.E.pvalue.grad.numpy(), m.model.flowrule.n.pvalue.grad.numpy(), m.model.flowrule.eta.pvalue.grad.numpy(),
m.model.flowrule.s0.pvalue.grad.numpy(), m.model.flowrule.isotropic.R.pvalue.grad.numpy(),
m.model.flowrule.isotropic.d.pvalue.grad.numpy(),
m.model.flowrule.kinematic.C.pvalue.grad.numpy(), m.model.flowrule.kinematic.g.pvalue.grad.numpy()])
self.times = torch.transpose(
torch.tensor([np.linspace(0,1,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.strains = torch.transpose(
torch.tensor([np.linspace(0,0.003,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.stresses = torch.transpose(
torch.tensor([np.linspace(0,200.0,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.temperatures = torch.zeros_like(self.strains)
class TestHardeningViscoplasticityDamage(unittest.TestCase, CommonGradient):
def setUp(self):
self.ntime = 10
self.nbatch = 10
self.E = torch.tensor(100000.0)
self.n = torch.tensor(5.2)
self.eta = torch.tensor(110.0)
self.R = torch.tensor(100.0)
self.d = torch.tensor(5.1)
self.C = torch.tensor(1000.0)
self.g = torch.tensor(10.0)
self.s0 = torch.tensor(10.0)
self.A = torch.tensor(2000.0)
self.xi = torch.tensor(6.5)
self.phi = torch.tensor(1.7)
self.p = [self.E, self.n, self.eta, self.s0, self.R, self.d, self.C, self.g, self.A, self.xi, self.phi]
self.model_fn = lambda p: models.ModelIntegrator(models.InelasticModel(CP(p[0]),
flowrules.IsoKinViscoplasticity(CP(p[1]), CP(p[2]), CP(p[3]),
hardening.VoceIsotropicHardeningModel(CP(p[4]),CP(p[5])),
hardening.FAKinematicHardeningModel(CP(p[6]),CP(p[7]))),
dmodel = damage.HayhurstLeckie(CP(p[8]), CP(p[9]), CP(p[10]))),
use_adjoint = False)
self.extract_grad = lambda m: np.array(
[m.model.E.pvalue.grad.numpy(), m.model.flowrule.n.pvalue.grad.numpy(), m.model.flowrule.eta.pvalue.grad.numpy(),
m.model.flowrule.s0.pvalue.grad.numpy(), m.model.flowrule.isotropic.R.pvalue.grad.numpy(),
m.model.flowrule.isotropic.d.pvalue.grad.numpy(),
m.model.flowrule.kinematic.C.pvalue.grad.numpy(), m.model.flowrule.kinematic.g.pvalue.grad.numpy(),
m.model.dmodel.A.pvalue.grad.numpy(), m.model.dmodel.xi.pvalue.grad.numpy(),
m.model.dmodel.phi.pvalue.grad.numpy()])
self.times = torch.transpose(
torch.tensor([np.linspace(0,1,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.strains = torch.transpose(
torch.tensor([np.linspace(0,0.03,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.stresses = torch.transpose(
torch.tensor([np.linspace(0,200,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.temperatures = torch.zeros_like(self.strains)
class TestChabocheViscoplasticity(unittest.TestCase, CommonGradient):
def setUp(self):
self.ntime = 10
self.nbatch = 4
self.E = torch.tensor(100000.0)
self.n = torch.tensor(5.2)
self.eta = torch.tensor(110.0)
self.R = torch.tensor(100.0)
self.d = torch.tensor(5.1)
self.C = torch.tensor([1000.0,750.0,100.0])
self.g = torch.tensor([10.0, 1.2, 8.6])
self.s0 = torch.tensor(10.0)
self.p = [self.E, self.n, self.eta, self.s0, self.R, self.d, self.C, self.g]
self.model_fn = lambda p: models.ModelIntegrator(models.InelasticModel(CP(p[0]),
flowrules.IsoKinViscoplasticity(CP(p[1]), CP(p[2]), CP(p[3]),
hardening.VoceIsotropicHardeningModel(CP(p[4]),CP(p[5])),
hardening.ChabocheHardeningModel(CP(p[6]),CP(p[7])))),
use_adjoint = False)
self.extract_grad = lambda m: np.array(
[m.model.E.pvalue.grad.numpy(), m.model.flowrule.n.pvalue.grad.numpy(), m.model.flowrule.eta.pvalue.grad.numpy(),
m.model.flowrule.s0.pvalue.grad.numpy(), m.model.flowrule.isotropic.R.pvalue.grad.numpy(),
m.model.flowrule.isotropic.d.pvalue.grad.numpy(),
m.model.flowrule.kinematic.C.pvalue.grad.numpy(), m.model.flowrule.kinematic.g.pvalue.grad.numpy()])
self.times = torch.transpose(
torch.tensor([np.linspace(0,1,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.strains = torch.transpose(
torch.tensor([np.linspace(0,0.003,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.stresses = torch.transpose(
torch.tensor([np.linspace(0,200.0,self.ntime) for i in range(self.nbatch)]), 1, 0)
self.temperatures = torch.zeros_like(self.strains)
| 39.690763 | 122 | 0.65729 | 1,514 | 9,883 | 4.261559 | 0.102378 | 0.088655 | 0.083695 | 0.076875 | 0.850434 | 0.850434 | 0.838965 | 0.835865 | 0.819436 | 0.819436 | 0 | 0.040841 | 0.177476 | 9,883 | 248 | 123 | 39.850806 | 0.75286 | 0 | 0 | 0.668342 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01005 | 1 | 0.050251 | false | 0 | 0.035176 | 0 | 0.130653 | 0.01005 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
a7edb5992f43617f174367e05f77ff26fe34727d | 55 | py | Python | pms7003/__init__.py | m1dnight/pms7003 | 1d063523e40f258eaf8fe857418f81eab80950a7 | [
"MIT"
] | 20 | 2018-12-05T06:30:36.000Z | 2021-11-15T00:18:30.000Z | pms7003/__init__.py | m1dnight/pms7003 | 1d063523e40f258eaf8fe857418f81eab80950a7 | [
"MIT"
] | 4 | 2019-09-06T18:35:03.000Z | 2021-07-01T07:58:03.000Z | pms7003/__init__.py | m1dnight/pms7003 | 1d063523e40f258eaf8fe857418f81eab80950a7 | [
"MIT"
] | 4 | 2019-06-19T15:40:14.000Z | 2021-06-05T08:12:23.000Z | from .pms7003 import *
from .pms7003_threading import * | 27.5 | 32 | 0.8 | 7 | 55 | 6.142857 | 0.571429 | 0.511628 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 0.127273 | 55 | 2 | 32 | 27.5 | 0.729167 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
c507d4ed78c6eb7d2309acbaa9a3849653adc3bf | 96 | py | Python | venv/lib/python3.8/site-packages/pip/_vendor/distlib/markers.py | GiulianaPola/select_repeats | 17a0d053d4f874e42cf654dd142168c2ec8fbd11 | [
"MIT"
] | 2 | 2022-03-13T01:58:52.000Z | 2022-03-31T06:07:54.000Z | venv/lib/python3.8/site-packages/pip/_vendor/distlib/markers.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | 19 | 2021-11-20T04:09:18.000Z | 2022-03-23T15:05:55.000Z | venv/lib/python3.8/site-packages/pip/_vendor/distlib/markers.py | DesmoSearch/Desmobot | b70b45df3485351f471080deb5c785c4bc5c4beb | [
"MIT"
] | null | null | null | /home/runner/.cache/pip/pool/e8/07/37/7027c504445ec621125883979a0f9aa483fc9767ac69f3525f728ccbef | 96 | 96 | 0.895833 | 9 | 96 | 9.555556 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.46875 | 0 | 96 | 1 | 96 | 96 | 0.427083 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
c55575d18071515973916f62cb58788a01868503 | 330 | py | Python | seleniumwire/thirdparty/mitmproxy/addons/__init__.py | KozminMoci/selenium-wire | 063c44ab42ac5e53e28c8a8c49c9ae7036bd878b | [
"MIT"
] | 975 | 2018-06-23T10:50:42.000Z | 2022-03-31T00:56:03.000Z | seleniumwire/thirdparty/mitmproxy/addons/__init__.py | KozminMoci/selenium-wire | 063c44ab42ac5e53e28c8a8c49c9ae7036bd878b | [
"MIT"
] | 492 | 2018-07-30T12:49:51.000Z | 2022-03-31T12:46:56.000Z | seleniumwire/thirdparty/mitmproxy/addons/__init__.py | KozminMoci/selenium-wire | 063c44ab42ac5e53e28c8a8c49c9ae7036bd878b | [
"MIT"
] | 149 | 2018-08-29T06:53:12.000Z | 2022-03-31T09:23:56.000Z | from seleniumwire.thirdparty.mitmproxy.addons import core
from seleniumwire.thirdparty.mitmproxy.addons import streambodies
from seleniumwire.thirdparty.mitmproxy.addons import upstream_auth
def default_addons():
return [
core.Core(),
streambodies.StreamBodies(),
upstream_auth.UpstreamAuth(),
]
| 27.5 | 66 | 0.757576 | 33 | 330 | 7.484848 | 0.424242 | 0.194332 | 0.315789 | 0.425101 | 0.57085 | 0.57085 | 0 | 0 | 0 | 0 | 0 | 0 | 0.163636 | 330 | 11 | 67 | 30 | 0.894928 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | true | 0 | 0.333333 | 0.111111 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 6 |
c56b3a5d44759660f070678700378ed4a1b28b78 | 25 | py | Python | thinkplot/__init__.py | bwsturm/ThinkBayes2 | 79bf1d334fff428c701c33078c9f7c82f10cf751 | [
"MIT"
] | 1 | 2019-12-25T07:05:33.000Z | 2019-12-25T07:05:33.000Z | thinkplot/__init__.py | bwsturm/ThinkBayes2 | 79bf1d334fff428c701c33078c9f7c82f10cf751 | [
"MIT"
] | null | null | null | thinkplot/__init__.py | bwsturm/ThinkBayes2 | 79bf1d334fff428c701c33078c9f7c82f10cf751 | [
"MIT"
] | 1 | 2021-03-11T13:57:03.000Z | 2021-03-11T13:57:03.000Z | from .thinkplot import *
| 12.5 | 24 | 0.76 | 3 | 25 | 6.333333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.16 | 25 | 1 | 25 | 25 | 0.904762 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
3dc36ffa856766abce37172db29c2d739916788e | 169 | py | Python | opencolorio_config_aces/config/cg/__init__.py | AcademySoftwareFoundation/OpenColorIO-Config-ACES | 79e07061e28a81d1bb0cbfd6b5d5376a35025b58 | [
"BSD-3-Clause"
] | 52 | 2020-05-19T05:05:11.000Z | 2022-03-29T20:20:42.000Z | opencolorio_config_aces/config/cg/__init__.py | AcademySoftwareFoundation/OpenColorIO-Config-ACES | 79e07061e28a81d1bb0cbfd6b5d5376a35025b58 | [
"BSD-3-Clause"
] | 41 | 2020-05-17T03:18:24.000Z | 2022-03-31T12:02:35.000Z | opencolorio_config_aces/config/cg/__init__.py | AcademySoftwareFoundation/OpenColorIO-Config-ACES | 79e07061e28a81d1bb0cbfd6b5d5376a35025b58 | [
"BSD-3-Clause"
] | 12 | 2020-05-18T18:21:57.000Z | 2022-03-29T20:00:55.000Z | # SPDX-License-Identifier: BSD-3-Clause
# Copyright Contributors to the OpenColorIO Project.
from .generate import generate_config_cg
__all__ = ['generate_config_cg']
| 24.142857 | 52 | 0.804734 | 22 | 169 | 5.818182 | 0.818182 | 0.21875 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006667 | 0.112426 | 169 | 6 | 53 | 28.166667 | 0.846667 | 0.52071 | 0 | 0 | 1 | 0 | 0.230769 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
3dd21e32e3dfa2a1e018fa1fe441e54cfd88855a | 40 | py | Python | gentsp/__init__.py | charon25/GeneticTSPSolver | 224e2232d49e390e690f910de5167409063c23c3 | [
"MIT"
] | null | null | null | gentsp/__init__.py | charon25/GeneticTSPSolver | 224e2232d49e390e690f910de5167409063c23c3 | [
"MIT"
] | null | null | null | gentsp/__init__.py | charon25/GeneticTSPSolver | 224e2232d49e390e690f910de5167409063c23c3 | [
"MIT"
] | null | null | null | from gentsp.genetic_tsp import TSPSolver | 40 | 40 | 0.9 | 6 | 40 | 5.833333 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.075 | 40 | 1 | 40 | 40 | 0.945946 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
9a76833e22f689c76e96f834a9f6666db2b1ffed | 1,788 | py | Python | exp/exp007/model_list.py | fkubota/kaggle-Rainforest-Connection-Species-Audio-Detection | 7134edff0ba1c60f597b64a0efd953b7707b98e1 | [
"MIT"
] | 1 | 2021-02-24T03:25:51.000Z | 2021-02-24T03:25:51.000Z | exp/exp008/model_list.py | fkubota/kaggle-Rainforest-Connection-Species-Audio-Detection | 7134edff0ba1c60f597b64a0efd953b7707b98e1 | [
"MIT"
] | null | null | null | exp/exp008/model_list.py | fkubota/kaggle-Rainforest-Connection-Species-Audio-Detection | 7134edff0ba1c60f597b64a0efd953b7707b98e1 | [
"MIT"
] | null | null | null | from ipdb import set_trace as st
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import models
####################################################################
# ResNet wrappers: ResNet18 and ResNet50
####################################################################
class ResNet18(nn.Module):
def __init__(self, params):
super().__init__()
self.__class__.__name__ = 'ResNet18'
num_classes = params['n_classes']
pretrained = params['pretrained']
self.resnet = models.resnet18(pretrained=pretrained)
in_features = self.resnet.fc.in_features
self.resnet.avgpool = nn.AdaptiveAvgPool2d(output_size=(1, 1))
self.resnet.fc = nn.Linear(in_features, num_classes)
def forward(self, x):
x = self.resnet(x)
multiclass_proba = F.softmax(x, dim=1)
multilabel_proba = torch.sigmoid(x)
return {
"output": x,
"output_softmax": multiclass_proba,
"output_sigmoid": multilabel_proba
}
class ResNet50(nn.Module):
def __init__(self, params):
super().__init__()
self.__class__.__name__ = 'ResNet50'
num_classes = params['n_classes']
pretrained = params['pretrained']
self.resnet = models.resnet50(pretrained=pretrained)
in_features = self.resnet.fc.in_features
self.resnet.avgpool = nn.AdaptiveAvgPool2d(output_size=(1, 1))
self.resnet.fc = nn.Linear(in_features, num_classes)
def forward(self, x):
x = self.resnet(x)
multiclass_proba = F.softmax(x, dim=1)
multilabel_proba = torch.sigmoid(x)
return {
"output": x,
"output_softmax": multiclass_proba,
"output_sigmoid": multilabel_proba
}
| 32.509091 | 70 | 0.582774 | 193 | 1,788 | 5.093264 | 0.238342 | 0.101729 | 0.056968 | 0.081384 | 0.824008 | 0.824008 | 0.824008 | 0.824008 | 0.824008 | 0.824008 | 0 | 0.016248 | 0.242729 | 1,788 | 54 | 71 | 33.111111 | 0.709749 | 0.004474 | 0 | 0.697674 | 0 | 0 | 0.074481 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.093023 | false | 0 | 0.116279 | 0 | 0.302326 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
9a80668cc34db8f6d97c026026b786a4645e70e3 | 12,664 | py | Python | tests/checks/mock/test_haproxy.py | joewilliams/dd-agent | 489902eeef5b2f66c4878cb8a42e79c54883c042 | [
"BSD-3-Clause"
] | 11 | 2017-09-13T03:31:51.000Z | 2019-06-05T19:38:09.000Z | tests/checks/mock/test_haproxy.py | joewilliams/dd-agent | 489902eeef5b2f66c4878cb8a42e79c54883c042 | [
"BSD-3-Clause"
] | 1 | 2021-02-23T14:54:36.000Z | 2021-02-23T14:54:36.000Z | tests/checks/mock/test_haproxy.py | joewilliams/dd-agent | 489902eeef5b2f66c4878cb8a42e79c54883c042 | [
"BSD-3-Clause"
] | 7 | 2016-09-06T18:13:16.000Z | 2020-11-13T10:54:13.000Z | from collections import defaultdict
import copy
# 3p
import mock
# project
from tests.checks.common import AgentCheckTest
MOCK_DATA = """# pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq,dresp,ereq,econ,eresp,wretr,wredis,status,weight,act,bck,chkfail,chkdown,lastchg,downtime,qlimit,pid,iid,sid,throttle,lbtot,tracked,type,rate,rate_lim,rate_max,check_status,check_code,check_duration,hrsp_1xx,hrsp_2xx,hrsp_3xx,hrsp_4xx,hrsp_5xx,hrsp_other,hanafail,req_rate,req_rate_max,req_tot,cli_abrt,srv_abrt,
a,FRONTEND,,,1,2,12,1,11,11,0,0,0,,,,,OPEN,,,,,,,,,1,1,0,,,,0,1,0,2,,,,0,1,0,0,0,0,,1,1,1,,,
a,BACKEND,0,0,0,0,12,0,11,11,0,0,,0,0,0,0,UP,0,0,0,,0,1221810,0,,1,1,0,,0,,1,0,,0,,,,0,0,0,0,0,0,,,,,0,0,
b,FRONTEND,,,1,2,12,11,11,0,0,0,0,,,,,OPEN,,,,,,,,,1,2,0,,,,0,0,0,1,,,,,,,,,,,0,0,0,,,
b,i-1,0,0,0,1,,1,1,0,,0,,0,0,0,0,UP 1/2,1,1,0,0,1,1,30,,1,3,1,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
b,i-2,0,0,1,1,,1,1,0,,0,,0,0,0,0,UP 1/2,1,1,0,0,0,1,0,,1,3,2,,71,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
b,i-3,0,0,0,1,,1,1,0,,0,,0,0,0,0,UP,1,1,0,0,0,1,0,,1,3,3,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
b,i-4,0,0,0,1,,1,1,0,,0,,0,0,0,0,DOWN,1,1,0,0,0,1,0,,1,3,3,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
b,i-5,0,0,0,1,,1,1,0,,0,,0,0,0,0,MAINT,1,1,0,0,0,1,0,,1,3,3,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
b,BACKEND,0,0,1,2,0,421,1,0,0,0,,0,0,0,0,UP,6,6,0,,0,1,0,,1,3,0,,421,,1,0,,1,,,,,,,,,,,,,,0,0,
c,i-1,0,0,0,1,,1,1,0,,0,,0,0,0,0,UP,1,1,0,0,1,1,30,,1,3,1,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
c,i-2,0,0,0,1,,1,1,0,,0,,0,0,0,0,DOWN (agent),1,1,0,0,1,1,30,,1,3,1,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
c,i-3,0,0,0,1,,1,1,0,,0,,0,0,0,0,NO CHECK,1,1,0,0,1,1,30,,1,3,1,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
c,BACKEND,0,0,1,2,0,421,1,0,0,0,,0,0,0,0,UP,6,6,0,,0,1,0,,1,3,0,,421,,1,0,,1,,,,,,,,,,,,,,0,0,
"""
AGG_STATUSES_BY_SERVICE = (
(['status:available', 'service:a'], 1),
(['status:available', 'service:b'], 4),
(['status:unavailable', 'service:b'], 2),
(['status:available', 'service:c'], 1),
(['status:unavailable', 'service:c'], 2)
)
AGG_STATUSES = (
(['status:available'], 6),
(['status:unavailable'], 4)
)
class TestCheckHAProxy(AgentCheckTest):
CHECK_NAME = 'haproxy'
BASE_CONFIG = {
'init_config': None,
'instances': [
{
'url': 'http://localhost/admin?stats',
'collect_status_metrics': True,
}
]
}
def _assert_agg_statuses(self, count_status_by_service=True, collate_status_tags_per_host=False):
expected_statuses = AGG_STATUSES_BY_SERVICE if count_status_by_service else AGG_STATUSES
for tags, value in expected_statuses:
if collate_status_tags_per_host:
# Assert that no aggregate statuses are sent
self.assertMetric('haproxy.count_per_status', tags=tags, count=0)
else:
self.assertMetric('haproxy.count_per_status', value=value, tags=tags)
@mock.patch('requests.get', return_value=mock.Mock(content=MOCK_DATA))
def test_count_per_status_agg_only(self, mock_requests):
config = copy.deepcopy(self.BASE_CONFIG)
# with count_status_by_service set to False
config['instances'][0]['count_status_by_service'] = False
self.run_check(config)
self.assertMetric('haproxy.count_per_status', value=2, tags=['status:open'])
self.assertMetric('haproxy.count_per_status', value=4, tags=['status:up'])
self.assertMetric('haproxy.count_per_status', value=2, tags=['status:down'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['status:maint'])
self.assertMetric('haproxy.count_per_status', value=0, tags=['status:nolb'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['status:no_check'])
self._assert_agg_statuses(count_status_by_service=False)
@mock.patch('requests.get', return_value=mock.Mock(content=MOCK_DATA))
def test_count_per_status_by_service(self, mock_requests):
self.run_check(self.BASE_CONFIG)
self.assertMetric('haproxy.count_per_status', value=1, tags=['status:open', 'service:a'])
self.assertMetric('haproxy.count_per_status', value=3, tags=['status:up', 'service:b'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['status:open', 'service:b'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['status:down', 'service:b'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['status:maint', 'service:b'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['status:up', 'service:c'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['status:down', 'service:c'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['status:no_check', 'service:c'])
self._assert_agg_statuses()
@mock.patch('requests.get', return_value=mock.Mock(content=MOCK_DATA))
def test_count_per_status_by_service_and_host(self, mock_requests):
config = copy.deepcopy(self.BASE_CONFIG)
config['instances'][0]['collect_status_metrics_by_host'] = True
self.run_check(config)
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:FRONTEND', 'status:open', 'service:a'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:FRONTEND', 'status:open', 'service:b'])
for backend in ['i-1', 'i-2', 'i-3']:
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:%s' % backend, 'status:up', 'service:b'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-4', 'status:down', 'service:b'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-5', 'status:maint', 'service:b'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-1', 'status:up', 'service:c'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-2', 'status:down', 'service:c'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-3', 'status:no_check', 'service:c'])
self._assert_agg_statuses()
@mock.patch('requests.get', return_value=mock.Mock(content=MOCK_DATA))
def test_count_per_status_by_service_and_collate_per_host(self, mock_requests):
config = copy.deepcopy(self.BASE_CONFIG)
config['instances'][0]['collect_status_metrics_by_host'] = True
config['instances'][0]['collate_status_tags_per_host'] = True
self.run_check(config)
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:FRONTEND', 'status:available', 'service:a'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:FRONTEND', 'status:available', 'service:b'])
for backend in ['i-1', 'i-2', 'i-3']:
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:%s' % backend, 'status:available', 'service:b'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-4', 'status:unavailable', 'service:b'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-5', 'status:unavailable', 'service:b'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-1', 'status:available', 'service:c'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-2', 'status:unavailable', 'service:c'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-3', 'status:unavailable', 'service:c'])
self._assert_agg_statuses(collate_status_tags_per_host=True)
@mock.patch('requests.get', return_value=mock.Mock(content=MOCK_DATA))
def test_count_per_status_collate_per_host(self, mock_requests):
config = copy.deepcopy(self.BASE_CONFIG)
config['instances'][0]['collect_status_metrics_by_host'] = True
config['instances'][0]['collate_status_tags_per_host'] = True
config['instances'][0]['count_status_by_service'] = False
self.run_check(config)
self.assertMetric('haproxy.count_per_status', value=2, tags=['backend:FRONTEND', 'status:available'])
self.assertMetric('haproxy.count_per_status', value=2, tags=['backend:i-1', 'status:available'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-2', 'status:available'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-2', 'status:unavailable'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-3', 'status:available'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-3', 'status:unavailable'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-4', 'status:unavailable'])
self.assertMetric('haproxy.count_per_status', value=1, tags=['backend:i-5', 'status:unavailable'])
self._assert_agg_statuses(count_status_by_service=False, collate_status_tags_per_host=True)
# This mock is only useful to make the first `run_check` run w/o errors (which in turn is useful only to initialize the check)
@mock.patch('requests.get', return_value=mock.Mock(content=MOCK_DATA))
def test_count_hosts_statuses(self, mock_requests):
self.run_check(self.BASE_CONFIG)
data = """# pxname,svname,qcur,qmax,scur,smax,slim,stot,bin,bout,dreq,dresp,ereq,econ,eresp,wretr,wredis,status,weight,act,bck,chkfail,chkdown,lastchg,downtime,qlimit,pid,iid,sid,throttle,lbtot,tracked,type,rate,rate_lim,rate_max,check_status,check_code,check_duration,hrsp_1xx,hrsp_2xx,hrsp_3xx,hrsp_4xx,hrsp_5xx,hrsp_other,hanafail,req_rate,req_rate_max,req_tot,cli_abrt,srv_abrt,
a,FRONTEND,,,1,2,12,1,11,11,0,0,0,,,,,OPEN,,,,,,,,,1,1,0,,,,0,1,0,2,,,,0,1,0,0,0,0,,1,1,1,,,
a,BACKEND,0,0,0,0,12,0,11,11,0,0,,0,0,0,0,UP,0,0,0,,0,1221810,0,,1,1,0,,0,,1,0,,0,,,,0,0,0,0,0,0,,,,,0,0,
b,FRONTEND,,,1,2,12,11,11,0,0,0,0,,,,,OPEN,,,,,,,,,1,2,0,,,,0,0,0,1,,,,,,,,,,,0,0,0,,,
b,i-1,0,0,0,1,,1,1,0,,0,,0,0,0,0,UP 1/2,1,1,0,0,1,1,30,,1,3,1,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
b,i-2,0,0,1,1,,1,1,0,,0,,0,0,0,0,UP 1/2,1,1,0,0,0,1,0,,1,3,2,,71,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
b,i-3,0,0,0,1,,1,1,0,,0,,0,0,0,0,UP,1,1,0,0,0,1,0,,1,3,3,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
b,i-4,0,0,0,1,,1,1,0,,0,,0,0,0,0,DOWN,1,1,0,0,0,1,0,,1,3,3,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
b,i-5,0,0,0,1,,1,1,0,,0,,0,0,0,0,MAINT,1,1,0,0,0,1,0,,1,3,3,,70,,2,0,,1,1,,0,,,,,,,0,,,,0,0,
b,BACKEND,0,0,1,2,0,421,1,0,0,0,,0,0,0,0,UP,6,6,0,,0,1,0,,1,3,0,,421,,1,0,,1,,,,,,,,,,,,,,0,0,
""".split('\n')
# per service
self.check._process_data(data, True, False, collect_status_metrics=True,
collect_status_metrics_by_host=False)
expected_hosts_statuses = defaultdict(int)
expected_hosts_statuses[('b', 'open')] = 1
expected_hosts_statuses[('b', 'up')] = 3
expected_hosts_statuses[('b', 'down')] = 1
expected_hosts_statuses[('b', 'maint')] = 1
expected_hosts_statuses[('a', 'open')] = 1
self.assertEquals(self.check.hosts_statuses, expected_hosts_statuses)
# backend hosts
agg_statuses = self.check._process_backend_hosts_metric(expected_hosts_statuses)
expected_agg_statuses = {
'a': {'available': 0, 'unavailable': 0},
'b': {'available': 3, 'unavailable': 2},
}
self.assertEquals(expected_agg_statuses, dict(agg_statuses))
# with process_events set to True
self.check._process_data(data, True, True, collect_status_metrics=True,
collect_status_metrics_by_host=False)
self.assertEquals(self.check.hosts_statuses, expected_hosts_statuses)
# per host
self.check._process_data(data, True, False, collect_status_metrics=True,
collect_status_metrics_by_host=True)
expected_hosts_statuses = defaultdict(int)
expected_hosts_statuses[('b', 'FRONTEND', 'open')] = 1
expected_hosts_statuses[('a', 'FRONTEND', 'open')] = 1
expected_hosts_statuses[('b', 'i-1', 'up')] = 1
expected_hosts_statuses[('b', 'i-2', 'up')] = 1
expected_hosts_statuses[('b', 'i-3', 'up')] = 1
expected_hosts_statuses[('b', 'i-4', 'down')] = 1
expected_hosts_statuses[('b', 'i-5', 'maint')] = 1
self.assertEquals(self.check.hosts_statuses, expected_hosts_statuses)
self.check._process_data(data, True, True, collect_status_metrics=True,
collect_status_metrics_by_host=True)
self.assertEquals(self.check.hosts_statuses, expected_hosts_statuses)
| 60.884615 | 390 | 0.645136 | 2,137 | 12,664 | 3.655124 | 0.079551 | 0.062988 | 0.061452 | 0.048137 | 0.855588 | 0.842018 | 0.805531 | 0.779414 | 0.779414 | 0.719882 | 0 | 0.076712 | 0.138424 | 12,664 | 207 | 391 | 61.178744 | 0.639171 | 0.022663 | 0 | 0.357576 | 0 | 0.145455 | 0.446268 | 0.320207 | 0 | 0 | 0 | 0 | 0.309091 | 1 | 0.042424 | false | 0 | 0.024242 | 0 | 0.084848 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
9aae6e30cb28d873851afa30daaa4531e13b1cc7 | 25 | py | Python | sample/__init__.py | prediction-vc/exchanges-data | a15d01222fe43987483a48a3669ed89539417771 | [
"Apache-2.0"
] | null | null | null | sample/__init__.py | prediction-vc/exchanges-data | a15d01222fe43987483a48a3669ed89539417771 | [
"Apache-2.0"
] | null | null | null | sample/__init__.py | prediction-vc/exchanges-data | a15d01222fe43987483a48a3669ed89539417771 | [
"Apache-2.0"
] | null | null | null | from .scraper import main | 25 | 25 | 0.84 | 4 | 25 | 5.25 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12 | 25 | 1 | 25 | 25 | 0.954545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
b13ead3043d443d93feb61d3f083d6486a03edc8 | 196 | py | Python | flow/core/__init__.py | richardschris/flow | c9bb0cbbb268320181d85967b3aec8096c3eb416 | [
"MIT"
] | null | null | null | flow/core/__init__.py | richardschris/flow | c9bb0cbbb268320181d85967b3aec8096c3eb416 | [
"MIT"
] | null | null | null | flow/core/__init__.py | richardschris/flow | c9bb0cbbb268320181d85967b3aec8096c3eb416 | [
"MIT"
] | null | null | null | class Base:
name = None
def __init__(self, name=None, *args, **kwargs):
self.name = name or ''
def __repr__(self):
return f'<{self.__class__.__name__}: {self.name}>'
| 21.777778 | 58 | 0.586735 | 25 | 196 | 3.96 | 0.52 | 0.242424 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.255102 | 196 | 8 | 59 | 24.5 | 0.678082 | 0 | 0 | 0 | 0 | 0 | 0.204082 | 0.137755 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.166667 | 0.833333 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
b18b5ebc7bf48829bc6242d7e0626f8302409628 | 11,767 | py | Python | chunks/tests.py | Kreeg/django-chunks-jinja2 | 0c9871240ecc563ed758b1b7a9b1c5edb557b135 | [
"BSD-3-Clause"
] | null | null | null | chunks/tests.py | Kreeg/django-chunks-jinja2 | 0c9871240ecc563ed758b1b7a9b1c5edb557b135 | [
"BSD-3-Clause"
] | null | null | null | chunks/tests.py | Kreeg/django-chunks-jinja2 | 0c9871240ecc563ed758b1b7a9b1c5edb557b135 | [
"BSD-3-Clause"
] | null | null | null | from django.test import TestCase
from django.test.utils import override_settings
from django.template import Context, Template, TemplateSyntaxError
from django.core.cache import cache
from django.utils.translation import activate
from chunks.models import Chunk
is_jinja_installed = True
try:
import jinja2
import django_jinja
except ImportError:
is_jinja_installed = False
class BaseTestCase(TestCase):
def setUp(self):
self.home_page_left = Chunk.objects.create(
key='home_page_left',
content='This is the content for left box')
cache.delete('chunk_home_page_left')
def render_template(self, content_string, context={}):
template = Template(content_string)
return template.render(Context(context))
class ChunkTestCase(BaseTestCase):
def test_cache_post_delete(self):
cache_key = 'chunk_home_page_left'
self.assertFalse(cache.get(cache_key), "key %r should NOT be cached" % cache_key)
self.render_template("{% load chunks %}"
"<div>{% chunk 'home_page_left' 10 %}</div>")
cached_result = cache.get(cache_key)
self.assertTrue(cached_result, "key %r should be cached" % cache_key)
Chunk.objects.all().delete()
cached_result = cache.get(cache_key)
self.assertFalse(cached_result, "key %r should NOT be cached" % cache_key)
def test_cache_post_save(self):
cache_key = 'chunk_home_page_left'
self.assertFalse(cache.get(cache_key), "key %r should NOT be cached" % cache_key)
self.render_template("{% load chunks %}"
"<div>{% chunk 'home_page_left' 10 %}</div>")
cached_result = cache.get(cache_key)
self.assertTrue(cached_result, "key %r should be cached" % cache_key)
c = Chunk.objects.get(key='home_page_left')
c.content = 'new'
c.save()
cached_result = cache.get(cache_key)
self.assertFalse(cached_result, "key %r should NOT be cached" % cache_key)
result = self.render_template('{% load chunks %}'
'<div>{% chunk "home_page_left" %}</div>')
self.assertEquals('<div>new</div>', result)
class ChunkTemplateTagTestCase(BaseTestCase):
def test_should_render_content_from_key(self):
result = self.render_template('{% load chunks %}'
'<div>{% chunk "home_page_left" %}</div>')
self.assertEquals('<div>This is the content for left box</div>', result)
def test_should_render_empty_string_if_key_not_found(self):
result = self.render_template('{% load chunks %}'
'<div>{% chunk "key_not_found" %}</div>')
self.assertEquals('<div></div>', result)
def test_should_cache_rendered_content(self):
cache_key = 'chunk_home_page_left'
self.assertFalse(cache.get(cache_key), "key %r should NOT be cached" % cache_key)
self.render_template("{% load chunks %}"
"<div>{% chunk 'home_page_left' 10 %}</div>")
cached_result = cache.get(cache_key)
self.assertTrue(cached_result, "key %r should be cached" % cache_key)
self.assertEquals('This is the content for left box', cached_result.content)
def test_should_fail_if_wrong_number_of_arguments(self):
with self.assertRaisesRegexp(TemplateSyntaxError, "'chunk' tag should have either 2 or 3 arguments"):
self.render_template('{% load chunks %}'
'{% chunk %}')
with self.assertRaisesRegexp(TemplateSyntaxError, "'chunk' tag should have either 2 or 3 arguments"):
self.render_template('{% load chunks %}'
'{% chunk "home_page_left" 10 "invalid" %}')
with self.assertRaisesRegexp(TemplateSyntaxError, "'chunk' tag should have either 2 or 3 arguments"):
self.render_template('{% load chunks %}'
'{% chunk "home_page_left" 10 too much invalid arguments %}')
def test_should_fail_if_key_not_quoted(self):
with self.assertRaisesRegexp(TemplateSyntaxError, "'chunk' tag's argument should be in quotes"):
self.render_template('{% load chunks %}'
'{% chunk home_page_left %}')
with self.assertRaisesRegexp(TemplateSyntaxError, "'chunk' tag's argument should be in quotes"):
self.render_template('{% load chunks %}'
'{% chunk "home_page_left\' %}')
class GetChunkTemplateTagTestCase(BaseTestCase):
def test_should_get_chunk_object_given_key(self):
result = self.render_template('{% load chunks %}'
'{% get_chunk "home_page_left" as chunk_obj %}'
'<p>{{ chunk_obj.content }}</p>')
self.assertEquals('<p>This is the content for left box</p>', result)
def test_should_assign_varname_to_none_if_chunk_not_found(self):
result = self.render_template('{% load chunks %}'
'{% get_chunk "chunk_not_found" as chunk_obj %}'
'{{ chunk_obj }}')
self.assertEquals('None', result)
def test_should_fail_if_wrong_number_of_arguments(self):
with self.assertRaisesRegexp(TemplateSyntaxError, 'Invalid syntax. Usage: {% get_chunk "key" as varname %}'):
self.render_template('{% load chunks %}'
'{% get_chunk %}')
with self.assertRaisesRegexp(TemplateSyntaxError, 'Invalid syntax. Usage: {% get_chunk "key" as varname %}'):
self.render_template('{% load chunks %}'
'{% get_chunk "home_page_left" %}')
with self.assertRaisesRegexp(TemplateSyntaxError, 'Invalid syntax. Usage: {% get_chunk "key" as varname %}'):
self.render_template('{% load chunks %}'
'{% get_chunk "home_page_left" as %}')
with self.assertRaisesRegexp(TemplateSyntaxError, 'Invalid syntax. Usage: {% get_chunk "key" as varname %}'):
self.render_template('{% load chunks %}'
'{% get_chunk "home_page_left" notas chunk_obj %}')
with self.assertRaisesRegexp(TemplateSyntaxError, 'Invalid syntax. Usage: {% get_chunk "key" as varname %}'):
self.render_template('{% load chunks %}'
'{% get_chunk "home_page_left" as chunk_obj invalid %}')
def test_should_fail_if_key_not_quoted(self):
with self.assertRaisesRegexp(TemplateSyntaxError, "Key argument to u'get_chunk' must be in quotes"):
result = self.render_template('{% load chunks %}'
'{% get_chunk home_page_left as chunk_obj %}')
with self.assertRaisesRegexp(TemplateSyntaxError, "Key argument to u'get_chunk' must be in quotes"):
result = self.render_template('{% load chunks %}'
'{% get_chunk "home_page_left\' as chunk_obj %}')
@override_settings(USE_I18N=True)
class I18NBaseTestCase(TestCase):
def setUp(self):
self.home_page_left_en = Chunk.objects.create(
key='home_page_left_en',
content='This is the content for left box')
cache.delete('cache_home_page_left_en')
self.home_page_left_ru = Chunk.objects.create(
key='home_page_left_ru',
content='This is the russian content for left box')
cache.delete('cache_home_page_left_ru')
def render_template(self, content_string, context={}):
template = Template(content_string)
return template.render(Context(context))
class I18NChuckTemplateTagTestCase(I18NBaseTestCase):
def test_should_render_content_from_key(self):
result = self.render_template('{% load chunks %}'
'<div>{% chunk "home_page_left" %}</div>')
self.assertEquals('<div>This is the content for left box</div>', result)
@override_settings(LANGUAGE_CODE='ru-RU')
def test_should_render_content_with_another_lang(self):
result = self.render_template('{% load chunks %}'
'<div>{% chunk "home_page_left" %}</div>')
self.assertEquals('<div>This is the russian content for left box</div>', result)
def test_should_cache_rendered_content(self):
cache_key = 'chunk_home_page_left_en'
self.assertFalse(cache.get(cache_key), "key %r should NOT be cached" % cache_key)
self.render_template("{% load chunks %}"
"<div>{% chunk 'home_page_left' 10 %}</div>")
cached_result = cache.get(cache_key)
self.assertTrue(cached_result, "key %r should be cached" % cache_key)
self.assertEquals('This is the content for left box', cached_result.content)
@override_settings(LANGUAGE_CODE='ru-RU')
def test_should_cache_rendered_content_with_another_lang(self):
cache_key = 'chunk_home_page_left_ru'
self.assertFalse(cache.get(cache_key), "key %r should NOT be cached" % cache_key)
self.render_template("{% load chunks %}"
"<div>{% chunk 'home_page_left' 10 %}</div>")
cached_result = cache.get(cache_key)
self.assertTrue(cached_result, "key %r should be cached" % cache_key)
self.assertEquals('This is the russian content for left box', cached_result.content)
if is_jinja_installed:
@override_settings(
JINJA2_GLOBALS={
'chunk': 'chunks.contrib.jinja_support.chunk',
'get_chunk': 'chunks.contrib.jinja_support.get_chunk'
},
INSTALLED_APPS=('chunks', 'django_jinja'),
TEMPLATE_LOADERS = (
'django_jinja.loaders.AppLoader',
'django_jinja.loaders.FileSystemLoader',
)
)
class Jinja2SupportTest(BaseTestCase):
def setUp(self):
from django_jinja.base import env
self.env = env
super(Jinja2SupportTest, self).setUp()
def render_template(self, content_string, context={}):
template = self.env.from_string(content_string)
return template.render()
class Jinja2ChunkTemplateTagTestCase(Jinja2SupportTest):
def test_should_render_content_from_key(self):
result = self.render_template('<div>{{ chunk("home_page_left") }}</div>')
self.assertEquals('<div>This is the content for left box</div>', result)
def test_should_cache_rendered_content(self):
cache_key = 'chunk_home_page_left'
self.assertFalse(cache.get(cache_key), "key %r should NOT be cached" % cache_key)
self.render_template("<div>{{ chunk('home_page_left', 10) }}</div>")
cached_result = cache.get(cache_key)
self.assertTrue(cached_result, "key %r should be cached" % cache_key)
self.assertEquals('This is the content for left box', cached_result.content)
class Jinja2GetChunkTemplateTagTestCase(Jinja2SupportTest):
def test_should_get_chunk_object_given_key(self):
result = self.render_template('{% with object = get_chunk("home_page_left") %}'
'<p>{{ object.content }}</p>{% endwith %}')
self.assertEquals('<p>This is the content for left box</p>', result)
def test_should_assign_varname_to_none_if_chunk_not_found(self):
result = self.render_template('{% with object = get_chunk("fake") %}{{ object }}{% endwith %}')
self.assertEquals('None', result) | 43.581481 | 117 | 0.622079 | 1,356 | 11,767 | 5.137906 | 0.098083 | 0.044783 | 0.067174 | 0.073202 | 0.80666 | 0.784269 | 0.78025 | 0.75915 | 0.735037 | 0.706617 | 0 | 0.004395 | 0.265148 | 11,767 | 270 | 118 | 43.581481 | 0.801318 | 0 | 0 | 0.558974 | 0 | 0 | 0.298436 | 0.025833 | 0 | 0 | 0 | 0 | 0.205128 | 1 | 0.128205 | false | 0 | 0.051282 | 0 | 0.241026 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
491f767778852a920ded312cf8686196c6a5316f | 24 | py | Python | pyrocoto/__init__.py | AdamSchnapp/pyrocoto | fa2661da268cdf8c239b01248f963c306d9251de | [
"MIT"
] | null | null | null | pyrocoto/__init__.py | AdamSchnapp/pyrocoto | fa2661da268cdf8c239b01248f963c306d9251de | [
"MIT"
] | null | null | null | pyrocoto/__init__.py | AdamSchnapp/pyrocoto | fa2661da268cdf8c239b01248f963c306d9251de | [
"MIT"
] | null | null | null | from .pyrocoto import *
| 12 | 23 | 0.75 | 3 | 24 | 6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 24 | 1 | 24 | 24 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
492fac1b7c971e6a9b71eeb5bc58f6481d8891b1 | 85 | py | Python | good_spot/common/models.py | jasmine92122/NightClubBackend | 7f59129b78baaba0e0c25de2b493033b858f1b00 | [
"MIT"
] | 1 | 2021-02-01T19:04:39.000Z | 2021-02-01T19:04:39.000Z | habitat/building/models/light.py | matrach/habitatOS | 1ae2a3caf6f279cf6d6d20bcd81f24d50f61d7d3 | [
"MIT"
] | 5 | 2020-02-12T03:13:11.000Z | 2022-01-13T01:41:14.000Z | good_spot/common/models.py | jasmine92122/NightClubBackend | 7f59129b78baaba0e0c25de2b493033b858f1b00 | [
"MIT"
] | null | null | null | from django.db import models
from django.utils.translation import ugettext_lazy as _
| 28.333333 | 55 | 0.847059 | 13 | 85 | 5.384615 | 0.769231 | 0.285714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.117647 | 85 | 2 | 56 | 42.5 | 0.933333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
498aaf6f8121e45aaf526024194783a91a825924 | 45 | py | Python | CodeWars/7 Kyu/Alternate case.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | CodeWars/7 Kyu/Alternate case.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | CodeWars/7 Kyu/Alternate case.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | def alternateCase(s):
return s.swapcase() | 22.5 | 23 | 0.711111 | 6 | 45 | 5.333333 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.155556 | 45 | 2 | 23 | 22.5 | 0.842105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
499a77d1c2f9d69a031122e60ccef0e432117f3f | 370 | py | Python | Modulo-03/ex109/ex109.py | Matheus-Henrique-Burey/Curso-de-Python | 448aebaab96527affa1e45897a662bb0407c11c6 | [
"MIT"
] | null | null | null | Modulo-03/ex109/ex109.py | Matheus-Henrique-Burey/Curso-de-Python | 448aebaab96527affa1e45897a662bb0407c11c6 | [
"MIT"
] | null | null | null | Modulo-03/ex109/ex109.py | Matheus-Henrique-Burey/Curso-de-Python | 448aebaab96527affa1e45897a662bb0407c11c6 | [
"MIT"
] | null | null | null | import moeda
preco = float(input('Digite um preço:R$ '))
print(f'O dobro de {moeda.moeda(preco)} é {moeda.dobro(preco, True)}')
print(f'A metade de {moeda.moeda(preco)} é {moeda.metade(preco, True)}')
print(f'Almentando 10% de {moeda.moeda(preco)} é {moeda.almentar(preco, 10, True)}')
print(f'Desconto de 10% {moeda.moeda(preco)} é {moeda.diminuir(preco, 10, True)}')
| 46.25 | 84 | 0.697297 | 63 | 370 | 4.095238 | 0.365079 | 0.193798 | 0.232558 | 0.248062 | 0.348837 | 0.267442 | 0 | 0 | 0 | 0 | 0 | 0.024169 | 0.105405 | 370 | 7 | 85 | 52.857143 | 0.755287 | 0 | 0 | 0 | 0 | 0.333333 | 0.775676 | 0.118919 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0.666667 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
b8feb5bc0d98ad99a96018b500aa1d90e42aba3c | 2,561 | py | Python | resources/dot_PyCharm/system/python_stubs/-762174762/PySide/QtXml/QDomDocument.py | basepipe/developer_onboarding | 05b6a776f8974c89517868131b201f11c6c2a5ad | [
"MIT"
] | 1 | 2020-04-20T02:27:20.000Z | 2020-04-20T02:27:20.000Z | resources/dot_PyCharm/system/python_stubs/cache/d1acfdaecbc43dfcba0c1287ba0e29c0ef0e2d37695269b2505d76ec531b8b76/PySide/QtXml/QDomDocument.py | basepipe/developer_onboarding | 05b6a776f8974c89517868131b201f11c6c2a5ad | [
"MIT"
] | null | null | null | resources/dot_PyCharm/system/python_stubs/cache/d1acfdaecbc43dfcba0c1287ba0e29c0ef0e2d37695269b2505d76ec531b8b76/PySide/QtXml/QDomDocument.py | basepipe/developer_onboarding | 05b6a776f8974c89517868131b201f11c6c2a5ad | [
"MIT"
] | null | null | null | # encoding: utf-8
# module PySide.QtXml
# from C:\Python27\lib\site-packages\PySide\QtXml.pyd
# by generator 1.147
# no doc
# imports
import Shiboken as __Shiboken
from QDomNode import QDomNode
class QDomDocument(QDomNode):
# no doc
def createAttribute(self, *args, **kwargs): # real signature unknown
pass
def createAttributeNS(self, *args, **kwargs): # real signature unknown
pass
def createCDATASection(self, *args, **kwargs): # real signature unknown
pass
def createComment(self, *args, **kwargs): # real signature unknown
pass
def createDocumentFragment(self, *args, **kwargs): # real signature unknown
pass
def createElement(self, *args, **kwargs): # real signature unknown
pass
def createElementNS(self, *args, **kwargs): # real signature unknown
pass
def createEntityReference(self, *args, **kwargs): # real signature unknown
pass
def createProcessingInstruction(self, *args, **kwargs): # real signature unknown
pass
def createTextNode(self, *args, **kwargs): # real signature unknown
pass
def doctype(self, *args, **kwargs): # real signature unknown
pass
def documentElement(self, *args, **kwargs): # real signature unknown
pass
def elementById(self, *args, **kwargs): # real signature unknown
pass
def elementsByTagName(self, *args, **kwargs): # real signature unknown
pass
def elementsByTagNameNS(self, *args, **kwargs): # real signature unknown
pass
def implementation(self, *args, **kwargs): # real signature unknown
pass
def importNode(self, *args, **kwargs): # real signature unknown
pass
def nodeType(self, *args, **kwargs): # real signature unknown
pass
def setContent(self, *args, **kwargs): # real signature unknown
pass
def toByteArray(self, *args, **kwargs): # real signature unknown
pass
def toString(self, *args, **kwargs): # real signature unknown
pass
def __copy__(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __nonzero__(self): # real signature unknown; restored from __doc__
""" x.__nonzero__() <==> x != 0 """
pass
| 27.244681 | 84 | 0.642718 | 285 | 2,561 | 5.642105 | 0.259649 | 0.202114 | 0.310945 | 0.257463 | 0.628109 | 0.628109 | 0.584577 | 0.560945 | 0 | 0 | 0 | 0.004167 | 0.250293 | 2,561 | 93 | 85 | 27.537634 | 0.833333 | 0.337368 | 0 | 0.462963 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.462963 | false | 0.462963 | 0.055556 | 0 | 0.537037 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 6 |
7700f8d593b412e6009faaec06e2e563024041c8 | 92 | py | Python | src/images/__init__.py | xgroleau/s8-app3 | 3cc18a0862a736f8dadac08b41f30f9956a85213 | [
"MIT"
] | null | null | null | src/images/__init__.py | xgroleau/s8-app3 | 3cc18a0862a736f8dadac08b41f30f9956a85213 | [
"MIT"
] | null | null | null | src/images/__init__.py | xgroleau/s8-app3 | 3cc18a0862a736f8dadac08b41f30f9956a85213 | [
"MIT"
] | null | null | null | from .color_transformation import *
from .image_collection import *
from .selector import *
| 23 | 35 | 0.804348 | 11 | 92 | 6.545455 | 0.636364 | 0.277778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 92 | 3 | 36 | 30.666667 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
7715cfc6543912fdb0069bf43b9344a318cc560a | 651 | py | Python | mailmojo_sdk/api/__init__.py | eliksir/mailmojo-python-sdk | 99dfd209f190c5320663ce762dd36b7b8b2065f4 | [
"Apache-2.0"
] | null | null | null | mailmojo_sdk/api/__init__.py | eliksir/mailmojo-python-sdk | 99dfd209f190c5320663ce762dd36b7b8b2065f4 | [
"Apache-2.0"
] | 2 | 2017-01-13T14:23:41.000Z | 2017-02-07T10:43:14.000Z | mailmojo_sdk/api/__init__.py | eliksir/mailmojo-python-sdk | 99dfd209f190c5320663ce762dd36b7b8b2065f4 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from mailmojo_sdk.api.account_api import AccountApi
from mailmojo_sdk.api.automation_api import AutomationApi
from mailmojo_sdk.api.contact_api import ContactApi
from mailmojo_sdk.api.embed_api import EmbedApi
from mailmojo_sdk.api.form_api import FormApi
from mailmojo_sdk.api.list_api import ListApi
from mailmojo_sdk.api.newsletter_api import NewsletterApi
from mailmojo_sdk.api.page_api import PageApi
from mailmojo_sdk.api.segment_api import SegmentApi
from mailmojo_sdk.api.template_api import TemplateApi
from mailmojo_sdk.api.webhook_api import WebhookApi
| 38.294118 | 57 | 0.870968 | 100 | 651 | 5.4 | 0.34 | 0.244444 | 0.305556 | 0.366667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001689 | 0.09063 | 651 | 16 | 58 | 40.6875 | 0.910473 | 0.06298 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
621322b7efe80b3b644a28bb12d52d6f1fa46f4e | 96 | py | Python | Python/Introduction/python-power-mod-power.py | mateusnr/hackerrank-solutions | 2fa60bae480d8afb46e3d99929707a7d9d92858f | [
"CC0-1.0"
] | 1 | 2015-08-01T04:03:47.000Z | 2015-08-01T04:03:47.000Z | Python/Introduction/python-power-mod-power.py | mateusnr/hackerrank-solutions | 2fa60bae480d8afb46e3d99929707a7d9d92858f | [
"CC0-1.0"
] | null | null | null | Python/Introduction/python-power-mod-power.py | mateusnr/hackerrank-solutions | 2fa60bae480d8afb46e3d99929707a7d9d92858f | [
"CC0-1.0"
] | 4 | 2020-05-04T15:12:21.000Z | 2021-02-18T11:58:30.000Z | a = int(raw_input())
b = int(raw_input())
m = int(raw_input())
print pow(a,b)
print pow(a,b,m)
| 13.714286 | 20 | 0.625 | 21 | 96 | 2.714286 | 0.380952 | 0.315789 | 0.578947 | 0.350877 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.145833 | 96 | 6 | 21 | 16 | 0.695122 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.4 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
656682af2018cfa17ca8ca114edea50d330de030 | 168 | py | Python | tests/fixtures/multilinestring/__init__.py | phuntimes/mongoshapes | f461c67343c32c6b97af8d67a269b4de492d1d71 | [
"MIT"
] | 1 | 2020-11-26T05:58:23.000Z | 2020-11-26T05:58:23.000Z | tests/fixtures/multilinestring/__init__.py | Sean-McVeigh/mongoshapes | f461c67343c32c6b97af8d67a269b4de492d1d71 | [
"MIT"
] | null | null | null | tests/fixtures/multilinestring/__init__.py | Sean-McVeigh/mongoshapes | f461c67343c32c6b97af8d67a269b4de492d1d71 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from mongoshapes import MultiLineStringDict as GeoDict
from .fixtures import geojson
from .fixtures import geointerface
| 21 | 54 | 0.767857 | 21 | 168 | 6.142857 | 0.761905 | 0.186047 | 0.27907 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006897 | 0.136905 | 168 | 7 | 55 | 24 | 0.882759 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
658984871bde4cf6309c19f9362485acf94463b0 | 2,002 | py | Python | src/hamm.py | nijaru/rosalind | ffa640d9c61d9fdd2967287f5dd3ee0ced188941 | [
"MIT"
] | null | null | null | src/hamm.py | nijaru/rosalind | ffa640d9c61d9fdd2967287f5dd3ee0ced188941 | [
"MIT"
] | null | null | null | src/hamm.py | nijaru/rosalind | ffa640d9c61d9fdd2967287f5dd3ee0ced188941 | [
"MIT"
] | null | null | null | S1 = "CAGGGCAAGGGCTAACAATACTAATCGAAACCCGGCAAACTTTCCTATCATAGCTTCAACAAGCTCAGACTACGGCGTGGACCGATGATGCTGCTTTATCTCTGGTGAAGGTCGCGCCTGGTCCCTCCTTGGCATAATATTGCGTCATTAGTAGGCCGACAATTTGAGACGTGCGACTTCATCGCGCAAGAAATTACGCCGCAATGGCCTAGGCTACACAGAAGCTAGATTCATGTGATCCTCCCTGACACAGTTATATGCCCATTCGACCAACAGCGGTAGAAGTTTCACGCAACAGAGCTCAACGTGGCGGTCGGATGCAAGTAAGGAGGCTGCAGCGACCGGAGGTTAGCACATCCAGCGATGGACGGACGTATTTAGGGGTGTGACGGCAAATATTCCTTAATTATGGAGGCCTGTACCCCGAACACTATTTTAGAGTTTGCAATCCTTGAACGCACGACCGAGGCAACGACCTGTGTAGGGGTCCACTGCACCGACGATGTCATCCTCTGCCATACCGATGTGGTCTCGACGGTCGCCAGATACCTCCAGACTACGTATTTTTCGCGGTTGCCGGCGGGGGTATGTCGCCGTTGATGACTAGTGAGGTGTACCGCCGGCTCATTACAGATAGTGGGTATGGTATTCTATTAACGGCCCCAATTCGTGCCCACCGCATTGAAAGGACCGATGAAACATTCAATGTCTAGTACCCTTTGGAAGGGACGAGACTTATGTCACCACGTCAGATCGGAAATCCCTGCCAGGGGTTTAATGAAGATCAATGGGAGAGACTTTAAGGGAGCAGTCACCTATCCATTTAGCGTGTATGGAGGTATGGCAGCAGGGTTATTCGATCCTAGTCTTCATCTGATGGCTCAATGAAGGCTCTAAAGACGGCCGCCCGTTCTACATATACAGCA"
S2 = "GGGAGCTTGGTGTCAATCGAACGTTAGACGTCAGGTATTCGAACCTAACTTAGGTTCGTCGAGCGCAGTCTATTGCTTTTAGAGAAAATGCAGCTCGGTTTCTCTACGAGGTCGTCCTTTATCCATTTTGGCCACAAAATTGCTTCAAGCTTCCTCCTTCGTTTTGAGGCGAGGCATGCCCTGTAGCCCTAAATTTCCCCGTATTGGCTTCGGCGACCCTAAAACATGCATCATGGTGGACTCCCTGGGGGGGTTAACTGCGTCGCCCTCCAACTCGGGTAGCTTGTTCACCCAACACCCCTTATGGTAACGGACAGAGTCCACTAGGTCGACCGCTACGACGGGGTTTTTACGCATGACAAGGTAAGACGGACTCGCGCGGAAGGAGAGAGAAGATAAATCATCACATGAGCGAGGTGCACCACGATCTATCTTCTTGTCAGGTAATTCCGATAACACAAGACAGACAATACGACCTACGTGAAGACTCAATGAACCAGCGACTCTCACATAATGAGGAATCGTGTTGATTCAACGGTGGTCGGAGCCTTACAAGCTTTGTACTTAGGCTGTATGCAGCGAGGGTTATGACATTATTAATGACGCGTATTGTCTAACGGTTGCACCCTTGAAGTACTGCTTATGAGATTTCAGGAACAGTATAAGTCGGTGCCGTTCCATAGTTAAGTACCGATGCAACATACTCTTTTGAGATACATTAGGATAGCTCGAGATGCCAGCCACCACGTCACATCGTTCAACCCTCAGAGAGGCTGTCGCTAGATTTAAGAAAGAGATGTTCAGGGTATAGACCCCTTTCGTAGGGGGGGTCAGGGCAGCAATGGAGATGTGCTACTATGGCAGAAGCATTACCTGAAGGTTCCATCACGCGTCAAATGGCGTTCGAAGGACCTAGCCATGCACCA"
def hamm(s1, s2):
    # Hamming distance: count the positions at which s1 and s2 differ
return sum(1 for a, b in zip(s1, s2) if a != b)
print(hamm(S1, S2))
| 182 | 933 | 0.967033 | 31 | 2,002 | 62.451613 | 0.612903 | 0.006198 | 0.008264 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005612 | 0.020979 | 2,002 | 10 | 934 | 200.2 | 0.982143 | 0.016484 | 0 | 0 | 0 | 0 | 0.941535 | 0.941535 | 0 | 1 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0 | 0.2 | 0.4 | 0.2 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 6 |
658c688394ac35842c64ccdc0d2c673da823bb1d | 67,879 | py | Python | tests/test_generator.py | pksol/mock_autogen | 5eff3db7bac5f59c5c1c54cfcb4e66576d261ed9 | [
"MIT"
] | 26 | 2018-12-24T15:02:34.000Z | 2022-03-17T06:26:08.000Z | tests/test_generator.py | pksol/mock_autogen | 5eff3db7bac5f59c5c1c54cfcb4e66576d261ed9 | [
"MIT"
] | 29 | 2018-10-20T17:45:53.000Z | 2022-03-08T02:24:24.000Z | tests/test_generator.py | pksol/mock_autogen | 5eff3db7bac5f59c5c1c54cfcb4e66576d261ed9 | [
"MIT"
] | null | null | null | import re
import sys
from collections import namedtuple
import pytest
import mock_autogen.generator
import tests.sample.code.tested_module
import tests.sample.code.second_module
from tests.sample.code.comprehensions_and_loops import get_square_root, \
summarize_environ_values, trimmed_strings, \
get_square_root_external_variable
from tests.sample.code.same_method_name import get_username_and_password
from tests.sample.code.subscripts import list_subscript_games
import tests.sample.code.with_statements as with_statements
MOCKED_MODULES_HEADER = "# mocked modules\n"
MOCKED_MODULES = "mock_os = mocker.MagicMock(name='os')\n" \
"mocker.patch('tests.sample.code.tested_module.os', " \
"new=mock_os)\n" \
"mock_random = mocker.MagicMock(name='random')\n" \
"mocker.patch('tests.sample.code.tested_module.random', " \
"new=mock_random)\n" \
"mock_second_module = " \
"mocker.MagicMock(name='second_module')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"second_module', new=mock_second_module)\n" \
"mock_zipfile = mocker.MagicMock(name='zipfile')\n" \
"mocker.patch('tests.sample.code.tested_module.zipfile', " \
"new=mock_zipfile)\n"
MOCKED_DEPENDENCIES_HEADER = "# mocked dependencies\n"
MOCKED_FUNCTIONS_HEADER = "# mocked functions\n"
MOCKED_FUNCTIONS = "mock_add = mocker.MagicMock(name='add')\n" \
"mocker.patch('tests.sample.code.tested_module.add', " \
"new=mock_add)\n" \
"mock_append_to_cwd = " \
"mocker.MagicMock(name='append_to_cwd')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"append_to_cwd', new=mock_append_to_cwd)\n" \
"mock_are_in_same_folder = " \
"mocker.MagicMock(name='are_in_same_folder')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"are_in_same_folder', new=mock_are_in_same_folder)\n" \
"mock_base_64_partial_functions = " \
"mocker.MagicMock(name='base_64_partial_functions')\n" \
"mocker.patch('tests.sample.code." \
"tested_module.base_64_partial_functions', " \
"new=mock_base_64_partial_functions)\n" \
"mock_base_64_whole_modules = " \
"mocker.MagicMock(name='base_64_whole_modules')\n" \
"mocker.patch('tests.sample.code." \
"tested_module.base_64_whole_modules', " \
"new=mock_base_64_whole_modules)\n" \
"mock_get_current_time = " \
"mocker.MagicMock(name='get_current_time')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"get_current_time', new=mock_get_current_time)\n" \
"mock_get_random_number = mocker.MagicMock" \
"(name='get_random_number')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"get_random_number', new=mock_get_random_number)\n" \
"mock_os_remove_wrap = mocker.MagicMock(" \
"name='os_remove_wrap')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"os_remove_wrap', new=mock_os_remove_wrap)\n" \
"mock_other_dir = mocker.MagicMock(name='other_dir')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"other_dir', new=mock_other_dir)\n" \
"mock_process_and_zip = mocker.MagicMock(" \
"name='process_and_zip')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"process_and_zip', new=mock_process_and_zip)\n" \
"mock_rm_alias = mocker.MagicMock(name='rm_alias')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"rm_alias', new=mock_rm_alias)\n" \
"mock_second_dir = mocker.MagicMock(name='second_dir')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"second_dir', new=mock_second_dir)\n" \
"mock_use_first_class = mocker.MagicMock(" \
"name='use_first_class')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"use_first_class', new=mock_use_first_class)\n" \
"mock_use_second_class_static = mocker.MagicMock(" \
"name='use_second_class_static')\n" \
"mocker.patch('tests.sample.code.tested_module." \
"use_second_class_static', " \
"new=mock_use_second_class_static)\n"
MOCKED_BUILTIN = "mock_os_remove = mocker.MagicMock(name='os_remove')\n" \
"mocker.patch('tests.sample.code.tested_module.os_remove', " \
"new=mock_os_remove)\n"
MOCKED_METHODS_HEADER = "# mocked methods\n"
MOCKED_METHODS = "mocker.patch.object(first, 'increase_class_counter')\n" \
"mocker.patch.object(first, 'increase_global_counter')\n" \
"mocker.patch.object(first, 'not_implemented')\n" \
"mocker.patch.object(first, 'using_not_implemented')\n"
MOCKED_CLASSES_HEADER = "# mocked classes\n"
MOCKED_CLASSES = "mock_FirstClass = mocker.MagicMock(name='FirstClass', " \
"spec=tests.sample.code.tested_module.FirstClass)\n" \
"mocker.patch('tests.sample.code.tested_module.FirstClass'," \
" new=mock_FirstClass)\n" \
"mock_SecondClass = mocker.MagicMock(name='SecondClass', " \
"spec=tests.sample.code.tested_module.SecondClass)\n" \
"mocker.patch('tests.sample.code.tested_module.SecondClass'" \
", new=mock_SecondClass)\n"
MOCKED_REFERENCED_CLASSES = "mock_dt = mocker.MagicMock(name='dt', " \
"spec=tests.sample.code.tested_module.dt)\n" \
"mocker.patch('tests.sample.code.tested_module." \
"dt', new=mock_dt)\n"
MOCKED_CLASSES_STATIC = """
class MockedFirstClassMeta(type):
static_instance = mocker.MagicMock(spec=tests.sample.code.tested_module.FirstClass)
def __getattr__(cls, key):
return MockedFirstClassMeta.static_instance.__getattr__(key)
class MockedFirstClass(metaclass=MockedFirstClassMeta):
original_cls = tests.sample.code.tested_module.FirstClass
instances = []
def __new__(cls, *args, **kwargs):
MockedFirstClass.instances.append(mocker.MagicMock(spec=MockedFirstClass.original_cls))
MockedFirstClass.instances[-1].__class__ = MockedFirstClass
return MockedFirstClass.instances[-1]
mocker.patch('tests.sample.code.tested_module.FirstClass', new=MockedFirstClass)
class MockedSecondClassMeta(type):
static_instance = mocker.MagicMock(spec=tests.sample.code.tested_module.SecondClass)
def __getattr__(cls, key):
return MockedSecondClassMeta.static_instance.__getattr__(key)
class MockedSecondClass(metaclass=MockedSecondClassMeta):
original_cls = tests.sample.code.tested_module.SecondClass
instances = []
def __new__(cls, *args, **kwargs):
MockedSecondClass.instances.append(mocker.MagicMock(spec=MockedSecondClass.original_cls))
MockedSecondClass.instances[-1].__class__ = MockedSecondClass
return MockedSecondClass.instances[-1]
mocker.patch('tests.sample.code.tested_module.SecondClass', new=MockedSecondClass)
"""
MOCKED_REFERENCED_CLASSES_STATIC = """
class MockeddtMeta(type):
static_instance = mocker.MagicMock(spec=tests.sample.code.tested_module.dt)
def __getattr__(cls, key):
return MockeddtMeta.static_instance.__getattr__(key)
class Mockeddt(metaclass=MockeddtMeta):
original_cls = tests.sample.code.tested_module.dt
instances = []
def __new__(cls, *args, **kwargs):
Mockeddt.instances.append(mocker.MagicMock(spec=Mockeddt.original_cls))
Mockeddt.instances[-1].__class__ = Mockeddt
return Mockeddt.instances[-1]
mocker.patch('tests.sample.code.tested_module.dt', new=Mockeddt)
"""
PREPARE_ASSERTS_CALLS_HEADER = "# calls to generate_asserts, put this after the 'act'\nimport mock_autogen\n"
PREPARE_ASSERTS_CALLS_DEFAULT = """mock_autogen.generate_asserts(mock_os, name='mock_os')
mock_autogen.generate_asserts(mock_random, name='mock_random')
mock_autogen.generate_asserts(mock_second_module, name='mock_second_module')
mock_autogen.generate_asserts(mock_zipfile, name='mock_zipfile')
mock_autogen.generate_asserts(mock_os_remove, name='mock_os_remove')
mock_autogen.generate_asserts(mock_dt, name='mock_dt')
"""
PREPARE_ASSERTS_CALLS_ALL = """mock_autogen.generate_asserts(mock_os, name='mock_os')
mock_autogen.generate_asserts(mock_random, name='mock_random')
mock_autogen.generate_asserts(mock_second_module, name='mock_second_module')
mock_autogen.generate_asserts(mock_zipfile, name='mock_zipfile')
mock_autogen.generate_asserts(mock_add, name='mock_add')
mock_autogen.generate_asserts(mock_append_to_cwd, name='mock_append_to_cwd')
mock_autogen.generate_asserts(mock_are_in_same_folder, name='mock_are_in_same_folder')
mock_autogen.generate_asserts(mock_base_64_partial_functions, name='mock_base_64_partial_functions')
mock_autogen.generate_asserts(mock_base_64_whole_modules, name='mock_base_64_whole_modules')
mock_autogen.generate_asserts(mock_get_current_time, name='mock_get_current_time')
mock_autogen.generate_asserts(mock_get_random_number, name='mock_get_random_number')
mock_autogen.generate_asserts(mock_os_remove_wrap, name='mock_os_remove_wrap')
mock_autogen.generate_asserts(mock_other_dir, name='mock_other_dir')
mock_autogen.generate_asserts(mock_process_and_zip, name='mock_process_and_zip')
mock_autogen.generate_asserts(mock_rm_alias, name='mock_rm_alias')
mock_autogen.generate_asserts(mock_second_dir, name='mock_second_dir')
mock_autogen.generate_asserts(mock_use_first_class, name='mock_use_first_class')
mock_autogen.generate_asserts(mock_use_second_class_static, name='mock_use_second_class_static')
mock_autogen.generate_asserts(mock_os_remove, name='mock_os_remove')
mock_autogen.generate_asserts(mock_FirstClass, name='mock_FirstClass')
mock_autogen.generate_asserts(mock_SecondClass, name='mock_SecondClass')
mock_autogen.generate_asserts(mock_dt, name='mock_dt')
"""
MOCKED_WARNINGS_HEADER = "# warnings\n"
MocksAllCollection = namedtuple(
'MocksAllCollection', 'os, second_module, add, append_to_cwd, '
'are_in_same_folder, other_dir, '
'rm_alias, os_remove_wrap, second_dir, os_remove')
MocksModulesOnlyCollection = namedtuple('MocksModulesOnlyCollection',
'os, second_module, zipfile')
MocksReferencedClassesOnlyCollection = namedtuple(
    'MocksReferencedClassesOnlyCollection', 'datetime')
MocksFunctionsOnlyCollection = namedtuple(
'MocksFunctionsOnlyCollection', 'add, append_to_cwd, '
'are_in_same_folder, '
'other_dir, rm_alias, '
'os_remove_wrap, second_dir')
MocksBuiltinOnlyCollection = namedtuple('MocksBuiltinOnlyCollection',
                                        'os_remove')
@pytest.fixture
def mock_referenced_classes_only_collection(mocker):
"""
The mocks are taken from `test_generate_mocks_referenced_classes_only` :)
Args:
mocker (pytest.fixture): the mocker fixture
Yields:
MocksReferencedClassesOnlyCollection: The generated mocks.
"""
# mocked classes
mock_dt = mocker.MagicMock(name='dt',
spec=tests.sample.code.tested_module.dt)
mocker.patch('tests.sample.code.tested_module.dt', new=mock_dt)
yield MocksReferencedClassesOnlyCollection(mock_dt)
@pytest.fixture
def mock_modules_only_collection(mocker):
"""
The mocks are taken from `test_generate_mocks_modules_only` :)
Args:
mocker (pytest.fixture): the mocker fixture
Yields:
MocksModulesOnlyCollection: The generated mocks.
"""
# mocked modules
mock_os = mocker.MagicMock(name='os')
mocker.patch('tests.sample.code.tested_module.os', new=mock_os)
mock_second_module = mocker.MagicMock(name='second_module')
mocker.patch('tests.sample.code.tested_module.second_module',
new=mock_second_module)
mock_zipfile = mocker.MagicMock(name='zipfile')
mocker.patch('tests.sample.code.tested_module.zipfile', new=mock_zipfile)
yield MocksModulesOnlyCollection(mock_os, mock_second_module, mock_zipfile)
@pytest.fixture
def mock_functions_only_collection(mocker):
"""
The mocks are taken from `test_generate_mocks_functions_only` :)
Args:
mocker (pytest.fixture): the mocker fixture
Yields:
MocksFunctionsOnlyCollection: The generated mocks.
"""
# mocked functions
mock_add = mocker.MagicMock(name='add')
mocker.patch('tests.sample.code.tested_module.add', new=mock_add)
mock_append_to_cwd = mocker.MagicMock(name='append_to_cwd')
mocker.patch('tests.sample.code.tested_module.append_to_cwd',
new=mock_append_to_cwd)
mock_are_in_same_folder = mocker.MagicMock(name='are_in_same_folder')
mocker.patch('tests.sample.code.tested_module.are_in_same_folder',
new=mock_are_in_same_folder)
mock_other_dir = mocker.MagicMock(name='other_dir')
mocker.patch('tests.sample.code.tested_module.other_dir',
new=mock_other_dir)
mock_process_and_zip = mocker.MagicMock(name='process_and_zip')
mocker.patch('tests.sample.code.tested_module.process_and_zip',
new=mock_process_and_zip)
mock_rm_alias = mocker.MagicMock(name='rm_alias')
mocker.patch('tests.sample.code.tested_module.rm_alias', new=mock_rm_alias)
mock_os_remove_wrap = mocker.MagicMock(name='os_remove_wrap')
mocker.patch('tests.sample.code.tested_module.os_remove_wrap',
new=mock_os_remove_wrap)
mock_second_dir = mocker.MagicMock(name='second_dir')
mocker.patch('tests.sample.code.tested_module.second_dir',
new=mock_second_dir)
yield MocksFunctionsOnlyCollection(mock_add, mock_append_to_cwd,
mock_are_in_same_folder, mock_other_dir,
mock_rm_alias, mock_os_remove_wrap,
mock_second_dir)
@pytest.fixture
def mock_builtin_only_collection(mocker):
"""
The mocks are taken from `test_generate_mocks_builtin_only` :)
Args:
mocker (pytest.fixture): the mocker fixture
Yields:
MocksBuiltinOnlyCollection: The generated mocks.
"""
# mocked functions
mock_os_remove = mocker.MagicMock(name='os_remove')
mocker.patch('tests.sample.code.tested_module.os_remove',
new=mock_os_remove)
yield MocksBuiltinOnlyCollection(mock_os_remove)
@pytest.fixture
def mock_everything_collection(mocker):
"""
The mocks are taken from `test_generate_mocks_all` :)
Args:
mocker (pytest.fixture): the mocker fixture
Yields:
MocksAllCollection: The generated mocks.
"""
# mocked modules
mock_os = mocker.MagicMock(name='os')
mocker.patch('tests.sample.code.tested_module.os', new=mock_os)
mock_second_module = mocker.MagicMock(name='second_module')
mocker.patch('tests.sample.code.tested_module.second_module',
new=mock_second_module)
# mocked functions
mock_add = mocker.MagicMock(name='add')
mocker.patch('tests.sample.code.tested_module.add', new=mock_add)
mock_append_to_cwd = mocker.MagicMock(name='append_to_cwd')
mocker.patch('tests.sample.code.tested_module.append_to_cwd',
new=mock_append_to_cwd)
mock_are_in_same_folder = mocker.MagicMock(name='are_in_same_folder')
mocker.patch('tests.sample.code.tested_module.are_in_same_folder',
new=mock_are_in_same_folder)
mock_other_dir = mocker.MagicMock(name='other_dir')
mocker.patch('tests.sample.code.tested_module.other_dir',
new=mock_other_dir)
mock_rm_alias = mocker.MagicMock(name='rm_alias')
mocker.patch('tests.sample.code.tested_module.rm_alias', new=mock_rm_alias)
mock_os_remove_wrap = mocker.MagicMock(name='os_remove_wrap')
mocker.patch('tests.sample.code.tested_module.os_remove_wrap',
new=mock_os_remove_wrap)
mock_second_dir = mocker.MagicMock(name='second_dir')
mocker.patch('tests.sample.code.tested_module.second_dir',
new=mock_second_dir)
mock_os_remove = mocker.MagicMock(name='os_remove')
mocker.patch('tests.sample.code.tested_module.os_remove',
new=mock_os_remove)
yield MocksAllCollection(mock_os, mock_second_module, mock_add,
mock_append_to_cwd, mock_are_in_same_folder,
mock_other_dir, mock_rm_alias,
mock_os_remove_wrap, mock_second_dir,
mock_os_remove)
def test_generate_mocks_modules_only():
generated_mocks = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module,
mock_modules=True,
mock_functions=False,
mock_builtin=False,
mock_classes=False,
mock_referenced_classes=False,
mock_classes_static=False,
prepare_asserts_calls=False)
assert MOCKED_MODULES_HEADER + MOCKED_MODULES == generated_mocks
def test_generate_mocks_functions_only():
generated_mocks = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module,
mock_modules=False,
mock_functions=True,
mock_builtin=False,
mock_classes=False,
mock_referenced_classes=False,
mock_classes_static=False,
prepare_asserts_calls=False)
assert MOCKED_FUNCTIONS_HEADER + MOCKED_FUNCTIONS == generated_mocks
def test_generate_mocks_object_methods_only():
first = tests.sample.code.tested_module.FirstClass('20')
generated_mocks_instance = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
first,
name='first',
mock_modules=False,
mock_functions=True,
mock_builtin=False,
mock_classes=False,
mock_referenced_classes=False,
mock_classes_static=False,
prepare_asserts_calls=False)
assert MOCKED_METHODS_HEADER + MOCKED_METHODS == generated_mocks_instance
def test_generate_mocks_builtin_only():
generated_mocks = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module,
mock_modules=False,
mock_functions=False,
mock_builtin=True,
mock_classes=False,
mock_referenced_classes=False,
mock_classes_static=False,
prepare_asserts_calls=False)
assert MOCKED_FUNCTIONS_HEADER + MOCKED_BUILTIN == generated_mocks
def test_generate_mocks_classes_only():
generated_mocks = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module,
mock_modules=False,
mock_functions=False,
mock_builtin=False,
mock_classes=True,
mock_referenced_classes=False,
mock_classes_static=False,
prepare_asserts_calls=False)
assert MOCKED_CLASSES_HEADER + MOCKED_CLASSES == generated_mocks
def test_generate_mocks_referenced_classes_only():
generated_mocks = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module,
mock_modules=False,
mock_functions=False,
mock_builtin=False,
mock_classes=False,
mock_referenced_classes=True,
mock_classes_static=False,
prepare_asserts_calls=False)
assert MOCKED_CLASSES_HEADER + MOCKED_REFERENCED_CLASSES == generated_mocks
def test_generate_mocks_classes_static_only():
generated_mocks = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module,
mock_modules=False,
mock_functions=False,
mock_builtin=False,
mock_classes=True,
mock_referenced_classes=False,
mock_classes_static=True,
prepare_asserts_calls=False)
assert MOCKED_CLASSES_HEADER + MOCKED_CLASSES_STATIC == generated_mocks
def test_generate_mocks_referenced_classes_static_only():
generated_mocks = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module,
mock_modules=False,
mock_functions=False,
mock_builtin=False,
mock_classes=False,
mock_referenced_classes=True,
mock_classes_static=True,
prepare_asserts_calls=False)
assert MOCKED_CLASSES_HEADER + MOCKED_REFERENCED_CLASSES_STATIC \
== generated_mocks
def test_generate_mocks_prepare_asserts_calls_only():
generated_mocks = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module,
mock_modules=False,
mock_functions=False,
mock_builtin=False,
mock_classes=False,
mock_referenced_classes=False,
mock_classes_static=False,
prepare_asserts_calls=True)
assert not generated_mocks
def test_generate_mocks_all():
generated_mocks = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module,
mock_modules=True,
mock_functions=True,
mock_builtin=True,
mock_classes=True,
mock_referenced_classes=True,
mock_classes_static=False,
prepare_asserts_calls=True)
assert MOCKED_MODULES_HEADER + MOCKED_MODULES + \
MOCKED_FUNCTIONS_HEADER + MOCKED_FUNCTIONS + MOCKED_BUILTIN + \
MOCKED_CLASSES_HEADER + MOCKED_CLASSES + MOCKED_REFERENCED_CLASSES + \
PREPARE_ASSERTS_CALLS_HEADER + PREPARE_ASSERTS_CALLS_ALL \
== generated_mocks
def test_generate_mocks_default():
generated_mocks = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module)
assert MOCKED_MODULES_HEADER + MOCKED_MODULES + \
MOCKED_FUNCTIONS_HEADER + MOCKED_BUILTIN + \
MOCKED_CLASSES_HEADER + MOCKED_REFERENCED_CLASSES + \
PREPARE_ASSERTS_CALLS_HEADER + PREPARE_ASSERTS_CALLS_DEFAULT == generated_mocks
def _extract_warnings_generated_mocks_and_generated_asserts(expected):
warnings = []
generated_mocks = []
generated_asserts = []
inside_warnings = False
inside_mocks = False
inside_asserts = False
for line in expected.splitlines():
if line == MOCKED_WARNINGS_HEADER.rstrip():
inside_warnings = True
if line == MOCKED_FUNCTIONS_HEADER.rstrip(
) or line == MOCKED_DEPENDENCIES_HEADER.rstrip():
inside_warnings = False
inside_mocks = True
if line == PREPARE_ASSERTS_CALLS_HEADER.splitlines()[0]:
inside_mocks = False
inside_asserts = True
if inside_warnings:
warnings.append(line)
if inside_mocks:
generated_mocks.append(line)
if inside_asserts:
generated_asserts.append(line)
return warnings, generated_mocks, generated_asserts
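def test__extract_sections_helper_example():
    # Illustrative example (added, not part of the original suite) of the
    # helper above: each returned list starts with its own header line.
    sample = (MOCKED_WARNINGS_HEADER + "# some warning\n" +
              MOCKED_DEPENDENCIES_HEADER +
              "mock_x = mocker.MagicMock(name='x')\n" +
              PREPARE_ASSERTS_CALLS_HEADER +
              "mock_autogen.generate_asserts(mock_x, name='mock_x')\n")
    warnings, mocks, asserts = \
        _extract_warnings_generated_mocks_and_generated_asserts(sample)
    assert ['# warnings', '# some warning'] == warnings
    assert ['# mocked dependencies',
            "mock_x = mocker.MagicMock(name='x')"] == mocks
    assert ["# calls to generate_asserts, put this after the 'act'",
            'import mock_autogen',
            "mock_autogen.generate_asserts(mock_x, name='mock_x')"] == asserts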
def test_generate_mocks_function_inner_imports(mocker):
wo_mock = tests.sample.code.tested_module.base_64_whole_modules("my msg1")
assert re.match(r"^MY MSG1.*False$", wo_mock) # without mocks
expected = """# warnings
# could not convert a function call into a mock on node:
# (message.upper() + suffix). \
# encode('ascii')
# mocked dependencies
mock_randint = mocker.MagicMock(name='randint')
mocker.patch('tests.sample.code.tested_module.random.randint', new=mock_randint)
mock_get_random_number = mocker.MagicMock(name='get_random_number')
mocker.patch('tests.sample.code.tested_module.get_random_number', new=mock_get_random_number)
mock_str = mocker.MagicMock(name='str')
mocker.patch('tests.sample.code.tested_module.str', new=mock_str)
mock_isfile = mocker.MagicMock(name='isfile')
mocker.patch('tests.sample.code.tested_module.os.path.isfile', new=mock_isfile)
mock_b64encode = mocker.MagicMock(name='b64encode')
mocker.patch('base64.b64encode', new=mock_b64encode)
mock_b64decode = mocker.MagicMock(name='b64decode')
mocker.patch('base64.b64decode', new=mock_b64decode)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_randint, name='mock_randint')
mock_autogen.generate_asserts(mock_get_random_number, name='mock_get_random_number')
mock_autogen.generate_asserts(mock_str, name='mock_str')
mock_autogen.generate_asserts(mock_isfile, name='mock_isfile')
mock_autogen.generate_asserts(mock_b64encode, name='mock_b64encode')
mock_autogen.generate_asserts(mock_b64decode, name='mock_b64decode')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module.base_64_whole_modules)
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(generated)
    # don't compare the warning's code snippet, since the Python version might be older than 3.8
assert expected_warnings[0:2] == generated_warnings[0:2]
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec(generated +
"\nmock_b64decode.return_value.decode.return_value = '20'")
w_mock = tests.sample.code.tested_module.base_64_whole_modules("my msg2")
assert "20" == w_mock
def test_generate_mocks_function_inner_imports_partial_functions(mocker):
wo_mock = tests.sample.code.tested_module.base_64_partial_functions(
"my msg1")
assert re.match(r"^MY MSG1.*False$", wo_mock) # without mocks
expected = """# warnings
# could not convert a function call into a mock on node:
# (message.upper() + suffix). \
# encode('ascii')
# mocked dependencies
mock_randint = mocker.MagicMock(name='randint')
mocker.patch('tests.sample.code.tested_module.random.randint', new=mock_randint)
mock_get_random_number = mocker.MagicMock(name='get_random_number')
mocker.patch('tests.sample.code.tested_module.get_random_number', new=mock_get_random_number)
mock_str = mocker.MagicMock(name='str')
mocker.patch('tests.sample.code.tested_module.str', new=mock_str)
mock_isfile = mocker.MagicMock(name='isfile')
mocker.patch('tests.sample.code.tested_module.os.path.isfile', new=mock_isfile)
mock_b64encode = mocker.MagicMock(name='b64encode')
mocker.patch('base64.b64encode', new=mock_b64encode)
mock_b64decode = mocker.MagicMock(name='b64decode')
mocker.patch('base64.b64decode', new=mock_b64decode)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_randint, name='mock_randint')
mock_autogen.generate_asserts(mock_get_random_number, name='mock_get_random_number')
mock_autogen.generate_asserts(mock_str, name='mock_str')
mock_autogen.generate_asserts(mock_isfile, name='mock_isfile')
mock_autogen.generate_asserts(mock_b64encode, name='mock_b64encode')
mock_autogen.generate_asserts(mock_b64decode, name='mock_b64decode')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module.base_64_partial_functions)
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(generated)
    # don't compare the warning's code snippet, since the Python version might be older than 3.8
assert expected_warnings[0:2] == generated_warnings[0:2]
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec(generated +
"\nmock_b64decode.return_value.decode.return_value = '20'")
w_mock = tests.sample.code.tested_module.base_64_partial_functions(
"my msg2")
assert "20" == w_mock
def test_generate_mocks_function_list_comprehension(mocker):
wo_mock = get_square_root([1, 4, 9])
assert [1, 2, 3] == wo_mock # without mocks
expected = """# mocked dependencies
mock_sqrt = mocker.MagicMock(name='sqrt')
mocker.patch('tests.sample.code.comprehensions_and_loops.math.sqrt', new=mock_sqrt)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_sqrt, name='mock_sqrt')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK, get_square_root)
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(generated)
assert expected_warnings == generated_warnings
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec(generated +
"\nmock_sqrt.side_effect = [-1]*len('not a list of numbers')")
w_mock = get_square_root('not a list of numbers')
assert [-1] * len('not a list of numbers') == w_mock
def test_generate_mocks_function_list_comprehension_external_variable(mocker):
wo_mock = get_square_root_external_variable()
assert [1, 2, 3] == wo_mock # without mocks
expected = """# mocked dependencies
mock_sqrt = mocker.MagicMock(name='sqrt')
mocker.patch('tests.sample.code.comprehensions_and_loops.math.sqrt', new=mock_sqrt)
mock_external_items = mocker.MagicMock(name='external_items')
mocker.patch('tests.sample.code.comprehensions_and_loops.external_items', new=mock_external_items)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_sqrt, name='mock_sqrt')
mock_autogen.generate_asserts(mock_external_items, name='mock_external_items')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
get_square_root_external_variable)
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(generated)
assert expected_warnings == generated_warnings
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec(generated +
"\nmock_sqrt.side_effect = [-1]*len('not a list of numbers')"
"\nmock_external_items.__iter__.return_value = [9, 16, 25, 36]")
w_mock = get_square_root_external_variable()
    assert [-1] * 4 == w_mock  # we changed the number of items in the external list
def test_generate_mocks_lock_external_variable(mocker, capsys):
with_statements.single_thread_dict = {}
wo_mock = with_statements.outside_lock_context("some", "value")
assert "value" == wo_mock # without mocks
wo_mock = with_statements.outside_lock_context("some", "other value")
assert "value" == wo_mock # without mocks
expected = """# mocked dependencies
mock_lock = mocker.MagicMock(name='lock')
mocker.patch('tests.sample.code.with_statements.lock', new=mock_lock)
mock_single_thread_dict = mocker.MagicMock(name='single_thread_dict')
mocker.patch('tests.sample.code.with_statements.single_thread_dict', new=mock_single_thread_dict)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_lock, name='mock_lock')
mock_autogen.generate_asserts(mock_single_thread_dict, name='mock_single_thread_dict')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
with_statements.outside_lock_context)
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(generated)
assert expected_warnings == generated_warnings
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec("\n".join(generated_mocks) +
"\nmock_single_thread_dict.__contains__.return_value = False"
"\nmock_single_thread_dict.__getitem__.return_value = 'strange'")
w_mock = with_statements.outside_lock_context("some", "third value")
assert 'strange' == w_mock
capsys.readouterr().out # this clears the existing output
exec("\n".join(generated_asserts))
expected_mock_results = """mock_lock.__enter__.assert_called_once_with()
mock_lock.__exit__.assert_called_once_with(None, None, None)
mock_single_thread_dict.__contains__.assert_called_once_with('some')
mock_single_thread_dict.__setitem__.assert_called_once_with('some', 'third value')
mock_single_thread_dict.__getitem__.assert_called_once_with('some')
"""
assert expected_mock_results == capsys.readouterr().out
def test_generate_mocks_function_dict_comprehension(mocker):
expected = """# mocked dependencies
mock_len = mocker.MagicMock(name='len')
mocker.patch('tests.sample.code.comprehensions_and_loops.len', new=mock_len)
mock_items = mocker.MagicMock(name='items')
mocker.patch('tests.sample.code.comprehensions_and_loops.os.environ.items', new=mock_items)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_len, name='mock_len')
mock_autogen.generate_asserts(mock_items, name='mock_items')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
summarize_environ_values)
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(generated)
assert expected_warnings == generated_warnings
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec(generated + "\nmock_len.side_effect = range(3)" +
"\nmock_items.return_value = (('a','b'), ('c','d'), ('e','f'),)")
w_mock = summarize_environ_values()
assert {'a': 0, 'c': 1, 'e': 2} == w_mock
def test_generate_mocks_function_dict_comprehension_ignore_variables(mocker):
expected = """# mocked dependencies
mock_len = mocker.MagicMock(name='len')
mocker.patch('tests.sample.code.comprehensions_and_loops.len', new=mock_len)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_len, name='mock_len')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK, trimmed_strings)
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(generated)
assert expected_warnings == generated_warnings
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec(generated + "\nmock_len.return_value = 20")
w_mock = trimmed_strings(["a", "bb", "cc "])
assert {'a': 20, 'cc': 20, 'bb': 20} == w_mock
def test_generate_mocks_function_subscript(mocker):
expected = """# mocked dependencies
mock_sqrt = mocker.MagicMock(name='sqrt')
mocker.patch('tests.sample.code.subscripts.math.sqrt', new=mock_sqrt)
mock_randint = mocker.MagicMock(name='randint')
mocker.patch('tests.sample.code.subscripts.random.randint', new=mock_randint)
mock_str = mocker.MagicMock(name='str')
mocker.patch('tests.sample.code.subscripts.str', new=mock_str)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_sqrt, name='mock_sqrt')
mock_autogen.generate_asserts(mock_randint, name='mock_randint')
mock_autogen.generate_asserts(mock_str, name='mock_str')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
list_subscript_games)
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(generated)
assert expected_warnings == generated_warnings
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec(generated + "\nmock_sqrt.return_value = 0" +
"\nmock_randint.return_value = 0" + "\nmock_str.return_value = '7'")
my_list = [1, 2, 3, 4, 5]
list_subscript_games(my_list)
assert [-1, '7', 5] == my_list
def test_generate_mocks_function_same_function_name_different_objects(mocker):
wo_mock = get_username_and_password()
assert "some_username,some_password" == wo_mock # without mocks
expected = """# mocked dependencies
mock_get = mocker.MagicMock(name='get')
mocker.patch('tests.sample.code.same_method_name.get', new=mock_get)
mock_get_2 = mocker.MagicMock(name='get_2')
mocker.patch('tests.sample.code.same_method_name.os.environ.get', new=mock_get_2)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_get, name='mock_get')
mock_autogen.generate_asserts(mock_get_2, name='mock_get_2')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
get_username_and_password)
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(generated)
assert expected_warnings == generated_warnings
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec(generated + "\nmock_get.return_value = 'made_up_username'"
"\nmock_get_2.return_value = 'made_up_password'")
w_mock = get_username_and_password()
assert 'made_up_username,made_up_password' == w_mock
def test_generate_mocks_method_inner_calls(mocker):
bin_op_class_name = 'ast.BinOp' if sys.version_info >= (
3, 9) else '_ast.BinOp'
global_before = tests.sample.code.tested_module.global_counter
prop_before = tests.sample.code.tested_module.FirstClass.prop
first = tests.sample.code.tested_module.FirstClass('20')
expected = f"""# warnings
# could not convert a function call into a mock on node:
# (suffix.upper() + suffix).encode('ascii')
# Can't stringify node of type <class '{bin_op_class_name}'>
# mocked dependencies
mock_randint = mocker.MagicMock(name='randint')
mocker.patch('tests.sample.code.tested_module.random.randint', new=mock_randint)
mock_get_random_number = mocker.MagicMock(name='get_random_number')
mocker.patch('tests.sample.code.tested_module.get_random_number', new=mock_get_random_number)
mock_str = mocker.MagicMock(name='str')
mocker.patch('tests.sample.code.tested_module.str', new=mock_str)
mock_isfile = mocker.MagicMock(name='isfile')
mocker.patch('tests.sample.code.tested_module.os.path.isfile', new=mock_isfile)
mock_b64encode = mocker.MagicMock(name='b64encode')
mocker.patch('base64.b64encode', new=mock_b64encode)
mock_b64decode = mocker.MagicMock(name='b64decode')
mocker.patch('base64.b64decode', new=mock_b64decode)
mock_increase_global_counter = mocker.MagicMock(name='increase_global_counter')
mocker.patch('tests.sample.code.tested_module.FirstClass.increase_global_counter', new=mock_increase_global_counter)
mock_increase_class_counter = mocker.MagicMock(name='increase_class_counter')
mocker.patch('tests.sample.code.tested_module.FirstClass.increase_class_counter', new=mock_increase_class_counter)
mock_not_implemented = mocker.MagicMock(name='not_implemented')
mocker.patch('tests.sample.code.tested_module.FirstClass.not_implemented', new=mock_not_implemented)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_randint, name='mock_randint')
mock_autogen.generate_asserts(mock_get_random_number, name='mock_get_random_number')
mock_autogen.generate_asserts(mock_str, name='mock_str')
mock_autogen.generate_asserts(mock_isfile, name='mock_isfile')
mock_autogen.generate_asserts(mock_b64encode, name='mock_b64encode')
mock_autogen.generate_asserts(mock_b64decode, name='mock_b64decode')
mock_autogen.generate_asserts(mock_increase_global_counter, name='mock_increase_global_counter')
mock_autogen.generate_asserts(mock_increase_class_counter, name='mock_increase_class_counter')
mock_autogen.generate_asserts(mock_not_implemented, name='mock_not_implemented')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
first.using_not_implemented)
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(generated)
    # don't compare the warning's code snippet, since the Python version might be older than 3.8
assert expected_warnings[0:2] == generated_warnings[0:2]
if sys.version_info >= (3, 8):
assert expected_warnings == generated_warnings
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
exec(generated) # verify the validity of generated mocks code
first.using_not_implemented()
assert global_before == tests.sample.code.tested_module.global_counter
assert prop_before == tests.sample.code.tested_module.FirstClass.prop
exec("mock_not_implemented.assert_called_once()")
def test_generate_mocks_static_method_inner_calls(mocker):
global_before = tests.sample.code.tested_module.global_counter
prop_before = tests.sample.code.tested_module.FirstClass.prop
first = tests.sample.code.tested_module.FirstClass('20')
expected = """# mocked dependencies
mock_get_random_number = mocker.MagicMock(name='get_random_number')
mocker.patch('tests.sample.code.tested_module.get_random_number', new=mock_get_random_number)
mock_staticmethod = mocker.MagicMock(name='staticmethod')
mocker.patch('tests.sample.code.tested_module.staticmethod', new=mock_staticmethod)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_get_random_number, name='mock_get_random_number')
mock_autogen.generate_asserts(mock_staticmethod, name='mock_staticmethod')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated_mocks_function = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
first.increase_global_counter)
generated_mocks_function_from_class = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module.FirstClass.increase_global_counter)
assert generated_mocks_function == generated_mocks_function_from_class
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(
generated_mocks_function)
assert expected_warnings == generated_warnings
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec(generated_mocks_function +
f"\nmock_get_random_number.return_value = {global_before}")
first.increase_global_counter()
assert global_before == tests.sample.code.tested_module.global_counter
assert prop_before == tests.sample.code.tested_module.FirstClass.prop
exec("mock_get_random_number.assert_called_once()")
def test_generate_mocks_class_method_inner_calls(mocker):
global_before = tests.sample.code.tested_module.global_counter
prop_before = tests.sample.code.tested_module.FirstClass.prop
first = tests.sample.code.tested_module.FirstClass('20')
expected = """# mocked dependencies
mock_get_random_number = mocker.MagicMock(name='get_random_number')
mocker.patch('tests.sample.code.tested_module.get_random_number', new=mock_get_random_number)
mock_increase_global_counter = mocker.MagicMock(name='increase_global_counter')
mocker.patch('tests.sample.code.tested_module.FirstClass.increase_global_counter', new=mock_increase_global_counter)
mock_classmethod = mocker.MagicMock(name='classmethod')
mocker.patch('tests.sample.code.tested_module.classmethod', new=mock_classmethod)
# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_get_random_number, name='mock_get_random_number')
mock_autogen.generate_asserts(mock_increase_global_counter, name='mock_increase_global_counter')
mock_autogen.generate_asserts(mock_classmethod, name='mock_classmethod')
"""
expected_warnings, expected_mocks, expected_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(expected)
generated_mocks_function = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
first.increase_class_counter)
generated_mocks_function_from_class = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
tests.sample.code.tested_module.FirstClass.increase_class_counter)
assert generated_mocks_function == generated_mocks_function_from_class
generated_warnings, generated_mocks, generated_asserts = \
_extract_warnings_generated_mocks_and_generated_asserts(
generated_mocks_function)
assert expected_warnings == generated_warnings
assert expected_mocks == generated_mocks
assert expected_asserts == generated_asserts
# verify the validity of generated mocks code
exec(generated_mocks_function +
f"\nmock_get_random_number.return_value = {prop_before}")
first.increase_class_counter()
assert global_before == tests.sample.code.tested_module.global_counter
assert prop_before == tests.sample.code.tested_module.FirstClass.prop
exec("mock_get_random_number.assert_called_once()")
exec("mock_increase_global_counter.assert_called_once()")
def test_generate_asserts_are_in_same_folder_args(mock_everything_collection):
tests.sample.code.tested_module.are_in_same_folder('/some/path/file1.txt',
'/some/path/file2.txt')
mock_are_in_same_folder = mock_everything_collection.are_in_same_folder
generated = mock_autogen.generator.generate_asserts(
mock_are_in_same_folder)
assert 'assert 1 == mock_are_in_same_folder.call_count\n' \
"mock_are_in_same_folder.assert_called_once_with(" \
"'/some/path/file1.txt', '/some/path/file2.txt')\n" == generated
exec(generated) # verify the validity of assertions
def test_generate_asserts_rename_argument(mock_everything_collection):
tests.sample.code.tested_module.are_in_same_folder('/some/path/file1.txt',
'/some/path/file2.txt')
mock_are_in_same_folder = mock_everything_collection.are_in_same_folder
generated = mock_autogen.generator.generate_asserts(
mock_are_in_same_folder, name='my_mock')
assert 'assert 1 == my_mock.call_count\n' \
"my_mock.assert_called_once_with(" \
"'/some/path/file1.txt', '/some/path/file2.txt')\n" == generated
def test_generate_asserts_unable_to_find_argument(mock_everything_collection):
tests.sample.code.tested_module.are_in_same_folder('/some/path/file1.txt',
'/some/path/file2.txt')
generated = mock_autogen.generator.generate_asserts(
mock_everything_collection.are_in_same_folder)
assert 'assert 1 == arg.call_count\n' \
"arg.assert_called_once_with(" \
"'/some/path/file1.txt', '/some/path/file2.txt')\n" == generated
def test_generate_asserts_mocks_were_not_called(mock_everything_collection):
for mocked in mock_everything_collection:
generated = mock_autogen.generator.generate_asserts(mocked)
assert "mocked.assert_not_called()" == generated
exec(generated)
def test_generate_asserts_are_in_same_folder_kwargs(
mock_functions_only_collection):
tests.sample.code.tested_module.are_in_same_folder(
path1='/some/path/file1.txt', path2='/some/path/file2.txt')
mock_are_in_same_folder = mock_functions_only_collection.are_in_same_folder
generated = mock_autogen.generator.generate_asserts(
mock_are_in_same_folder)
assert "assert 1 == mock_are_in_same_folder.call_count\n" \
"mock_are_in_same_folder.assert_called_once_with(" \
"path1='/some/path/file1.txt', " \
"path2='/some/path/file2.txt')\n" == generated
exec(generated) # verify the validity of assertions
def test_generate_asserts_are_in_same_folder_mix_args_kwargs(
mock_everything_collection):
tests.sample.code.tested_module.are_in_same_folder(
'/some/path/file1.txt', path2='/some/path/file2.txt')
mock_are_in_same_folder = mock_everything_collection.are_in_same_folder
generated = mock_autogen.generator.generate_asserts(
mock_are_in_same_folder)
assert "assert 1 == mock_are_in_same_folder.call_count\n" \
"mock_are_in_same_folder.assert_called_once_with(" \
"'/some/path/file1.txt', " \
"path2='/some/path/file2.txt')\n" == generated
exec(generated) # verify the validity of assertions
def test_generate_asserts_rm_alias_builtin_only(mock_builtin_only_collection):
tests.sample.code.tested_module.rm_alias('/some/path/file1.txt')
mock_os_remove = mock_builtin_only_collection.os_remove
generated = mock_autogen.generator.generate_asserts(mock_os_remove)
assert "assert 1 == mock_os_remove.call_count\n" \
"mock_os_remove.assert_called_once_with('/some/path/file1.txt')\n" \
== generated
exec(generated) # verify the validity of assertions
def test_generate_asserts_append_to_cwd_builtin_only(
mock_modules_only_collection):
tests.sample.code.tested_module.append_to_cwd('/some/path/file1.txt')
mock_os = mock_modules_only_collection.os
generated = mock_autogen.generator.generate_asserts(mock_os)
assert re.match(
r"^mock_os.getcwd.assert_called_once_with\(\)\n"
r"mock_os.path.join.assert_called_once_with"
r"\(<MagicMock name='os.getcwd\(\)' id='\d+'>, "
r"'/some/path/file1.txt'\)\n$", generated)
# added ANY to match the mock parameter
from mock import ANY
mock_os.path.join.assert_called_once_with(ANY, '/some/path/file1.txt')
mock_os.getcwd.assert_called_once_with()
def test_generate_asserts_append_to_cwd_builtin_only_mocked_cwd(
mock_modules_only_collection):
mock_os = mock_modules_only_collection.os
    # added this so the assert can be effective.
# this is an example of the code the user has to add on top of the utility
mock_os.getcwd.return_value = '/some/pwd'
tests.sample.code.tested_module.append_to_cwd('/some/path/file1.txt')
generated = mock_autogen.generator.generate_asserts(mock_os)
assert "mock_os.getcwd.assert_called_once_with()\n" \
"mock_os.path.join.assert_called_once_with" \
"('/some/pwd', '/some/path/file1.txt')\n" == generated
exec(generated) # verify the validity of assertions
def test_generate_asserts_add_mix_types(mock_functions_only_collection):
tests.sample.code.tested_module.add('one', 2)
mock_add = mock_functions_only_collection.add
generated = mock_autogen.generator.generate_asserts(mock_add)
assert 'assert 1 == mock_add.call_count\n' \
"mock_add.assert_called_once_with(" \
"'one', 2)\n" == generated
exec(generated) # verify the validity of assertions
def test_generate_asserts_add_multiple_calls(mock_functions_only_collection):
tests.sample.code.tested_module.add(1, 2)
tests.sample.code.tested_module.add('one', 'two')
mock_add = mock_functions_only_collection.add
generated = mock_autogen.generator.generate_asserts(mock_add)
assert 'from mock import call\n\n' \
'assert 2 == mock_add.call_count\n' \
"mock_add.assert_has_calls(calls=[call(1, 2)," \
"call('one', 'two'),])\n" == generated
exec(generated) # verify the validity of assertions
def test_generate_asserts_context_manager(mock_modules_only_collection):
tests.sample.code.tested_module.process_and_zip('/path/to.zip',
'in_zip.txt', 'foo bar')
mock_zipfile = mock_modules_only_collection.zipfile
generated = mock_autogen.generator.generate_asserts(mock_zipfile)
assert "mock_zipfile.ZipFile.assert_called_once_with(" \
"'/path/to.zip', 'w')\n" \
"mock_zipfile.ZipFile.return_value.__enter__." \
"assert_called_once_with()\n" \
"mock_zipfile.ZipFile.return_value.__enter__." \
"return_value.writestr.assert_called_once_with(" \
"'in_zip.txt', 'processed foo bar')\n" \
"mock_zipfile.ZipFile.return_value.__exit__." \
"assert_called_once_with(None, None, None)\n" == generated
exec(generated) # verify the validity of assertions
def test_generate_asserts_class(mocker):
# mocked classes
mock_FirstClass = mocker.MagicMock(
name='FirstClass', spec=tests.sample.code.tested_module.FirstClass)
mocker.patch('tests.sample.code.tested_module.FirstClass',
new=mock_FirstClass)
tests.sample.code.tested_module.use_first_class('20')
generated = mock_autogen.generator.generate_asserts(mock_FirstClass)
assert "assert 1 == mock_FirstClass.call_count\n" \
"mock_FirstClass.assert_called_once_with('20')\n" \
"mock_FirstClass.return_value.not_implemented." \
"assert_called_once_with(None)\n" == generated
exec(generated) # verify the validity of assertions
def test_generate_asserts_non_overridden_repr(mocker):
# mocked classes
mock_FirstClass = mocker.MagicMock(
name='FirstClass', spec=tests.sample.code.tested_module.FirstClass)
mocker.patch('tests.sample.code.tested_module.FirstClass',
new=mock_FirstClass)
tests.sample.code.tested_module.use_first_class(
'20', tests.sample.code.tested_module.SecondClass(42))
generated = mock_autogen.generator.generate_asserts(mock_FirstClass)
assert re.match(
r"^assert 1 == mock_FirstClass.call_count\n"
r"mock_FirstClass.assert_called_once_with\('20'\)\n"
r"mock_FirstClass.return_value.not_implemented."
r"assert_called_once_with\(_tests.sample.code.tested_module."
r"SecondClass_object_at_0x[0-9A-Fa-f]+_\)\n$", generated)
def test_generate_asserts_class_static(mocker):
# mocked classes
class MockedSecondClassMeta(type):
static_instance = mocker.MagicMock(
spec=tests.sample.code.tested_module.SecondClass)
def __getattr__(cls, key):
return MockedSecondClassMeta.static_instance.__getattr__(key)
class MockedSecondClass(metaclass=MockedSecondClassMeta):
original_cls = tests.sample.code.tested_module.SecondClass
instances = []
def __new__(cls, *args, **kwargs):
MockedSecondClass.instances.append(
mocker.MagicMock(spec=MockedSecondClass.original_cls))
MockedSecondClass.instances[-1].__class__ = MockedSecondClass
return MockedSecondClass.instances[-1]
mocker.patch('tests.sample.code.tested_module.SecondClass',
new=MockedSecondClass)
tests.sample.code.tested_module.use_second_class_static('20')
assert 1 == len(MockedSecondClass.instances)
generated_static = mock_autogen.generator.generate_asserts(
MockedSecondClassMeta.static_instance,
name="MockedSecondClassMeta.static_instance")
assert re.match(
r"^MockedSecondClassMeta.static_instance.prop.__eq__."
r"assert_called_once_with\(<MagicMock "
r"name='mock.prop' id='\d+'>\)\n$", generated_static)
generated_instance = mock_autogen.generator.generate_asserts(
MockedSecondClass.instances[0], name="MockedSecondClass.instances[0]")
assert re.match(
r"^MockedSecondClass.instances\[0\].not_implemented."
r"assert_called_once_with\(\)\n"
r"MockedSecondClass.instances\[0\].prop.__eq__."
r"assert_called_once_with\("
r"<MagicMock name='mock.prop' id='\d+'>\)\n$", generated_instance)
def test_class_static_objects_behave_the_same(mocker):
# mocked classes
class MockedSecondClassMeta(type):
static_instance = mocker.MagicMock(
spec=tests.sample.code.tested_module.SecondClass)
def __getattr__(cls, key):
return MockedSecondClassMeta.static_instance.__getattr__(key)
class MockedSecondClass(metaclass=MockedSecondClassMeta):
original_cls = tests.sample.code.tested_module.SecondClass
instances = []
def __new__(cls, *args, **kwargs):
MockedSecondClass.instances.append(
mocker.MagicMock(spec=MockedSecondClass.original_cls))
MockedSecondClass.instances[-1].__class__ = MockedSecondClass
return MockedSecondClass.instances[-1]
mocker.patch('tests.sample.code.tested_module.SecondClass',
new=MockedSecondClass)
second = tests.sample.code.tested_module.SecondClass('20')
second.not_implemented()
with pytest.raises(AttributeError):
second.unknown_method()
assert isinstance(second, tests.sample.code.tested_module.SecondClass)
def test_referenced_class(mock_referenced_classes_only_collection):
mock_referenced_classes_only_collection.datetime.utcnow.return_value = 20
current_time = tests.sample.code.tested_module.get_current_time()
assert 20 == current_time
def test_mock_object_instance(mocker):
first = tests.sample.code.tested_module.FirstClass('20')
exec(MOCKED_METHODS) # mocks all the methods
first.not_implemented() # would have raised exception otherwise
first.not_implemented.assert_called_once_with()
def test_mock_object_class_direct(mocker):
first_class = tests.sample.code.tested_module.FirstClass
generated_mocks_class = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
first_class,
name='first_class',
mock_modules=False,
mock_functions=True,
mock_builtin=False,
mock_classes=False,
mock_referenced_classes=False,
mock_classes_static=False)
exec(generated_mocks_class)
first_class_instance = first_class(42)
first_class_instance.not_implemented('some param')
first_class_instance.not_implemented.assert_called_once_with('some param')
def test_mock_object_class_indirect(mocker):
first_class = tests.sample.code.tested_module.FirstClass
generated_mocks_class = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
first_class,
name='first_class',
mock_modules=False,
mock_functions=True,
mock_builtin=False,
mock_classes=False,
mock_referenced_classes=False,
mock_classes_static=False)
exec(generated_mocks_class)
first_class_instance = tests.sample.code.tested_module.FirstClass(42)
first_class_instance.not_implemented('some param')
first_class_instance.not_implemented.assert_called_once_with('some param')
@pytest.mark.parametrize('static', [True, False])
def test_generate_mocks_mocked_class_equals_to_module(static):
module = tests.sample.code.second_module
single_class = module.SingleClassInModule
generated_mocks_module = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
module,
name='some_name',
mock_modules=False,
mock_functions=False,
mock_builtin=False,
mock_classes=True,
mock_referenced_classes=False,
mock_classes_static=static)
generated_mocks_class = mock_autogen.generator.generate_mocks(
mock_autogen.generator.MockingFramework.PYTEST_MOCK,
single_class,
name='some_name',
mock_modules=False,
mock_functions=False,
mock_builtin=False,
mock_classes=True,
mock_referenced_classes=False,
mock_classes_static=static)
assert generated_mocks_module == generated_mocks_class
def test_generate_mocks_invalid_framework():
with pytest.raises(ValueError):
mock_autogen.generator.generate_mocks('unittest', tests.sample.code)
def test_generate_asserts_invalid_object():
with pytest.raises(TypeError):
mock_autogen.generator.generate_asserts('not a mock')
def test__single_call_to_generate_asserts():
assert "mock_autogen.generate_asserts(mock_name, name='mock_name')\n" == \
mock_autogen.generator._single_call_to_generate_asserts("mock_name")
@pytest.mark.parametrize('prepare_asserts_calls', [True, False])
def test__pytest_mock_dependencies_generate_no_functions(
prepare_asserts_calls):
generated = mock_autogen.generator._pytest_mock_dependencies_generate(
set(), prepare_asserts_calls)
assert "" == generated
@pytest.mark.parametrize('prepare_asserts_calls', [True, False])
def test__pytest_mock_dependencies_generate_one_function(
prepare_asserts_calls):
expected = """# mocked dependencies
mock_first_function = mocker.MagicMock(name='first_function')
mocker.patch('one.object.first_function', new=mock_first_function)
"""
if prepare_asserts_calls:
expected += """# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_first_function, name='mock_first_function')
"""
generated = mock_autogen.generator._pytest_mock_dependencies_generate(
[('one.object', 'first_function')], prepare_asserts_calls)
assert expected == generated
@pytest.mark.parametrize('prepare_asserts_calls', [True, False])
def test__pytest_mock_dependencies_generate_two_functions(
prepare_asserts_calls):
expected = """# mocked dependencies
mock_first_function = mocker.MagicMock(name='first_function')
mocker.patch('one.object.first_function', new=mock_first_function)
mock_second_function = mocker.MagicMock(name='second_function')
mocker.patch('second.object.second_function', new=mock_second_function)
"""
if prepare_asserts_calls:
expected += """# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_first_function, name='mock_first_function')
mock_autogen.generate_asserts(mock_second_function, name='mock_second_function')
"""
generated = mock_autogen.generator._pytest_mock_dependencies_generate(
[('one.object', 'first_function'),
('second.object', 'second_function')], prepare_asserts_calls)
assert expected == generated
@pytest.mark.parametrize('prepare_asserts_calls', [True, False])
def test__pytest_mock_dependencies_generate_two_functions_duplicate(
prepare_asserts_calls):
expected = """# mocked dependencies
mock_first_function = mocker.MagicMock(name='first_function')
mocker.patch('one.object.first_function', new=mock_first_function)
mock_first_function_2 = mocker.MagicMock(name='first_function_2')
mocker.patch('second.object.first_function', new=mock_first_function_2)
"""
if prepare_asserts_calls:
expected += """# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_first_function, name='mock_first_function')
mock_autogen.generate_asserts(mock_first_function_2, name='mock_first_function_2')
"""
generated = mock_autogen.generator._pytest_mock_dependencies_generate(
[('one.object', 'first_function'),
('second.object', 'first_function')], prepare_asserts_calls)
assert expected == generated
@pytest.mark.parametrize('prepare_asserts_calls', [True, False])
def test__pytest_mock_dependencies_generate_four_functions_duplicate(
prepare_asserts_calls):
expected = """# mocked dependencies
mock_first_function = mocker.MagicMock(name='first_function')
mocker.patch('one.object.first_function', new=mock_first_function)
mock_second_function = mocker.MagicMock(name='second_function')
mocker.patch('second.object.second_function', new=mock_second_function)
mock_first_function_2 = mocker.MagicMock(name='first_function_2')
mocker.patch('third.sub.module.first_function', new=mock_first_function_2)
mock_first_function_3 = mocker.MagicMock(name='first_function_3')
mocker.patch('fourth.first_function', new=mock_first_function_3)
"""
if prepare_asserts_calls:
expected += """# calls to generate_asserts, put this after the 'act'
import mock_autogen
mock_autogen.generate_asserts(mock_first_function, name='mock_first_function')
mock_autogen.generate_asserts(mock_second_function, name='mock_second_function')
mock_autogen.generate_asserts(mock_first_function_2, name='mock_first_function_2')
mock_autogen.generate_asserts(mock_first_function_3, name='mock_first_function_3')
"""
generated = mock_autogen.generator._pytest_mock_dependencies_generate(
[('one.object', 'first_function'),
('second.object', 'second_function'),
('third.sub.module', 'first_function'), ('fourth', 'first_function')],
prepare_asserts_calls)
assert expected == generated
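# Illustrative sketch (not part of the original test suite): the expected strings in the
# tests above show the shape of the pytest-mock code that
# _pytest_mock_dependencies_generate emits. A test consuming that output would look
# roughly like the commented example below; 'one.object.first_function' is a
# hypothetical dependency path reused from the test data above.
#
#     def test_sketch(mocker):
#         # mocked dependencies (paste the generate_mocks output here)
#         mock_first_function = mocker.MagicMock(name='first_function')
#         mocker.patch('one.object.first_function', new=mock_first_function)
#         ...  # act: call the code under test
#         # calls to generate_asserts, put this after the 'act'
#         import mock_autogen
#         mock_autogen.generate_asserts(mock_first_function, name='mock_first_function')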
| 43.04312 | 116 | 0.738491 | 8,382 | 67,879 | 5.593176 | 0.042353 | 0.04153 | 0.053432 | 0.065846 | 0.851542 | 0.801246 | 0.765731 | 0.732029 | 0.691886 | 0.66341 | 0 | 0.005732 | 0.16469 | 67,879 | 1,576 | 117 | 43.070431 | 0.821111 | 0.03686 | 0 | 0.592378 | 1 | 0.004143 | 0.401234 | 0.286249 | 0 | 0 | 0 | 0 | 0.292461 | 1 | 0.051367 | false | 0.0058 | 0.026512 | 0.001657 | 0.095278 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
65ba6b8928d1ac2f9052e75f99f3ed00668aae06 | 67 | py | Python | akimous/modeling/feature/__init__.py | akimous/akimous | 1828c09407bc32b233500647290d698ba5e5549f | [
"BSD-3-Clause"
] | 12 | 2019-11-14T14:20:33.000Z | 2022-03-27T15:24:45.000Z | akimous/modeling/feature/__init__.py | akimous/akimous | 1828c09407bc32b233500647290d698ba5e5549f | [
"BSD-3-Clause"
] | 7 | 2020-04-05T05:37:52.000Z | 2020-09-27T14:21:41.000Z | akimous/modeling/feature/__init__.py | akimous/akimous | 1828c09407bc32b233500647290d698ba5e5549f | [
"BSD-3-Clause"
] | 3 | 2020-03-23T17:31:39.000Z | 2022-03-27T15:24:53.000Z | from .context_features import *
from .completion_features import *
| 22.333333 | 34 | 0.820896 | 8 | 67 | 6.625 | 0.625 | 0.528302 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.119403 | 67 | 2 | 35 | 33.5 | 0.898305 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
65e5c4caefb7f5f13df5907a3d9f48083542add1 | 4,241 | py | Python | tests_3_6/test_represent.py | sobolevn/icontract | 8dd3c236fd75f74fef722311e2de65d036b9c6a7 | [
"MIT"
] | null | null | null | tests_3_6/test_represent.py | sobolevn/icontract | 8dd3c236fd75f74fef722311e2de65d036b9c6a7 | [
"MIT"
] | null | null | null | tests_3_6/test_represent.py | sobolevn/icontract | 8dd3c236fd75f74fef722311e2de65d036b9c6a7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# pylint: disable=missing-docstring,invalid-name,too-many-public-methods,no-self-use
# pylint: disable=unused-argument
import textwrap
import unittest
import math
from typing import Optional # pylint: disable=unused-import
import icontract._represent
import tests.error
import tests.mock
class TestLiteralStringInterpolation(unittest.TestCase):
def test_plain_string(self) -> None:
@icontract.require(lambda x: f"something" == '')
def func(x: float) -> float:
return x
violation_err = None # type: Optional[icontract.ViolationError]
try:
func(x=0)
except icontract.ViolationError as err:
violation_err = err
self.assertIsNotNone(violation_err)
self.assertEqual(
'f"something" == \'\': f"something" was \'something\'',
tests.error.wo_mandatory_location(str(violation_err)))
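# The remaining tests in this class repeat the same arrange/act/assert pattern: a
# precondition built from an f-string that can never hold, a call that raises
# icontract.ViolationError, and a check that the error message echoes the condition
# source followed by the interpolated value, e.g. 'f"{x}" == \'\': f"{x}" was \'0\''.
# Only the conversion (!s, !r, !a) and the format spec (:.3) vary from test to test.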
def test_simple_interpolation(self) -> None:
@icontract.require(lambda x: f"{x}" == '')
def func(x: float) -> float:
return x
violation_err = None # type: Optional[icontract.ViolationError]
try:
func(x=0)
except icontract.ViolationError as err:
violation_err = err
self.assertIsNotNone(violation_err)
self.assertEqual(
'f"{x}" == \'\': f"{x}" was \'0\'',
tests.error.wo_mandatory_location(str(violation_err)))
def test_string_formatting(self) -> None:
@icontract.require(lambda x: f"{x!s}" == '')
def func(x: float) -> float:
return x
violation_err = None # type: Optional[icontract.ViolationError]
try:
func(x=1.984)
except icontract.ViolationError as err:
violation_err = err
self.assertIsNotNone(violation_err)
self.assertEqual(
'f"{x!s}" == \'\': f"{x!s}" was \'1.984\'',
tests.error.wo_mandatory_location(str(violation_err)))
def test_repr_formatting(self) -> None:
@icontract.require(lambda x: f"{x!r}" == '')
def func(x: float) -> float:
return x
violation_err = None # type: Optional[icontract.ViolationError]
try:
func(x=1.984)
except icontract.ViolationError as err:
violation_err = err
self.assertIsNotNone(violation_err)
self.assertEqual(
'f"{x!r}" == \'\': f"{x!r}" was \'1.984\'',
tests.error.wo_mandatory_location(str(violation_err)))
def test_ascii_formatting(self) -> None:
@icontract.require(lambda x: f"{x!a}" == '')
def func(x: float) -> float:
return x
violation_err = None # type: Optional[icontract.ViolationError]
try:
func(x=1.984)
except icontract.ViolationError as err:
violation_err = err
self.assertIsNotNone(violation_err)
self.assertEqual(
'f"{x!a}" == \'\': f"{x!a}" was \'1.984\'',
tests.error.wo_mandatory_location(str(violation_err)))
def test_format_spec(self) -> None:
@icontract.require(lambda x: f"{x:.3}" == '')
def func(x: float) -> float:
return x
violation_err = None # type: Optional[icontract.ViolationError]
try:
func(x=1.984)
except icontract.ViolationError as err:
violation_err = err
self.assertIsNotNone(violation_err)
self.assertEqual(
'f"{x:.3}" == \'\': f"{x:.3}" was \'1.98\'',
tests.error.wo_mandatory_location(str(violation_err)))
def test_conversion_and_format_spec(self) -> None:
@icontract.require(lambda x: f"{x!r:.3}" == '')
def func(x: float) -> float:
return x
violation_err = None # type: Optional[icontract.ViolationError]
try:
func(x=1.984)
except icontract.ViolationError as err:
violation_err = err
self.assertIsNotNone(violation_err)
self.assertEqual(
'f"{x!r:.3}" == \'\': f"{x!r:.3}" was \'1.9\'',
tests.error.wo_mandatory_location(str(violation_err)))
if __name__ == '__main__':
unittest.main()
| 32.374046 | 84 | 0.585475 | 489 | 4,241 | 4.940695 | 0.165644 | 0.139073 | 0.049255 | 0.069536 | 0.80505 | 0.80505 | 0.80505 | 0.791805 | 0.759934 | 0.705712 | 0 | 0.015415 | 0.281066 | 4,241 | 130 | 85 | 32.623077 | 0.776976 | 0.106814 | 0 | 0.693069 | 0 | 0 | 0.069897 | 0 | 0 | 0 | 0 | 0 | 0.138614 | 1 | 0.138614 | false | 0 | 0.069307 | 0.069307 | 0.287129 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
02f45b627ed7ec1ce05db516b378d0dbbac800ec | 44 | py | Python | config/__init__.py | nikanar/OpenKE | f88d54e3b019c02035a39e5de7c732c16277dd7e | [
"MIT"
] | null | null | null | config/__init__.py | nikanar/OpenKE | f88d54e3b019c02035a39e5de7c732c16277dd7e | [
"MIT"
] | 1 | 2018-06-07T18:38:16.000Z | 2018-06-07T18:38:16.000Z | config/__init__.py | nikanar/OpenKE | f88d54e3b019c02035a39e5de7c732c16277dd7e | [
"MIT"
] | 1 | 2018-06-07T16:18:35.000Z | 2018-06-07T16:18:35.000Z | from . import Config
from Config import *
| 14.666667 | 21 | 0.727273 | 6 | 44 | 5.333333 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.227273 | 44 | 2 | 22 | 22 | 0.941176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
02f64f1ac3eaffb3ae8e805920dc7760a38fa59a | 263 | py | Python | reaver/models/__init__.py | HatsuneMiku4/reaver | 059320ce109498ec4100fcc2cee32177c427f1ea | [
"MIT"
] | null | null | null | reaver/models/__init__.py | HatsuneMiku4/reaver | 059320ce109498ec4100fcc2cee32177c427f1ea | [
"MIT"
] | null | null | null | reaver/models/__init__.py | HatsuneMiku4/reaver | 059320ce109498ec4100fcc2cee32177c427f1ea | [
"MIT"
] | null | null | null | from reaver.models.base import layers, build_mlp, build_cnn_nature, MultiPolicy
from reaver.models.sc2 import build_fully_conv, SC2MultiPolicy
from reaver.models.sc2.relational import build_relational
from reaver.models.sc2.policy import SC2RelationalMultiPolicy
| 52.6 | 79 | 0.870722 | 36 | 263 | 6.194444 | 0.5 | 0.179372 | 0.286996 | 0.255605 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.020576 | 0.076046 | 263 | 4 | 80 | 65.75 | 0.897119 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
f320c383c29a50216d77e94d570494348f0c7c56 | 397,984 | py | Python | sdk/cdn/azure-mgmt-cdn/azure/mgmt/cdn/models/_models_py3.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/cdn/azure-mgmt-cdn/azure/mgmt/cdn/models/_models_py3.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/cdn/azure-mgmt-cdn/azure/mgmt/cdn/models/_models_py3.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Dict, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._cdn_management_client_enums import *
class Resource(msrest.serialization.Model):
"""The core properties of ARM resources.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
}
def __init__(
self,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
self.system_data = None
class AFDDomain(Resource):
"""Friendly domain name mapping to the endpoint hostname that the customer provides for branding purposes, e.g. www.contoso.com.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param tls_settings: The configuration specifying how to enable HTTPS for the domain - using
AzureFrontDoor managed certificate or user's own certificate. If not specified, enabling SSL
uses AzureFrontDoor managed certificate by default.
:type tls_settings: ~azure.mgmt.cdn.models.AFDDomainHttpsParameters
:param azure_dns_zone: Resource reference to the Azure DNS zone.
:type azure_dns_zone: ~azure.mgmt.cdn.models.ResourceReference
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
:ivar domain_validation_state: Provisioning substate shows the progress of custom HTTPS
enabling/disabling process step by step. DCV stands for DomainControlValidation. Possible
values include: "Unknown", "Submitting", "Pending", "TimedOut", "PendingRevalidation",
"Approved".
:vartype domain_validation_state: str or ~azure.mgmt.cdn.models.DomainValidationState
:param host_name: The host name of the domain. Must be a domain name.
:type host_name: str
:ivar validation_properties: Values the customer needs to validate domain ownership.
:vartype validation_properties: ~azure.mgmt.cdn.models.DomainValidationProperties
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
'domain_validation_state': {'readonly': True},
'validation_properties': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'tls_settings': {'key': 'properties.tlsSettings', 'type': 'AFDDomainHttpsParameters'},
'azure_dns_zone': {'key': 'properties.azureDnsZone', 'type': 'ResourceReference'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'deployment_status': {'key': 'properties.deploymentStatus', 'type': 'str'},
'domain_validation_state': {'key': 'properties.domainValidationState', 'type': 'str'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'validation_properties': {'key': 'properties.validationProperties', 'type': 'DomainValidationProperties'},
}
def __init__(
self,
*,
tls_settings: Optional["AFDDomainHttpsParameters"] = None,
azure_dns_zone: Optional["ResourceReference"] = None,
host_name: Optional[str] = None,
**kwargs
):
super(AFDDomain, self).__init__(**kwargs)
self.tls_settings = tls_settings
self.azure_dns_zone = azure_dns_zone
self.provisioning_state = None
self.deployment_status = None
self.domain_validation_state = None
self.host_name = host_name
self.validation_properties = None
class AFDDomainHttpsParameters(msrest.serialization.Model):
"""The JSON object that contains the properties to secure a domain.
All required parameters must be populated in order to send to Azure.
:param certificate_type: Required. Defines the source of the SSL certificate. Possible values
include: "CustomerCertificate", "ManagedCertificate".
:type certificate_type: str or ~azure.mgmt.cdn.models.AfdCertificateType
:param minimum_tls_version: TLS protocol version that will be used for Https. Possible values
include: "TLS10", "TLS12".
:type minimum_tls_version: str or ~azure.mgmt.cdn.models.AfdMinimumTlsVersion
:param secret: Resource reference to the secret, i.e. subs/rg/profile/secret.
:type secret: ~azure.mgmt.cdn.models.ResourceReference
"""
_validation = {
'certificate_type': {'required': True},
}
_attribute_map = {
'certificate_type': {'key': 'certificateType', 'type': 'str'},
'minimum_tls_version': {'key': 'minimumTlsVersion', 'type': 'str'},
'secret': {'key': 'secret', 'type': 'ResourceReference'},
}
def __init__(
self,
*,
certificate_type: Union[str, "AfdCertificateType"],
minimum_tls_version: Optional[Union[str, "AfdMinimumTlsVersion"]] = None,
secret: Optional["ResourceReference"] = None,
**kwargs
):
super(AFDDomainHttpsParameters, self).__init__(**kwargs)
self.certificate_type = certificate_type
self.minimum_tls_version = minimum_tls_version
self.secret = secret
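# Illustrative sketch (not part of the generated SDK module): a minimal way to build the
# HTTPS settings for a custom domain. Plain strings are used below where the extensible
# enum values AfdCertificateType/AfdMinimumTlsVersion are expected, which the
# Union[str, ...] signature above allows.
def _example_afd_domain_https_parameters() -> "AFDDomainHttpsParameters":
    return AFDDomainHttpsParameters(
        certificate_type="ManagedCertificate",  # required; "CustomerCertificate" is the alternative
        minimum_tls_version="TLS12",            # optional; "TLS10" or "TLS12"
    )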
class AFDDomainListResult(msrest.serialization.Model):
"""Result of the request to list domains. It contains a list of domain objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of AzureFrontDoor domains within a profile.
:vartype value: list[~azure.mgmt.cdn.models.AFDDomain]
:param next_link: URL to get the next set of domain objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AFDDomain]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(AFDDomainListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class AFDStateProperties(msrest.serialization.Model):
"""The tracking states for afd resources.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(AFDStateProperties, self).__init__(**kwargs)
self.provisioning_state = None
self.deployment_status = None
class AFDDomainUpdatePropertiesParameters(msrest.serialization.Model):
"""The JSON object that contains the properties of the domain to create.
:param tls_settings: The configuration specifying how to enable HTTPS for the domain - using
AzureFrontDoor managed certificate or user's own certificate. If not specified, enabling SSL
uses AzureFrontDoor managed certificate by default.
:type tls_settings: ~azure.mgmt.cdn.models.AFDDomainHttpsParameters
:param azure_dns_zone: Resource reference to the Azure DNS zone.
:type azure_dns_zone: ~azure.mgmt.cdn.models.ResourceReference
"""
_attribute_map = {
'tls_settings': {'key': 'tlsSettings', 'type': 'AFDDomainHttpsParameters'},
'azure_dns_zone': {'key': 'azureDnsZone', 'type': 'ResourceReference'},
}
def __init__(
self,
*,
tls_settings: Optional["AFDDomainHttpsParameters"] = None,
azure_dns_zone: Optional["ResourceReference"] = None,
**kwargs
):
super(AFDDomainUpdatePropertiesParameters, self).__init__(**kwargs)
self.tls_settings = tls_settings
self.azure_dns_zone = azure_dns_zone
class AFDDomainProperties(AFDDomainUpdatePropertiesParameters, AFDStateProperties):
"""The JSON object that contains the properties of the domain to create.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
:param tls_settings: The configuration specifying how to enable HTTPS for the domain - using
AzureFrontDoor managed certificate or user's own certificate. If not specified, enabling SSL
uses AzureFrontDoor managed certificate by default.
:type tls_settings: ~azure.mgmt.cdn.models.AFDDomainHttpsParameters
:param azure_dns_zone: Resource reference to the Azure DNS zone.
:type azure_dns_zone: ~azure.mgmt.cdn.models.ResourceReference
:ivar domain_validation_state: Provisioning substate shows the progress of custom HTTPS
enabling/disabling process step by step. DCV stands for DomainControlValidation. Possible
values include: "Unknown", "Submitting", "Pending", "TimedOut", "PendingRevalidation",
"Approved".
:vartype domain_validation_state: str or ~azure.mgmt.cdn.models.DomainValidationState
:param host_name: Required. The host name of the domain. Must be a domain name.
:type host_name: str
:ivar validation_properties: Values the customer needs to validate domain ownership.
:vartype validation_properties: ~azure.mgmt.cdn.models.DomainValidationProperties
"""
_validation = {
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
'domain_validation_state': {'readonly': True},
'host_name': {'required': True},
'validation_properties': {'readonly': True},
}
_attribute_map = {
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'str'},
'tls_settings': {'key': 'tlsSettings', 'type': 'AFDDomainHttpsParameters'},
'azure_dns_zone': {'key': 'azureDnsZone', 'type': 'ResourceReference'},
'domain_validation_state': {'key': 'domainValidationState', 'type': 'str'},
'host_name': {'key': 'hostName', 'type': 'str'},
'validation_properties': {'key': 'validationProperties', 'type': 'DomainValidationProperties'},
}
def __init__(
self,
*,
host_name: str,
tls_settings: Optional["AFDDomainHttpsParameters"] = None,
azure_dns_zone: Optional["ResourceReference"] = None,
**kwargs
):
super(AFDDomainProperties, self).__init__(tls_settings=tls_settings, azure_dns_zone=azure_dns_zone, **kwargs)
self.provisioning_state = None
self.deployment_status = None
self.domain_validation_state = None
self.host_name = host_name
self.validation_properties = None
self.tls_settings = tls_settings
self.azure_dns_zone = azure_dns_zone
class AFDDomainUpdateParameters(msrest.serialization.Model):
"""The domain JSON object required for domain creation or update.
:param tls_settings: The configuration specifying how to enable HTTPS for the domain - using
AzureFrontDoor managed certificate or user's own certificate. If not specified, enabling SSL
uses AzureFrontDoor managed certificate by default.
:type tls_settings: ~azure.mgmt.cdn.models.AFDDomainHttpsParameters
:param azure_dns_zone: Resource reference to the Azure DNS zone.
:type azure_dns_zone: ~azure.mgmt.cdn.models.ResourceReference
"""
_attribute_map = {
'tls_settings': {'key': 'properties.tlsSettings', 'type': 'AFDDomainHttpsParameters'},
'azure_dns_zone': {'key': 'properties.azureDnsZone', 'type': 'ResourceReference'},
}
def __init__(
self,
*,
tls_settings: Optional["AFDDomainHttpsParameters"] = None,
azure_dns_zone: Optional["ResourceReference"] = None,
**kwargs
):
super(AFDDomainUpdateParameters, self).__init__(**kwargs)
self.tls_settings = tls_settings
self.azure_dns_zone = azure_dns_zone
class TrackedResource(Resource):
"""The resource model definition for a ARM tracked top level resource.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param location: Required. Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'location': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
super(TrackedResource, self).__init__(**kwargs)
self.location = location
self.tags = tags
class AFDEndpoint(TrackedResource):
"""CDN endpoint is the entity within a CDN profile containing configuration information such as origin, protocol, content caching and delivery behavior. The AzureFrontDoor endpoint uses the URL format :code:`<endpointname>`.azureedge.net.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param location: Required. Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param origin_response_timeout_seconds: Send and receive timeout on forwarding request to the
origin. When timeout is reached, the request fails and returns.
:type origin_response_timeout_seconds: int
:param enabled_state: Whether to enable use of this rule. Permitted values are 'Enabled' or
'Disabled'. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
:ivar host_name: The host name of the endpoint structured as {endpointName}.{DNSZone}, e.g.
contoso.azureedge.net.
:vartype host_name: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'location': {'required': True},
'origin_response_timeout_seconds': {'minimum': 16},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
'host_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'origin_response_timeout_seconds': {'key': 'properties.originResponseTimeoutSeconds', 'type': 'int'},
'enabled_state': {'key': 'properties.enabledState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'deployment_status': {'key': 'properties.deploymentStatus', 'type': 'str'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
origin_response_timeout_seconds: Optional[int] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDEndpoint, self).__init__(location=location, tags=tags, **kwargs)
self.origin_response_timeout_seconds = origin_response_timeout_seconds
self.enabled_state = enabled_state
self.provisioning_state = None
self.deployment_status = None
self.host_name = None
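# Illustrative sketch (not part of the generated SDK module): constructing an endpoint
# model; "Global" is a placeholder resource location used only for this example.
def _example_afd_endpoint() -> "AFDEndpoint":
    return AFDEndpoint(
        location="Global",                   # required
        tags={"env": "example"},
        origin_response_timeout_seconds=60,  # must be at least 16 per the validation map above
        enabled_state="Enabled",             # or "Disabled"
    )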
class AFDEndpointListResult(msrest.serialization.Model):
"""Result of the request to list endpoints. It contains a list of endpoint objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of AzureFrontDoor endpoints within a profile.
:vartype value: list[~azure.mgmt.cdn.models.AFDEndpoint]
:param next_link: URL to get the next set of endpoint objects if there is any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AFDEndpoint]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(AFDEndpointListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class AFDEndpointPropertiesUpdateParameters(msrest.serialization.Model):
"""The JSON object containing endpoint update parameters.
:param origin_response_timeout_seconds: Send and receive timeout on forwarding request to the
origin. When timeout is reached, the request fails and returns.
:type origin_response_timeout_seconds: int
:param enabled_state: Whether to enable use of this rule. Permitted values are 'Enabled' or
'Disabled'. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
"""
_validation = {
'origin_response_timeout_seconds': {'minimum': 16},
}
_attribute_map = {
'origin_response_timeout_seconds': {'key': 'originResponseTimeoutSeconds', 'type': 'int'},
'enabled_state': {'key': 'enabledState', 'type': 'str'},
}
def __init__(
self,
*,
origin_response_timeout_seconds: Optional[int] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDEndpointPropertiesUpdateParameters, self).__init__(**kwargs)
self.origin_response_timeout_seconds = origin_response_timeout_seconds
self.enabled_state = enabled_state
class AFDEndpointProperties(AFDStateProperties, AFDEndpointPropertiesUpdateParameters):
"""The JSON object that contains the properties required to create an endpoint.
Variables are only populated by the server, and will be ignored when sending a request.
:param origin_response_timeout_seconds: Send and receive timeout on forwarding request to the
origin. When timeout is reached, the request fails and returns.
:type origin_response_timeout_seconds: int
:param enabled_state: Whether to enable use of this rule. Permitted values are 'Enabled' or
'Disabled'. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
:ivar host_name: The host name of the endpoint structured as {endpointName}.{DNSZone}, e.g.
contoso.azureedge.net.
:vartype host_name: str
"""
_validation = {
'origin_response_timeout_seconds': {'minimum': 16},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
'host_name': {'readonly': True},
}
_attribute_map = {
'origin_response_timeout_seconds': {'key': 'originResponseTimeoutSeconds', 'type': 'int'},
'enabled_state': {'key': 'enabledState', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'str'},
'host_name': {'key': 'hostName', 'type': 'str'},
}
def __init__(
self,
*,
origin_response_timeout_seconds: Optional[int] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDEndpointProperties, self).__init__(origin_response_timeout_seconds=origin_response_timeout_seconds, enabled_state=enabled_state, **kwargs)
self.origin_response_timeout_seconds = origin_response_timeout_seconds
self.enabled_state = enabled_state
self.host_name = None
self.provisioning_state = None
self.deployment_status = None
class AFDEndpointUpdateParameters(msrest.serialization.Model):
"""Properties required to create or update an endpoint.
:param tags: A set of tags. Endpoint tags.
:type tags: dict[str, str]
:param origin_response_timeout_seconds: Send and receive timeout on forwarding request to the
origin. When timeout is reached, the request fails and returns.
:type origin_response_timeout_seconds: int
:param enabled_state: Whether to enable use of this rule. Permitted values are 'Enabled' or
'Disabled'. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
"""
_validation = {
'origin_response_timeout_seconds': {'minimum': 16},
}
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'origin_response_timeout_seconds': {'key': 'properties.originResponseTimeoutSeconds', 'type': 'int'},
'enabled_state': {'key': 'properties.enabledState', 'type': 'str'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
origin_response_timeout_seconds: Optional[int] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDEndpointUpdateParameters, self).__init__(**kwargs)
self.tags = tags
self.origin_response_timeout_seconds = origin_response_timeout_seconds
self.enabled_state = enabled_state
class AfdErrorResponse(msrest.serialization.Model):
"""Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.).
:param error: The error object.
:type error: ~azure.mgmt.cdn.models.ErrorResponse
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ErrorResponse'},
}
def __init__(
self,
*,
error: Optional["ErrorResponse"] = None,
**kwargs
):
super(AfdErrorResponse, self).__init__(**kwargs)
self.error = error
class AFDOrigin(Resource):
"""CDN origin is the source of the content being delivered via CDN. When the edge nodes represented by an endpoint do not have the requested content cached, they attempt to fetch it from one or more of the configured origins.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param azure_origin: Resource reference to the Azure origin resource.
:type azure_origin: ~azure.mgmt.cdn.models.ResourceReference
:param host_name: The address of the origin. Domain names, IPv4 addresses, and IPv6 addresses
are supported. This should be unique across all origins in an endpoint.
:type host_name: str
:param http_port: The value of the HTTP port. Must be between 1 and 65535.
:type http_port: int
:param https_port: The value of the HTTPS port. Must be between 1 and 65535.
:type https_port: int
:param origin_host_header: The host header value sent to the origin with each request. If you
leave this blank, the request hostname determines this value. Azure CDN origins, such as Web
Apps, Blob Storage, and Cloud Services, require this host header value to match the origin
hostname by default. This overrides the host header defined at Endpoint.
:type origin_host_header: str
:param priority: Priority of origin in given origin group for load balancing. Higher priorities
will not be used for load balancing if any lower priority origin is healthy. Must be between 1
and 5.
:type priority: int
:param weight: Weight of the origin in given origin group for load balancing. Must be between 1
and 1000.
:type weight: int
:param shared_private_link_resource: The properties of the private link resource for private
origin.
:type shared_private_link_resource: object
:param enabled_state: Whether to enable health probes to be made against backends defined under
backendPools. Health probes can only be disabled if there is a single enabled backend in a single
enabled backend pool. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'http_port': {'maximum': 65535, 'minimum': 1},
'https_port': {'maximum': 65535, 'minimum': 1},
'priority': {'maximum': 5, 'minimum': 1},
'weight': {'maximum': 1000, 'minimum': 1},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'azure_origin': {'key': 'properties.azureOrigin', 'type': 'ResourceReference'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'http_port': {'key': 'properties.httpPort', 'type': 'int'},
'https_port': {'key': 'properties.httpsPort', 'type': 'int'},
'origin_host_header': {'key': 'properties.originHostHeader', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'weight': {'key': 'properties.weight', 'type': 'int'},
'shared_private_link_resource': {'key': 'properties.sharedPrivateLinkResource', 'type': 'object'},
'enabled_state': {'key': 'properties.enabledState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'deployment_status': {'key': 'properties.deploymentStatus', 'type': 'str'},
}
def __init__(
self,
*,
azure_origin: Optional["ResourceReference"] = None,
host_name: Optional[str] = None,
http_port: Optional[int] = None,
https_port: Optional[int] = None,
origin_host_header: Optional[str] = None,
priority: Optional[int] = None,
weight: Optional[int] = None,
shared_private_link_resource: Optional[object] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDOrigin, self).__init__(**kwargs)
self.azure_origin = azure_origin
self.host_name = host_name
self.http_port = http_port
self.https_port = https_port
self.origin_host_header = origin_host_header
self.priority = priority
self.weight = weight
self.shared_private_link_resource = shared_private_link_resource
self.enabled_state = enabled_state
self.provisioning_state = None
self.deployment_status = None
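# Illustrative sketch (not part of the generated SDK module): an origin pointing at a
# hypothetical host, staying inside the ranges declared in the validation map above.
def _example_afd_origin() -> "AFDOrigin":
    return AFDOrigin(
        host_name="origin.example.com",  # must be unique across all origins in an endpoint
        http_port=80,                    # 1-65535
        https_port=443,                  # 1-65535
        priority=1,                      # 1-5; lower values are preferred while healthy
        weight=1000,                     # 1-1000
        enabled_state="Enabled",
    )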
class AFDOriginGroup(Resource):
"""AFDOrigin group comprising of origins is used for load balancing to origins when the content cannot be served from CDN.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param load_balancing_settings: Load balancing settings for a backend pool.
:type load_balancing_settings: ~azure.mgmt.cdn.models.LoadBalancingSettingsParameters
:param health_probe_settings: Health probe settings to the origin that is used to determine the
health of the origin.
:type health_probe_settings: ~azure.mgmt.cdn.models.HealthProbeParameters
:param traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Time in minutes to shift
the traffic to the endpoint gradually when an unhealthy endpoint becomes healthy or a new
endpoint is added. Default is 10 mins. This property is currently not supported.
:type traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: int
:param response_based_afd_origin_error_detection_settings: The JSON object that contains the
properties to determine origin health using real requests/responses. This property is currently
not supported.
:type response_based_afd_origin_error_detection_settings:
~azure.mgmt.cdn.models.ResponseBasedOriginErrorDetectionParameters
:param session_affinity_state: Whether to allow session affinity on this host. Valid options
are 'Enabled' or 'Disabled'. Possible values include: "Enabled", "Disabled".
:type session_affinity_state: str or ~azure.mgmt.cdn.models.EnabledState
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'maximum': 50, 'minimum': 0},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'load_balancing_settings': {'key': 'properties.loadBalancingSettings', 'type': 'LoadBalancingSettingsParameters'},
'health_probe_settings': {'key': 'properties.healthProbeSettings', 'type': 'HealthProbeParameters'},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'key': 'properties.trafficRestorationTimeToHealedOrNewEndpointsInMinutes', 'type': 'int'},
'response_based_afd_origin_error_detection_settings': {'key': 'properties.responseBasedAfdOriginErrorDetectionSettings', 'type': 'ResponseBasedOriginErrorDetectionParameters'},
'session_affinity_state': {'key': 'properties.sessionAffinityState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'deployment_status': {'key': 'properties.deploymentStatus', 'type': 'str'},
}
def __init__(
self,
*,
load_balancing_settings: Optional["LoadBalancingSettingsParameters"] = None,
health_probe_settings: Optional["HealthProbeParameters"] = None,
traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Optional[int] = None,
response_based_afd_origin_error_detection_settings: Optional["ResponseBasedOriginErrorDetectionParameters"] = None,
session_affinity_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDOriginGroup, self).__init__(**kwargs)
self.load_balancing_settings = load_balancing_settings
self.health_probe_settings = health_probe_settings
self.traffic_restoration_time_to_healed_or_new_endpoints_in_minutes = traffic_restoration_time_to_healed_or_new_endpoints_in_minutes
self.response_based_afd_origin_error_detection_settings = response_based_afd_origin_error_detection_settings
self.session_affinity_state = session_affinity_state
self.provisioning_state = None
self.deployment_status = None
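# Illustrative sketch (not part of the generated SDK module): an origin group that keeps
# the default load balancing and health probe settings and only sets the documented
# scalar options.
def _example_afd_origin_group() -> "AFDOriginGroup":
    return AFDOriginGroup(
        traffic_restoration_time_to_healed_or_new_endpoints_in_minutes=10,  # 0-50, default 10
        session_affinity_state="Disabled",  # "Enabled" or "Disabled"
    )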
class AFDOriginGroupListResult(msrest.serialization.Model):
"""Result of the request to list origin groups. It contains a list of origin groups objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of CDN origin groups within an endpoint.
:vartype value: list[~azure.mgmt.cdn.models.AFDOriginGroup]
:param next_link: URL to get the next set of origin objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AFDOriginGroup]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(AFDOriginGroupListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class AFDOriginGroupUpdatePropertiesParameters(msrest.serialization.Model):
"""The JSON object that contains the properties of the origin group.
:param load_balancing_settings: Load balancing settings for a backend pool.
:type load_balancing_settings: ~azure.mgmt.cdn.models.LoadBalancingSettingsParameters
:param health_probe_settings: Health probe settings to the origin that is used to determine the
health of the origin.
:type health_probe_settings: ~azure.mgmt.cdn.models.HealthProbeParameters
:param traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Time in minutes to shift
the traffic to the endpoint gradually when an unhealthy endpoint becomes healthy or a new
endpoint is added. Default is 10 mins. This property is currently not supported.
:type traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: int
:param response_based_afd_origin_error_detection_settings: The JSON object that contains the
properties to determine origin health using real requests/responses. This property is currently
not supported.
:type response_based_afd_origin_error_detection_settings:
~azure.mgmt.cdn.models.ResponseBasedOriginErrorDetectionParameters
:param session_affinity_state: Whether to allow session affinity on this host. Valid options
are 'Enabled' or 'Disabled'. Possible values include: "Enabled", "Disabled".
:type session_affinity_state: str or ~azure.mgmt.cdn.models.EnabledState
"""
_validation = {
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'maximum': 50, 'minimum': 0},
}
_attribute_map = {
'load_balancing_settings': {'key': 'loadBalancingSettings', 'type': 'LoadBalancingSettingsParameters'},
'health_probe_settings': {'key': 'healthProbeSettings', 'type': 'HealthProbeParameters'},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'key': 'trafficRestorationTimeToHealedOrNewEndpointsInMinutes', 'type': 'int'},
'response_based_afd_origin_error_detection_settings': {'key': 'responseBasedAfdOriginErrorDetectionSettings', 'type': 'ResponseBasedOriginErrorDetectionParameters'},
'session_affinity_state': {'key': 'sessionAffinityState', 'type': 'str'},
}
def __init__(
self,
*,
load_balancing_settings: Optional["LoadBalancingSettingsParameters"] = None,
health_probe_settings: Optional["HealthProbeParameters"] = None,
traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Optional[int] = None,
response_based_afd_origin_error_detection_settings: Optional["ResponseBasedOriginErrorDetectionParameters"] = None,
session_affinity_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDOriginGroupUpdatePropertiesParameters, self).__init__(**kwargs)
self.load_balancing_settings = load_balancing_settings
self.health_probe_settings = health_probe_settings
self.traffic_restoration_time_to_healed_or_new_endpoints_in_minutes = traffic_restoration_time_to_healed_or_new_endpoints_in_minutes
self.response_based_afd_origin_error_detection_settings = response_based_afd_origin_error_detection_settings
self.session_affinity_state = session_affinity_state
class AFDOriginGroupProperties(AFDStateProperties, AFDOriginGroupUpdatePropertiesParameters):
"""The JSON object that contains the properties of the origin group.
Variables are only populated by the server, and will be ignored when sending a request.
:param load_balancing_settings: Load balancing settings for a backend pool.
:type load_balancing_settings: ~azure.mgmt.cdn.models.LoadBalancingSettingsParameters
:param health_probe_settings: Health probe settings to the origin that is used to determine the
health of the origin.
:type health_probe_settings: ~azure.mgmt.cdn.models.HealthProbeParameters
:param traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Time in minutes to shift
the traffic to the endpoint gradually when an unhealthy endpoint becomes healthy or a new
endpoint is added. Default is 10 mins. This property is currently not supported.
:type traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: int
:param response_based_afd_origin_error_detection_settings: The JSON object that contains the
properties to determine origin health using real requests/responses. This property is currently
not supported.
:type response_based_afd_origin_error_detection_settings:
~azure.mgmt.cdn.models.ResponseBasedOriginErrorDetectionParameters
:param session_affinity_state: Whether to allow session affinity on this host. Valid options
are 'Enabled' or 'Disabled'. Possible values include: "Enabled", "Disabled".
:type session_affinity_state: str or ~azure.mgmt.cdn.models.EnabledState
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'maximum': 50, 'minimum': 0},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'load_balancing_settings': {'key': 'loadBalancingSettings', 'type': 'LoadBalancingSettingsParameters'},
'health_probe_settings': {'key': 'healthProbeSettings', 'type': 'HealthProbeParameters'},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'key': 'trafficRestorationTimeToHealedOrNewEndpointsInMinutes', 'type': 'int'},
'response_based_afd_origin_error_detection_settings': {'key': 'responseBasedAfdOriginErrorDetectionSettings', 'type': 'ResponseBasedOriginErrorDetectionParameters'},
'session_affinity_state': {'key': 'sessionAffinityState', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'str'},
}
def __init__(
self,
*,
load_balancing_settings: Optional["LoadBalancingSettingsParameters"] = None,
health_probe_settings: Optional["HealthProbeParameters"] = None,
traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Optional[int] = None,
response_based_afd_origin_error_detection_settings: Optional["ResponseBasedOriginErrorDetectionParameters"] = None,
session_affinity_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDOriginGroupProperties, self).__init__(load_balancing_settings=load_balancing_settings, health_probe_settings=health_probe_settings, traffic_restoration_time_to_healed_or_new_endpoints_in_minutes=traffic_restoration_time_to_healed_or_new_endpoints_in_minutes, response_based_afd_origin_error_detection_settings=response_based_afd_origin_error_detection_settings, session_affinity_state=session_affinity_state, **kwargs)
self.load_balancing_settings = load_balancing_settings
self.health_probe_settings = health_probe_settings
self.traffic_restoration_time_to_healed_or_new_endpoints_in_minutes = traffic_restoration_time_to_healed_or_new_endpoints_in_minutes
self.response_based_afd_origin_error_detection_settings = response_based_afd_origin_error_detection_settings
self.session_affinity_state = session_affinity_state
self.provisioning_state = None
self.deployment_status = None
class AFDOriginGroupUpdateParameters(msrest.serialization.Model):
"""AFDOrigin group properties needed for origin group creation or update.
:param load_balancing_settings: Load balancing settings for a backend pool.
:type load_balancing_settings: ~azure.mgmt.cdn.models.LoadBalancingSettingsParameters
:param health_probe_settings: Health probe settings to the origin that is used to determine the
health of the origin.
:type health_probe_settings: ~azure.mgmt.cdn.models.HealthProbeParameters
:param traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Time in minutes to shift
the traffic to the endpoint gradually when an unhealthy endpoint becomes healthy or a new
endpoint is added. Default is 10 mins. This property is currently not supported.
:type traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: int
:param response_based_afd_origin_error_detection_settings: The JSON object that contains the
properties to determine origin health using real requests/responses. This property is currently
not supported.
:type response_based_afd_origin_error_detection_settings:
~azure.mgmt.cdn.models.ResponseBasedOriginErrorDetectionParameters
:param session_affinity_state: Whether to allow session affinity on this host. Valid options
are 'Enabled' or 'Disabled'. Possible values include: "Enabled", "Disabled".
:type session_affinity_state: str or ~azure.mgmt.cdn.models.EnabledState
"""
_validation = {
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'maximum': 50, 'minimum': 0},
}
_attribute_map = {
'load_balancing_settings': {'key': 'properties.loadBalancingSettings', 'type': 'LoadBalancingSettingsParameters'},
'health_probe_settings': {'key': 'properties.healthProbeSettings', 'type': 'HealthProbeParameters'},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'key': 'properties.trafficRestorationTimeToHealedOrNewEndpointsInMinutes', 'type': 'int'},
'response_based_afd_origin_error_detection_settings': {'key': 'properties.responseBasedAfdOriginErrorDetectionSettings', 'type': 'ResponseBasedOriginErrorDetectionParameters'},
'session_affinity_state': {'key': 'properties.sessionAffinityState', 'type': 'str'},
}
def __init__(
self,
*,
load_balancing_settings: Optional["LoadBalancingSettingsParameters"] = None,
health_probe_settings: Optional["HealthProbeParameters"] = None,
traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Optional[int] = None,
response_based_afd_origin_error_detection_settings: Optional["ResponseBasedOriginErrorDetectionParameters"] = None,
session_affinity_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDOriginGroupUpdateParameters, self).__init__(**kwargs)
self.load_balancing_settings = load_balancing_settings
self.health_probe_settings = health_probe_settings
self.traffic_restoration_time_to_healed_or_new_endpoints_in_minutes = traffic_restoration_time_to_healed_or_new_endpoints_in_minutes
self.response_based_afd_origin_error_detection_settings = response_based_afd_origin_error_detection_settings
self.session_affinity_state = session_affinity_state
class AFDOriginListResult(msrest.serialization.Model):
"""Result of the request to list origins. It contains a list of origin objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of CDN origins within an endpoint.
:vartype value: list[~azure.mgmt.cdn.models.AFDOrigin]
:param next_link: URL to get the next set of origin objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[AFDOrigin]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(AFDOriginListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class AFDOriginUpdatePropertiesParameters(msrest.serialization.Model):
"""The JSON object that contains the properties of the origin.
:param azure_origin: Resource reference to the Azure origin resource.
:type azure_origin: ~azure.mgmt.cdn.models.ResourceReference
:param host_name: The address of the origin. Domain names, IPv4 addresses, and IPv6 addresses
are supported. This should be unique across all origins in an endpoint.
:type host_name: str
:param http_port: The value of the HTTP port. Must be between 1 and 65535.
:type http_port: int
:param https_port: The value of the HTTPS port. Must be between 1 and 65535.
:type https_port: int
:param origin_host_header: The host header value sent to the origin with each request. If you
leave this blank, the request hostname determines this value. Azure CDN origins, such as Web
Apps, Blob Storage, and Cloud Services require this host header value to match the origin
hostname by default. This overrides the host header defined at Endpoint.
:type origin_host_header: str
:param priority: Priority of origin in given origin group for load balancing. Higher priorities
will not be used for load balancing if any lower priority origin is healthy. Must be between 1
and 5.
:type priority: int
:param weight: Weight of the origin in given origin group for load balancing. Must be between 1
and 1000.
:type weight: int
:param shared_private_link_resource: The properties of the private link resource for private
origin.
:type shared_private_link_resource: object
:param enabled_state: Whether to enable health probes to be made against backends defined under
backendPools. Health probes can only be disabled if there is a single enabled backend in a
single enabled backend pool. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
"""
_validation = {
'http_port': {'maximum': 65535, 'minimum': 1},
'https_port': {'maximum': 65535, 'minimum': 1},
'priority': {'maximum': 5, 'minimum': 1},
'weight': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'azure_origin': {'key': 'azureOrigin', 'type': 'ResourceReference'},
'host_name': {'key': 'hostName', 'type': 'str'},
'http_port': {'key': 'httpPort', 'type': 'int'},
'https_port': {'key': 'httpsPort', 'type': 'int'},
'origin_host_header': {'key': 'originHostHeader', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'weight': {'key': 'weight', 'type': 'int'},
'shared_private_link_resource': {'key': 'sharedPrivateLinkResource', 'type': 'object'},
'enabled_state': {'key': 'enabledState', 'type': 'str'},
}
def __init__(
self,
*,
azure_origin: Optional["ResourceReference"] = None,
host_name: Optional[str] = None,
http_port: Optional[int] = None,
https_port: Optional[int] = None,
origin_host_header: Optional[str] = None,
priority: Optional[int] = None,
weight: Optional[int] = None,
shared_private_link_resource: Optional[object] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDOriginUpdatePropertiesParameters, self).__init__(**kwargs)
self.azure_origin = azure_origin
self.host_name = host_name
self.http_port = http_port
self.https_port = https_port
self.origin_host_header = origin_host_header
self.priority = priority
self.weight = weight
self.shared_private_link_resource = shared_private_link_resource
self.enabled_state = enabled_state
class AFDOriginProperties(AFDStateProperties, AFDOriginUpdatePropertiesParameters):
"""The JSON object that contains the properties of the origin.
Variables are only populated by the server, and will be ignored when sending a request.
:param azure_origin: Resource reference to the Azure origin resource.
:type azure_origin: ~azure.mgmt.cdn.models.ResourceReference
:param host_name: The address of the origin. Domain names, IPv4 addresses, and IPv6 addresses
are supported. This should be unique across all origins in an endpoint.
:type host_name: str
:param http_port: The value of the HTTP port. Must be between 1 and 65535.
:type http_port: int
:param https_port: The value of the HTTPS port. Must be between 1 and 65535.
:type https_port: int
:param origin_host_header: The host header value sent to the origin with each request. If you
leave this blank, the request hostname determines this value. Azure CDN origins, such as Web
Apps, Blob Storage, and Cloud Services require this host header value to match the origin
hostname by default. This overrides the host header defined at Endpoint.
:type origin_host_header: str
:param priority: Priority of origin in given origin group for load balancing. Higher priorities
will not be used for load balancing if any lower priority origin is healthy. Must be between 1
and 5.
:type priority: int
:param weight: Weight of the origin in given origin group for load balancing. Must be between 1
and 1000.
:type weight: int
:param shared_private_link_resource: The properties of the private link resource for private
origin.
:type shared_private_link_resource: object
:param enabled_state: Whether to enable health probes to be made against backends defined under
backendPools. Health probes can only be disabled if there is a single enabled backend in a
single enabled backend pool. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'http_port': {'maximum': 65535, 'minimum': 1},
'https_port': {'maximum': 65535, 'minimum': 1},
'priority': {'maximum': 5, 'minimum': 1},
'weight': {'maximum': 1000, 'minimum': 1},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'azure_origin': {'key': 'azureOrigin', 'type': 'ResourceReference'},
'host_name': {'key': 'hostName', 'type': 'str'},
'http_port': {'key': 'httpPort', 'type': 'int'},
'https_port': {'key': 'httpsPort', 'type': 'int'},
'origin_host_header': {'key': 'originHostHeader', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'weight': {'key': 'weight', 'type': 'int'},
'shared_private_link_resource': {'key': 'sharedPrivateLinkResource', 'type': 'object'},
'enabled_state': {'key': 'enabledState', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'str'},
}
def __init__(
self,
*,
azure_origin: Optional["ResourceReference"] = None,
host_name: Optional[str] = None,
http_port: Optional[int] = None,
https_port: Optional[int] = None,
origin_host_header: Optional[str] = None,
priority: Optional[int] = None,
weight: Optional[int] = None,
shared_private_link_resource: Optional[object] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDOriginProperties, self).__init__(azure_origin=azure_origin, host_name=host_name, http_port=http_port, https_port=https_port, origin_host_header=origin_host_header, priority=priority, weight=weight, shared_private_link_resource=shared_private_link_resource, enabled_state=enabled_state, **kwargs)
self.azure_origin = azure_origin
self.host_name = host_name
self.http_port = http_port
self.https_port = https_port
self.origin_host_header = origin_host_header
self.priority = priority
self.weight = weight
self.shared_private_link_resource = shared_private_link_resource
self.enabled_state = enabled_state
self.provisioning_state = None
self.deployment_status = None
class AFDOriginUpdateParameters(msrest.serialization.Model):
"""AFDOrigin properties needed for origin update.
:param azure_origin: Resource reference to the Azure origin resource.
:type azure_origin: ~azure.mgmt.cdn.models.ResourceReference
:param host_name: The address of the origin. Domain names, IPv4 addresses, and IPv6 addresses
are supported. This should be unique across all origins in an endpoint.
:type host_name: str
:param http_port: The value of the HTTP port. Must be between 1 and 65535.
:type http_port: int
:param https_port: The value of the HTTPS port. Must be between 1 and 65535.
:type https_port: int
:param origin_host_header: The host header value sent to the origin with each request. If you
leave this blank, the request hostname determines this value. Azure CDN origins, such as Web
Apps, Blob Storage, and Cloud Services require this host header value to match the origin
hostname by default. This overrides the host header defined at Endpoint.
:type origin_host_header: str
:param priority: Priority of origin in given origin group for load balancing. Higher priorities
will not be used for load balancing if any lower priority origin is healthy. Must be between 1
and 5.
:type priority: int
:param weight: Weight of the origin in given origin group for load balancing. Must be between 1
and 1000.
:type weight: int
:param shared_private_link_resource: The properties of the private link resource for private
origin.
:type shared_private_link_resource: object
:param enabled_state: Whether to enable health probes to be made against backends defined under
backendPools. Health probes can only be disabled if there is a single enabled backend in a
single enabled backend pool. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
"""
_validation = {
'http_port': {'maximum': 65535, 'minimum': 1},
'https_port': {'maximum': 65535, 'minimum': 1},
'priority': {'maximum': 5, 'minimum': 1},
'weight': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'azure_origin': {'key': 'properties.azureOrigin', 'type': 'ResourceReference'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'http_port': {'key': 'properties.httpPort', 'type': 'int'},
'https_port': {'key': 'properties.httpsPort', 'type': 'int'},
'origin_host_header': {'key': 'properties.originHostHeader', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'weight': {'key': 'properties.weight', 'type': 'int'},
'shared_private_link_resource': {'key': 'properties.sharedPrivateLinkResource', 'type': 'object'},
'enabled_state': {'key': 'properties.enabledState', 'type': 'str'},
}
def __init__(
self,
*,
azure_origin: Optional["ResourceReference"] = None,
host_name: Optional[str] = None,
http_port: Optional[int] = None,
https_port: Optional[int] = None,
origin_host_header: Optional[str] = None,
priority: Optional[int] = None,
weight: Optional[int] = None,
shared_private_link_resource: Optional[object] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(AFDOriginUpdateParameters, self).__init__(**kwargs)
self.azure_origin = azure_origin
self.host_name = host_name
self.http_port = http_port
self.https_port = https_port
self.origin_host_header = origin_host_header
self.priority = priority
self.weight = weight
self.shared_private_link_resource = shared_private_link_resource
self.enabled_state = enabled_state
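# Illustrative usage sketch (editorial addition, not part of the generated code):
# updating a Front Door origin. Host name, port, priority and weight are
# placeholders that stay inside the ranges declared in _validation above.
#
#   origin_update = AFDOriginUpdateParameters(
#       host_name="origin.contoso.com",
#       https_port=443,
#       priority=1,
#       weight=1000,
#       enabled_state="Enabled",
#   )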
class AfdPurgeParameters(msrest.serialization.Model):
"""Parameters required for content purge.
All required parameters must be populated in order to send to Azure.
:param content_paths: Required. The path to the content to be purged. Can describe a file path
or a wild card directory.
:type content_paths: list[str]
:param domains: List of domains.
:type domains: list[str]
"""
_validation = {
'content_paths': {'required': True},
}
_attribute_map = {
'content_paths': {'key': 'contentPaths', 'type': '[str]'},
'domains': {'key': 'domains', 'type': '[str]'},
}
def __init__(
self,
*,
content_paths: List[str],
domains: Optional[List[str]] = None,
**kwargs
):
super(AfdPurgeParameters, self).__init__(**kwargs)
self.content_paths = content_paths
self.domains = domains
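# Illustrative usage sketch (editorial addition, not part of the generated code):
# purge parameters for Front Door content. Paths and domain are placeholders;
# content_paths is the only required argument.
#
#   purge = AfdPurgeParameters(
#       content_paths=["/pictures/*", "/index.html"],
#       domains=["www.contoso.com"],
#   )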
class CacheExpirationActionParameters(msrest.serialization.Model):
"""Defines the parameters for the cache expiration action.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleCacheExpirationActionParameters".
:vartype odata_type: str
:param cache_behavior: Required. Caching behavior for the requests. Possible values include:
"BypassCache", "Override", "SetIfMissing".
:type cache_behavior: str or ~azure.mgmt.cdn.models.CacheBehavior
:param cache_type: Required. The level at which the content needs to be cached. Possible values
include: "All".
:type cache_type: str or ~azure.mgmt.cdn.models.CacheType
:param cache_duration: The duration for which the content needs to be cached. Allowed format is
[d.]hh:mm:ss.
:type cache_duration: str
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'cache_behavior': {'required': True},
'cache_type': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'cache_behavior': {'key': 'cacheBehavior', 'type': 'str'},
'cache_type': {'key': 'cacheType', 'type': 'str'},
'cache_duration': {'key': 'cacheDuration', 'type': 'str'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleCacheExpirationActionParameters"
def __init__(
self,
*,
cache_behavior: Union[str, "CacheBehavior"],
cache_type: Union[str, "CacheType"],
cache_duration: Optional[str] = None,
**kwargs
):
super(CacheExpirationActionParameters, self).__init__(**kwargs)
self.cache_behavior = cache_behavior
self.cache_type = cache_type
self.cache_duration = cache_duration
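# Illustrative usage sketch (editorial addition, not part of the generated code):
# overriding the cache expiration for matched requests. The duration uses the
# [d.]hh:mm:ss format described in the docstring (one day and twelve hours here).
#
#   cache_expiration = CacheExpirationActionParameters(
#       cache_behavior="Override",
#       cache_type="All",
#       cache_duration="1.12:00:00",
#   )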
class CacheKeyQueryStringActionParameters(msrest.serialization.Model):
"""Defines the parameters for the cache-key query string action.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleCacheKeyQueryStringBehaviorActionParameters".
:vartype odata_type: str
:param query_string_behavior: Required. Caching behavior for the requests. Possible values
include: "Include", "IncludeAll", "Exclude", "ExcludeAll".
:type query_string_behavior: str or ~azure.mgmt.cdn.models.QueryStringBehavior
:param query_parameters: Query parameters to include or exclude (comma separated).
:type query_parameters: str
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'query_string_behavior': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'query_string_behavior': {'key': 'queryStringBehavior', 'type': 'str'},
'query_parameters': {'key': 'queryParameters', 'type': 'str'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleCacheKeyQueryStringBehaviorActionParameters"
def __init__(
self,
*,
query_string_behavior: Union[str, "QueryStringBehavior"],
query_parameters: Optional[str] = None,
**kwargs
):
super(CacheKeyQueryStringActionParameters, self).__init__(**kwargs)
self.query_string_behavior = query_string_behavior
self.query_parameters = query_parameters
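# Illustrative usage sketch (editorial addition, not part of the generated code):
# including only selected query parameters in the cache key. The parameter list
# is comma separated, as noted in the docstring; names are placeholders.
#
#   cache_key_query_string = CacheKeyQueryStringActionParameters(
#       query_string_behavior="Include",
#       query_parameters="version,locale",
#   )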
class CdnCertificateSourceParameters(msrest.serialization.Model):
"""Defines the parameters for using CDN managed certificate for securing custom domain.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.CdnCertificateSourceParameters".
:vartype odata_type: str
:param certificate_type: Required. Type of certificate used. Possible values include: "Shared",
"Dedicated".
:type certificate_type: str or ~azure.mgmt.cdn.models.CertificateType
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'certificate_type': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'certificate_type': {'key': 'certificateType', 'type': 'str'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.CdnCertificateSourceParameters"
def __init__(
self,
*,
certificate_type: Union[str, "CertificateType"],
**kwargs
):
super(CdnCertificateSourceParameters, self).__init__(**kwargs)
self.certificate_type = certificate_type
class CdnEndpoint(msrest.serialization.Model):
"""Defines the ARM Resource ID for the linked endpoints.
:param id: ARM Resource ID string.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
**kwargs
):
super(CdnEndpoint, self).__init__(**kwargs)
self.id = id
class CustomDomainHttpsParameters(msrest.serialization.Model):
"""The JSON object that contains the properties to secure a custom domain.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: UserManagedHttpsParameters, CdnManagedHttpsParameters.
All required parameters must be populated in order to send to Azure.
:param certificate_source: Required. Defines the source of the SSL certificate. Constant filled
by server. Possible values include: "AzureKeyVault", "Cdn".
:type certificate_source: str or ~azure.mgmt.cdn.models.CertificateSource
:param protocol_type: Required. Defines the TLS extension protocol that is used for secure
delivery. Possible values include: "ServerNameIndication", "IPBased".
:type protocol_type: str or ~azure.mgmt.cdn.models.ProtocolType
:param minimum_tls_version: TLS protocol version that will be used for HTTPS. Possible values
include: "None", "TLS10", "TLS12".
:type minimum_tls_version: str or ~azure.mgmt.cdn.models.MinimumTlsVersion
"""
_validation = {
'certificate_source': {'required': True},
'protocol_type': {'required': True},
}
_attribute_map = {
'certificate_source': {'key': 'certificateSource', 'type': 'str'},
'protocol_type': {'key': 'protocolType', 'type': 'str'},
'minimum_tls_version': {'key': 'minimumTlsVersion', 'type': 'str'},
}
_subtype_map = {
'certificate_source': {'AzureKeyVault': 'UserManagedHttpsParameters', 'Cdn': 'CdnManagedHttpsParameters'}
}
def __init__(
self,
*,
protocol_type: Union[str, "ProtocolType"],
minimum_tls_version: Optional[Union[str, "MinimumTlsVersion"]] = None,
**kwargs
):
super(CustomDomainHttpsParameters, self).__init__(**kwargs)
self.certificate_source = None # type: Optional[str]
self.protocol_type = protocol_type
self.minimum_tls_version = minimum_tls_version
class CdnManagedHttpsParameters(CustomDomainHttpsParameters):
"""Defines the certificate source parameters using CDN managed certificate for enabling SSL.
All required parameters must be populated in order to send to Azure.
:param certificate_source: Required. Defines the source of the SSL certificate. Constant filled
by server. Possible values include: "AzureKeyVault", "Cdn".
:type certificate_source: str or ~azure.mgmt.cdn.models.CertificateSource
:param protocol_type: Required. Defines the TLS extension protocol that is used for secure
delivery. Possible values include: "ServerNameIndication", "IPBased".
:type protocol_type: str or ~azure.mgmt.cdn.models.ProtocolType
:param minimum_tls_version: TLS protocol version that will be used for HTTPS. Possible values
include: "None", "TLS10", "TLS12".
:type minimum_tls_version: str or ~azure.mgmt.cdn.models.MinimumTlsVersion
:param certificate_source_parameters: Required. Defines the certificate source parameters using
CDN managed certificate for enabling SSL.
:type certificate_source_parameters: ~azure.mgmt.cdn.models.CdnCertificateSourceParameters
"""
_validation = {
'certificate_source': {'required': True},
'protocol_type': {'required': True},
'certificate_source_parameters': {'required': True},
}
_attribute_map = {
'certificate_source': {'key': 'certificateSource', 'type': 'str'},
'protocol_type': {'key': 'protocolType', 'type': 'str'},
'minimum_tls_version': {'key': 'minimumTlsVersion', 'type': 'str'},
'certificate_source_parameters': {'key': 'certificateSourceParameters', 'type': 'CdnCertificateSourceParameters'},
}
def __init__(
self,
*,
protocol_type: Union[str, "ProtocolType"],
certificate_source_parameters: "CdnCertificateSourceParameters",
minimum_tls_version: Optional[Union[str, "MinimumTlsVersion"]] = None,
**kwargs
):
super(CdnManagedHttpsParameters, self).__init__(protocol_type=protocol_type, minimum_tls_version=minimum_tls_version, **kwargs)
self.certificate_source = 'Cdn' # type: str
self.certificate_source_parameters = certificate_source_parameters
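# Illustrative usage sketch (editorial addition, not part of the generated code):
# enabling HTTPS on a custom domain with a CDN managed certificate, combining the
# two classes defined above. The enum values are taken from their docstrings.
#
#   https_parameters = CdnManagedHttpsParameters(
#       protocol_type="ServerNameIndication",
#       certificate_source_parameters=CdnCertificateSourceParameters(
#           certificate_type="Dedicated",
#       ),
#       minimum_tls_version="TLS12",
#   )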
class CdnWebApplicationFirewallPolicy(TrackedResource):
"""Defines web application firewall policy for Azure CDN.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param location: Required. Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: Gets a unique read-only string that changes whenever the resource is updated.
:type etag: str
:param sku: Required. The pricing tier (defines a CDN provider, feature list and rate) of the
CdnWebApplicationFirewallPolicy.
:type sku: ~azure.mgmt.cdn.models.Sku
:param policy_settings: Describes policySettings for policy.
:type policy_settings: ~azure.mgmt.cdn.models.PolicySettings
:param rate_limit_rules: Describes rate limit rules inside the policy.
:type rate_limit_rules: ~azure.mgmt.cdn.models.RateLimitRuleList
:param custom_rules: Describes custom rules inside the policy.
:type custom_rules: ~azure.mgmt.cdn.models.CustomRuleList
:param managed_rules: Describes managed rules inside the policy.
:type managed_rules: ~azure.mgmt.cdn.models.ManagedRuleSetList
:ivar endpoint_links: Describes Azure CDN endpoints associated with this Web Application
Firewall policy.
:vartype endpoint_links: list[~azure.mgmt.cdn.models.CdnEndpoint]
:ivar provisioning_state: Provisioning state of the WebApplicationFirewallPolicy. Possible
values include: "Creating", "Succeeded", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.ProvisioningState
:ivar resource_state: Resource status of the policy. Possible values include: "Creating",
"Enabling", "Enabled", "Disabling", "Disabled", "Deleting".
:vartype resource_state: str or ~azure.mgmt.cdn.models.PolicyResourceState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'location': {'required': True},
'sku': {'required': True},
'endpoint_links': {'readonly': True},
'provisioning_state': {'readonly': True},
'resource_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'etag': {'key': 'etag', 'type': 'str'},
'sku': {'key': 'sku', 'type': 'Sku'},
'policy_settings': {'key': 'properties.policySettings', 'type': 'PolicySettings'},
'rate_limit_rules': {'key': 'properties.rateLimitRules', 'type': 'RateLimitRuleList'},
'custom_rules': {'key': 'properties.customRules', 'type': 'CustomRuleList'},
'managed_rules': {'key': 'properties.managedRules', 'type': 'ManagedRuleSetList'},
'endpoint_links': {'key': 'properties.endpointLinks', 'type': '[CdnEndpoint]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'resource_state': {'key': 'properties.resourceState', 'type': 'str'},
}
def __init__(
self,
*,
location: str,
sku: "Sku",
tags: Optional[Dict[str, str]] = None,
etag: Optional[str] = None,
policy_settings: Optional["PolicySettings"] = None,
rate_limit_rules: Optional["RateLimitRuleList"] = None,
custom_rules: Optional["CustomRuleList"] = None,
managed_rules: Optional["ManagedRuleSetList"] = None,
**kwargs
):
super(CdnWebApplicationFirewallPolicy, self).__init__(location=location, tags=tags, **kwargs)
self.etag = etag
self.sku = sku
self.policy_settings = policy_settings
self.rate_limit_rules = rate_limit_rules
self.custom_rules = custom_rules
self.managed_rules = managed_rules
self.endpoint_links = None
self.provisioning_state = None
self.resource_state = None
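# Illustrative usage sketch (editorial addition, not part of the generated code):
# the minimal required arguments for a WAF policy. Sku is defined elsewhere in
# this module and is assumed to take a `name` keyword; the SKU name shown is a
# placeholder.
#
#   waf_policy = CdnWebApplicationFirewallPolicy(
#       location="Global",
#       sku=Sku(name="Standard_Microsoft"),
#   )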
class CdnWebApplicationFirewallPolicyList(msrest.serialization.Model):
"""Defines a list of WebApplicationFirewallPolicies for Azure CDN. It contains a list of WebApplicationFirewallPolicy objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of Azure CDN WebApplicationFirewallPolicies within a resource group.
:vartype value: list[~azure.mgmt.cdn.models.CdnWebApplicationFirewallPolicy]
:param next_link: URL to get the next set of WebApplicationFirewallPolicy objects if there are
any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[CdnWebApplicationFirewallPolicy]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(CdnWebApplicationFirewallPolicyList, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class CdnWebApplicationFirewallPolicyPatchParameters(msrest.serialization.Model):
"""Properties required to update a CdnWebApplicationFirewallPolicy.
:param tags: A set of tags. CdnWebApplicationFirewallPolicy tags.
:type tags: dict[str, str]
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
super(CdnWebApplicationFirewallPolicyPatchParameters, self).__init__(**kwargs)
self.tags = tags
class Certificate(msrest.serialization.Model):
"""Certificate used for https.
:param subject: Subject name in the certificate.
:type subject: str
:param expiration_date: Certificate expiration date.
:type expiration_date: str
:param thumbprint: Certificate thumbprint.
:type thumbprint: str
"""
_attribute_map = {
'subject': {'key': 'subject', 'type': 'str'},
'expiration_date': {'key': 'expirationDate', 'type': 'str'},
'thumbprint': {'key': 'thumbprint', 'type': 'str'},
}
def __init__(
self,
*,
subject: Optional[str] = None,
expiration_date: Optional[str] = None,
thumbprint: Optional[str] = None,
**kwargs
):
super(Certificate, self).__init__(**kwargs)
self.subject = subject
self.expiration_date = expiration_date
self.thumbprint = thumbprint
class CheckNameAvailabilityInput(msrest.serialization.Model):
"""Input of CheckNameAvailability API.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param name: Required. The resource name to validate.
:type name: str
:ivar type: Required. The type of the resource whose name is to be validated. Default value:
"Microsoft.Cdn/Profiles/Endpoints".
:vartype type: str
"""
_validation = {
'name': {'required': True},
'type': {'required': True, 'constant': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
type = "Microsoft.Cdn/Profiles/Endpoints"
def __init__(
self,
*,
name: str,
**kwargs
):
super(CheckNameAvailabilityInput, self).__init__(**kwargs)
self.name = name
class CheckNameAvailabilityOutput(msrest.serialization.Model):
"""Output of check name availability API.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name_available: Indicates whether the name is available.
:vartype name_available: bool
:ivar reason: The reason why the name is not available.
:vartype reason: str
:ivar message: The detailed error message describing why the name is not available.
:vartype message: str
"""
_validation = {
'name_available': {'readonly': True},
'reason': {'readonly': True},
'message': {'readonly': True},
}
_attribute_map = {
'name_available': {'key': 'nameAvailable', 'type': 'bool'},
'reason': {'key': 'reason', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(CheckNameAvailabilityOutput, self).__init__(**kwargs)
self.name_available = None
self.reason = None
self.message = None
class CidrIpAddress(msrest.serialization.Model):
"""CIDR Ip address.
:param base_ip_address: Ip address itself.
:type base_ip_address: str
:param prefix_length: The length of the prefix of the ip address.
:type prefix_length: int
"""
_attribute_map = {
'base_ip_address': {'key': 'baseIpAddress', 'type': 'str'},
'prefix_length': {'key': 'prefixLength', 'type': 'int'},
}
def __init__(
self,
*,
base_ip_address: Optional[str] = None,
prefix_length: Optional[int] = None,
**kwargs
):
super(CidrIpAddress, self).__init__(**kwargs)
self.base_ip_address = base_ip_address
self.prefix_length = prefix_length
class Components18OrqelSchemasWafmetricsresponsePropertiesSeriesItemsPropertiesDataItems(msrest.serialization.Model):
"""Components18OrqelSchemasWafmetricsresponsePropertiesSeriesItemsPropertiesDataItems.
:param date_time:
:type date_time: ~datetime.datetime
:param value:
:type value: float
"""
_attribute_map = {
'date_time': {'key': 'dateTime', 'type': 'iso-8601'},
'value': {'key': 'value', 'type': 'float'},
}
def __init__(
self,
*,
date_time: Optional[datetime.datetime] = None,
value: Optional[float] = None,
**kwargs
):
super(Components18OrqelSchemasWafmetricsresponsePropertiesSeriesItemsPropertiesDataItems, self).__init__(**kwargs)
self.date_time = date_time
self.value = value
class Components1Gs0LlpSchemasMetricsresponsePropertiesSeriesItemsPropertiesDataItems(msrest.serialization.Model):
"""Components1Gs0LlpSchemasMetricsresponsePropertiesSeriesItemsPropertiesDataItems.
:param date_time:
:type date_time: ~datetime.datetime
:param value:
:type value: float
"""
_attribute_map = {
'date_time': {'key': 'dateTime', 'type': 'iso-8601'},
'value': {'key': 'value', 'type': 'float'},
}
def __init__(
self,
*,
date_time: Optional[datetime.datetime] = None,
value: Optional[float] = None,
**kwargs
):
super(Components1Gs0LlpSchemasMetricsresponsePropertiesSeriesItemsPropertiesDataItems, self).__init__(**kwargs)
self.date_time = date_time
self.value = value
class ComponentsKpo1PjSchemasWafrankingsresponsePropertiesDataItemsPropertiesMetricsItems(msrest.serialization.Model):
"""ComponentsKpo1PjSchemasWafrankingsresponsePropertiesDataItemsPropertiesMetricsItems.
:param metric:
:type metric: str
:param value:
:type value: long
:param percentage:
:type percentage: float
"""
_attribute_map = {
'metric': {'key': 'metric', 'type': 'str'},
'value': {'key': 'value', 'type': 'long'},
'percentage': {'key': 'percentage', 'type': 'float'},
}
def __init__(
self,
*,
metric: Optional[str] = None,
value: Optional[int] = None,
percentage: Optional[float] = None,
**kwargs
):
super(ComponentsKpo1PjSchemasWafrankingsresponsePropertiesDataItemsPropertiesMetricsItems, self).__init__(**kwargs)
self.metric = metric
self.value = value
self.percentage = percentage
class CompressionSettings(msrest.serialization.Model):
"""settings for compression.
:param content_types_to_compress: List of content types on which compression applies. The value
should be a valid MIME type.
:type content_types_to_compress: list[str]
:param is_compression_enabled: Indicates whether content compression is enabled on
AzureFrontDoor. Default value is false. If compression is enabled, content will be served as
compressed if the user requests a compressed version. Content won't be compressed on
AzureFrontDoor when requested content is smaller than 1 byte or larger than 1 MB.
:type is_compression_enabled: bool
"""
_attribute_map = {
'content_types_to_compress': {'key': 'contentTypesToCompress', 'type': '[str]'},
'is_compression_enabled': {'key': 'isCompressionEnabled', 'type': 'bool'},
}
def __init__(
self,
*,
content_types_to_compress: Optional[List[str]] = None,
is_compression_enabled: Optional[bool] = None,
**kwargs
):
super(CompressionSettings, self).__init__(**kwargs)
self.content_types_to_compress = content_types_to_compress
self.is_compression_enabled = is_compression_enabled
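# Illustrative usage sketch (editorial addition, not part of the generated code):
# enabling compression for a few common MIME types. The list values are
# placeholders; any valid MIME type is accepted per the docstring.
#
#   compression = CompressionSettings(
#       content_types_to_compress=["text/html", "text/css", "application/json"],
#       is_compression_enabled=True,
#   )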
class ContinentsResponse(msrest.serialization.Model):
"""Continents Response.
:param continents:
:type continents: list[~azure.mgmt.cdn.models.ContinentsResponseContinentsItem]
:param country_or_regions:
:type country_or_regions: list[~azure.mgmt.cdn.models.ContinentsResponseCountryOrRegionsItem]
"""
_attribute_map = {
'continents': {'key': 'continents', 'type': '[ContinentsResponseContinentsItem]'},
'country_or_regions': {'key': 'countryOrRegions', 'type': '[ContinentsResponseCountryOrRegionsItem]'},
}
def __init__(
self,
*,
continents: Optional[List["ContinentsResponseContinentsItem"]] = None,
country_or_regions: Optional[List["ContinentsResponseCountryOrRegionsItem"]] = None,
**kwargs
):
super(ContinentsResponse, self).__init__(**kwargs)
self.continents = continents
self.country_or_regions = country_or_regions
class ContinentsResponseContinentsItem(msrest.serialization.Model):
"""ContinentsResponseContinentsItem.
:param id:
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
**kwargs
):
super(ContinentsResponseContinentsItem, self).__init__(**kwargs)
self.id = id
class ContinentsResponseCountryOrRegionsItem(msrest.serialization.Model):
"""ContinentsResponseCountryOrRegionsItem.
:param id:
:type id: str
:param continent_id:
:type continent_id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'continent_id': {'key': 'continentId', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
continent_id: Optional[str] = None,
**kwargs
):
super(ContinentsResponseCountryOrRegionsItem, self).__init__(**kwargs)
self.id = id
self.continent_id = continent_id
class CookiesMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for Cookies match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleCookiesConditionParameters".
:vartype odata_type: str
:param selector: Name of the cookie to be matched.
:type selector: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"Equal", "Contains", "BeginsWith", "EndsWith", "LessThan", "LessThanOrEqual", "GreaterThan",
"GreaterThanOrEqual", "RegEx".
:type operator: str or ~azure.mgmt.cdn.models.CookiesOperator
:param negate_condition: Describes if this is a negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleCookiesConditionParameters"
def __init__(
self,
*,
operator: Union[str, "CookiesOperator"],
selector: Optional[str] = None,
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(CookiesMatchConditionParameters, self).__init__(**kwargs)
self.selector = selector
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
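# Illustrative usage sketch (editorial addition, not part of the generated code):
# matching requests whose "site-version" cookie equals "beta". The selector and
# match value are placeholders; "Lowercase" is assumed to be a valid Transform
# enum value.
#
#   cookies_condition = CookiesMatchConditionParameters(
#       selector="site-version",
#       operator="Equal",
#       match_values=["beta"],
#       transforms=["Lowercase"],
#   )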
class CustomDomain(Resource):
"""Friendly domain name mapping to the endpoint hostname that the customer provides for branding purposes, e.g. www.contoso.com.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param host_name: The host name of the custom domain. Must be a domain name.
:type host_name: str
:ivar resource_state: Resource status of the custom domain. Possible values include:
"Creating", "Active", "Deleting".
:vartype resource_state: str or ~azure.mgmt.cdn.models.CustomDomainResourceState
:ivar custom_https_provisioning_state: Provisioning status of Custom Https of the custom
domain. Possible values include: "Enabling", "Enabled", "Disabling", "Disabled", "Failed".
:vartype custom_https_provisioning_state: str or
~azure.mgmt.cdn.models.CustomHttpsProvisioningState
:ivar custom_https_provisioning_substate: Provisioning substate shows the progress of custom
HTTPS enabling/disabling process step by step. Possible values include:
"SubmittingDomainControlValidationRequest", "PendingDomainControlValidationREquestApproval",
"DomainControlValidationRequestApproved", "DomainControlValidationRequestRejected",
"DomainControlValidationRequestTimedOut", "IssuingCertificate", "DeployingCertificate",
"CertificateDeployed", "DeletingCertificate", "CertificateDeleted".
:vartype custom_https_provisioning_substate: str or
~azure.mgmt.cdn.models.CustomHttpsProvisioningSubstate
:param validation_data: Special validation or data may be required when delivering CDN to some
regions due to local compliance reasons. For example, the ICP license number of a custom domain
is required to deliver content in China.
:type validation_data: str
:ivar provisioning_state: Provisioning status of the custom domain.
:vartype provisioning_state: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'resource_state': {'readonly': True},
'custom_https_provisioning_state': {'readonly': True},
'custom_https_provisioning_substate': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'resource_state': {'key': 'properties.resourceState', 'type': 'str'},
'custom_https_provisioning_state': {'key': 'properties.customHttpsProvisioningState', 'type': 'str'},
'custom_https_provisioning_substate': {'key': 'properties.customHttpsProvisioningSubstate', 'type': 'str'},
'validation_data': {'key': 'properties.validationData', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
host_name: Optional[str] = None,
validation_data: Optional[str] = None,
**kwargs
):
super(CustomDomain, self).__init__(**kwargs)
self.host_name = host_name
self.resource_state = None
self.custom_https_provisioning_state = None
self.custom_https_provisioning_substate = None
self.validation_data = validation_data
self.provisioning_state = None
class CustomDomainListResult(msrest.serialization.Model):
"""Result of the request to list custom domains. It contains a list of custom domain objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of CDN CustomDomains within an endpoint.
:vartype value: list[~azure.mgmt.cdn.models.CustomDomain]
:param next_link: URL to get the next set of custom domain objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[CustomDomain]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(CustomDomainListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class CustomDomainParameters(msrest.serialization.Model):
"""The customDomain JSON object required for custom domain creation or update.
:param host_name: The host name of the custom domain. Must be a domain name.
:type host_name: str
"""
_attribute_map = {
'host_name': {'key': 'properties.hostName', 'type': 'str'},
}
def __init__(
self,
*,
host_name: Optional[str] = None,
**kwargs
):
super(CustomDomainParameters, self).__init__(**kwargs)
self.host_name = host_name
class CustomerCertificate(Certificate):
"""Customer Certificate used for https.
All required parameters must be populated in order to send to Azure.
:param subject: Subject name in the certificate.
:type subject: str
:param expiration_date: Certificate expiration date.
:type expiration_date: str
:param thumbprint: Certificate thumbprint.
:type thumbprint: str
:param version: Certificate version.
:type version: str
:param certificate_authority: Certificate issuing authority.
:type certificate_authority: str
:param certificate_url: Required. Complete Url to the certificate.
:type certificate_url: str
:param use_latest_version: Whether to use the latest version for the certificate.
:type use_latest_version: bool
:param subject_alternative_names: The list of SANs.
:type subject_alternative_names: list[str]
"""
_validation = {
'certificate_url': {'required': True},
}
_attribute_map = {
'subject': {'key': 'subject', 'type': 'str'},
'expiration_date': {'key': 'expirationDate', 'type': 'str'},
'thumbprint': {'key': 'thumbprint', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'certificate_authority': {'key': 'certificateAuthority', 'type': 'str'},
'certificate_url': {'key': 'certificateUrl', 'type': 'str'},
'use_latest_version': {'key': 'useLatestVersion', 'type': 'bool'},
'subject_alternative_names': {'key': 'subjectAlternativeNames', 'type': '[str]'},
}
def __init__(
self,
*,
certificate_url: str,
subject: Optional[str] = None,
expiration_date: Optional[str] = None,
thumbprint: Optional[str] = None,
version: Optional[str] = None,
certificate_authority: Optional[str] = None,
use_latest_version: Optional[bool] = None,
subject_alternative_names: Optional[List[str]] = None,
**kwargs
):
super(CustomerCertificate, self).__init__(subject=subject, expiration_date=expiration_date, thumbprint=thumbprint, **kwargs)
self.version = version
self.certificate_authority = certificate_authority
self.certificate_url = certificate_url
self.use_latest_version = use_latest_version
self.subject_alternative_names = subject_alternative_names
class SecretParameters(msrest.serialization.Model):
"""The json object containing secret parameters.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: CustomerCertificateParameters, ManagedCertificateParameters, UrlSigningKeyParameters.
All required parameters must be populated in order to send to Azure.
:param type: Required. The type of the Secret to create. Constant filled by server. Possible
values include: "UrlSigningKey", "CustomerCertificate", "ManagedCertificate".
:type type: str or ~azure.mgmt.cdn.models.SecretType
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
_subtype_map = {
'type': {'CustomerCertificate': 'CustomerCertificateParameters', 'ManagedCertificate': 'ManagedCertificateParameters', 'UrlSigningKey': 'UrlSigningKeyParameters'}
}
def __init__(
self,
**kwargs
):
super(SecretParameters, self).__init__(**kwargs)
self.type = None # type: Optional[str]
class CustomerCertificateParameters(SecretParameters):
"""Customer Certificate used for https.
All required parameters must be populated in order to send to Azure.
:param type: Required. The type of the Secret to create. Constant filled by server. Possible
values include: "UrlSigningKey", "CustomerCertificate", "ManagedCertificate".
:type type: str or ~azure.mgmt.cdn.models.SecretType
:param secret_source: Required. Resource reference to the KV secret.
:type secret_source: ~azure.mgmt.cdn.models.ResourceReference
:param secret_version: Version of the secret to be used.
:type secret_version: str
:param certificate_authority: Certificate issuing authority.
:type certificate_authority: str
:param use_latest_version: Whether to use the latest version for the certificate.
:type use_latest_version: bool
:param subject_alternative_names: The list of SANs.
:type subject_alternative_names: list[str]
"""
_validation = {
'type': {'required': True},
'secret_source': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'secret_source': {'key': 'secretSource', 'type': 'ResourceReference'},
'secret_version': {'key': 'secretVersion', 'type': 'str'},
'certificate_authority': {'key': 'certificateAuthority', 'type': 'str'},
'use_latest_version': {'key': 'useLatestVersion', 'type': 'bool'},
'subject_alternative_names': {'key': 'subjectAlternativeNames', 'type': '[str]'},
}
def __init__(
self,
*,
secret_source: "ResourceReference",
secret_version: Optional[str] = None,
certificate_authority: Optional[str] = None,
use_latest_version: Optional[bool] = None,
subject_alternative_names: Optional[List[str]] = None,
**kwargs
):
super(CustomerCertificateParameters, self).__init__(**kwargs)
self.type = 'CustomerCertificate' # type: str
self.secret_source = secret_source
self.secret_version = secret_version
self.certificate_authority = certificate_authority
self.use_latest_version = use_latest_version
self.subject_alternative_names = subject_alternative_names
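# Illustrative usage sketch (editorial addition, not part of the generated code):
# referencing a customer certificate stored as a Key Vault secret.
# ResourceReference is defined elsewhere in this module and is assumed to take an
# `id` keyword; the resource ID shown is a placeholder.
#
#   certificate_secret = CustomerCertificateParameters(
#       secret_source=ResourceReference(
#           id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
#              "Microsoft.KeyVault/vaults/<vault>/secrets/<certificate>",
#       ),
#       use_latest_version=True,
#   )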
class CustomRule(msrest.serialization.Model):
"""Defines the common attributes for a custom rule that can be included in a waf policy.
All required parameters must be populated in order to send to Azure.
:param name: Required. Defines the name of the custom rule.
:type name: str
:param enabled_state: Describes if the custom rule is in enabled or disabled state. Defaults to
Enabled if not specified. Possible values include: "Disabled", "Enabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.CustomRuleEnabledState
:param priority: Required. Defines in what order this rule is evaluated in the overall list of
custom rules.
:type priority: int
:param match_conditions: Required. List of match conditions.
:type match_conditions: list[~azure.mgmt.cdn.models.MatchCondition]
:param action: Required. Describes what action is applied when the rule matches. Possible values
include: "Allow", "Block", "Log", "Redirect".
:type action: str or ~azure.mgmt.cdn.models.ActionType
"""
_validation = {
'name': {'required': True},
'priority': {'required': True, 'maximum': 1000, 'minimum': 0},
'match_conditions': {'required': True},
'action': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'enabled_state': {'key': 'enabledState', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'match_conditions': {'key': 'matchConditions', 'type': '[MatchCondition]'},
'action': {'key': 'action', 'type': 'str'},
}
def __init__(
self,
*,
name: str,
priority: int,
match_conditions: List["MatchCondition"],
action: Union[str, "ActionType"],
enabled_state: Optional[Union[str, "CustomRuleEnabledState"]] = None,
**kwargs
):
super(CustomRule, self).__init__(**kwargs)
self.name = name
self.enabled_state = enabled_state
self.priority = priority
self.match_conditions = match_conditions
self.action = action
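# Illustrative usage sketch (editorial addition, not part of the generated code):
# a custom WAF rule that blocks matching requests. `match_condition` stands for a
# MatchCondition instance built elsewhere in this module; the priority stays
# within the 0-1000 range declared in _validation above.
#
#   block_rule = CustomRule(
#       name="BlockBadBots",
#       priority=100,
#       match_conditions=[match_condition],
#       action="Block",
#       enabled_state="Enabled",
#   )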
class CustomRuleList(msrest.serialization.Model):
"""Defines contents of custom rules.
:param rules: List of rules.
:type rules: list[~azure.mgmt.cdn.models.CustomRule]
"""
_attribute_map = {
'rules': {'key': 'rules', 'type': '[CustomRule]'},
}
def __init__(
self,
*,
rules: Optional[List["CustomRule"]] = None,
**kwargs
):
super(CustomRuleList, self).__init__(**kwargs)
self.rules = rules
class DeepCreatedOrigin(msrest.serialization.Model):
"""The main origin of CDN content which is added when creating a CDN endpoint.
All required parameters must be populated in order to send to Azure.
:param name: Required. Origin name which must be unique within the endpoint.
:type name: str
:param host_name: The address of the origin. It can be a domain name, IPv4 address, or IPv6
address. This should be unique across all origins in an endpoint.
:type host_name: str
:param http_port: The value of the HTTP port. Must be between 1 and 65535.
:type http_port: int
:param https_port: The value of the HTTPS port. Must be between 1 and 65535.
:type https_port: int
:param origin_host_header: The host header value sent to the origin with each request. If you
leave this blank, the request hostname determines this value. Azure CDN origins, such as Web
Apps, Blob Storage, and Cloud Services require this host header value to match the origin
hostname by default.
:type origin_host_header: str
:param priority: Priority of origin in given origin group for load balancing. Higher priorities
will not be used for load balancing if any lower priority origin is healthy. Must be between 1
and 5.
:type priority: int
:param weight: Weight of the origin in given origin group for load balancing. Must be between 1
and 1000.
:type weight: int
:param enabled: Whether the origin is enabled for load balancing. By default, the origin is
always enabled.
:type enabled: bool
:param private_link_alias: The Alias of the Private Link resource. Populating this optional
field indicates that this origin is 'Private'.
:type private_link_alias: str
:param private_link_resource_id: The Resource Id of the Private Link resource. Populating this
optional field indicates that this backend is 'Private'.
:type private_link_resource_id: str
:param private_link_location: The location of the Private Link resource. Required only if
'privateLinkResourceId' is populated.
:type private_link_location: str
:param private_link_approval_message: A custom message to be included in the approval request
to connect to the Private Link.
:type private_link_approval_message: str
"""
_validation = {
'name': {'required': True},
'http_port': {'maximum': 65535, 'minimum': 1},
'https_port': {'maximum': 65535, 'minimum': 1},
'priority': {'maximum': 5, 'minimum': 1},
'weight': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'http_port': {'key': 'properties.httpPort', 'type': 'int'},
'https_port': {'key': 'properties.httpsPort', 'type': 'int'},
'origin_host_header': {'key': 'properties.originHostHeader', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'weight': {'key': 'properties.weight', 'type': 'int'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'private_link_alias': {'key': 'properties.privateLinkAlias', 'type': 'str'},
'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
'private_link_location': {'key': 'properties.privateLinkLocation', 'type': 'str'},
'private_link_approval_message': {'key': 'properties.privateLinkApprovalMessage', 'type': 'str'},
}
def __init__(
self,
*,
name: str,
host_name: Optional[str] = None,
http_port: Optional[int] = None,
https_port: Optional[int] = None,
origin_host_header: Optional[str] = None,
priority: Optional[int] = None,
weight: Optional[int] = None,
enabled: Optional[bool] = None,
private_link_alias: Optional[str] = None,
private_link_resource_id: Optional[str] = None,
private_link_location: Optional[str] = None,
private_link_approval_message: Optional[str] = None,
**kwargs
):
super(DeepCreatedOrigin, self).__init__(**kwargs)
self.name = name
self.host_name = host_name
self.http_port = http_port
self.https_port = https_port
self.origin_host_header = origin_host_header
self.priority = priority
self.weight = weight
self.enabled = enabled
self.private_link_alias = private_link_alias
self.private_link_resource_id = private_link_resource_id
self.private_link_location = private_link_location
self.private_link_approval_message = private_link_approval_message
class DeepCreatedOriginGroup(msrest.serialization.Model):
"""The origin group for CDN content which is added when creating a CDN endpoint. Traffic is sent to the origins within the origin group based on origin health.
All required parameters must be populated in order to send to Azure.
:param name: Required. Origin group name which must be unique within the endpoint.
:type name: str
:param health_probe_settings: Health probe settings to the origin that are used to determine the
health of the origin.
:type health_probe_settings: ~azure.mgmt.cdn.models.HealthProbeParameters
:param origins: The source of the content being delivered via CDN within the given origin group.
:type origins: list[~azure.mgmt.cdn.models.ResourceReference]
:param traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Time in minutes to shift
the traffic to the endpoint gradually when an unhealthy endpoint becomes healthy or a new
endpoint is added. Default is 10 minutes. This property is currently not supported.
:type traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: int
:param response_based_origin_error_detection_settings: The JSON object that contains the
properties to determine origin health using real requests/responses. This property is currently
not supported.
:type response_based_origin_error_detection_settings:
~azure.mgmt.cdn.models.ResponseBasedOriginErrorDetectionParameters
"""
_validation = {
'name': {'required': True},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'maximum': 50, 'minimum': 0},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'health_probe_settings': {'key': 'properties.healthProbeSettings', 'type': 'HealthProbeParameters'},
'origins': {'key': 'properties.origins', 'type': '[ResourceReference]'},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'key': 'properties.trafficRestorationTimeToHealedOrNewEndpointsInMinutes', 'type': 'int'},
'response_based_origin_error_detection_settings': {'key': 'properties.responseBasedOriginErrorDetectionSettings', 'type': 'ResponseBasedOriginErrorDetectionParameters'},
}
def __init__(
self,
*,
name: str,
health_probe_settings: Optional["HealthProbeParameters"] = None,
origins: Optional[List["ResourceReference"]] = None,
traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Optional[int] = None,
response_based_origin_error_detection_settings: Optional["ResponseBasedOriginErrorDetectionParameters"] = None,
**kwargs
):
super(DeepCreatedOriginGroup, self).__init__(**kwargs)
self.name = name
self.health_probe_settings = health_probe_settings
self.origins = origins
self.traffic_restoration_time_to_healed_or_new_endpoints_in_minutes = traffic_restoration_time_to_healed_or_new_endpoints_in_minutes
self.response_based_origin_error_detection_settings = response_based_origin_error_detection_settings
class DeliveryRule(msrest.serialization.Model):
"""A rule that specifies a set of actions and conditions.
All required parameters must be populated in order to send to Azure.
:param name: Name of the rule.
:type name: str
:param order: Required. The order in which the rules are applied for the endpoint. Possible
values {0,1,2,3,………}. A rule with a lesser order will be applied before a rule with a greater
order. A rule with order 0 is a special rule: it does not require any condition, and the
actions listed in it will always be applied.
:type order: int
:param conditions: A list of conditions that must be matched for the actions to be executed.
:type conditions: list[~azure.mgmt.cdn.models.DeliveryRuleCondition]
:param actions: Required. A list of actions that are executed when all the conditions of a rule
are satisfied.
:type actions: list[~azure.mgmt.cdn.models.DeliveryRuleAction]
"""
_validation = {
'order': {'required': True},
'actions': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'order': {'key': 'order', 'type': 'int'},
'conditions': {'key': 'conditions', 'type': '[DeliveryRuleCondition]'},
'actions': {'key': 'actions', 'type': '[DeliveryRuleAction]'},
}
def __init__(
self,
*,
order: int,
actions: List["DeliveryRuleAction"],
name: Optional[str] = None,
conditions: Optional[List["DeliveryRuleCondition"]] = None,
**kwargs
):
super(DeliveryRule, self).__init__(**kwargs)
self.name = name
self.order = order
self.conditions = conditions
self.actions = actions
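# Illustrative sketch: a DeliveryRule that pairs one condition with one action.
# The parameter classes referenced below are defined elsewhere in this module;
# the concrete values are examples only, not defaults.
#
#   rule = DeliveryRule(
#       name="CacheStaticImages",
#       order=1,
#       conditions=[
#           DeliveryRuleUrlFileExtensionCondition(
#               parameters=UrlFileExtensionMatchConditionParameters(
#                   operator="Equal",
#                   match_values=["jpg", "png"],
#               ),
#           ),
#       ],
#       actions=[
#           DeliveryRuleCacheExpirationAction(
#               parameters=CacheExpirationActionParameters(
#                   cache_behavior="Override",
#                   cache_type="All",
#                   cache_duration="7.00:00:00",
#               ),
#           ),
#       ],
#   )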
class DeliveryRuleAction(msrest.serialization.Model):
"""An action for the delivery rule.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: DeliveryRuleCacheExpirationAction, DeliveryRuleCacheKeyQueryStringAction, DeliveryRuleRequestHeaderAction, DeliveryRuleResponseHeaderAction, OriginGroupOverrideAction, UrlRedirectAction, UrlRewriteAction, UrlSigningAction.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the action for the delivery rule. Constant filled by server.
Possible values include: "CacheExpiration", "CacheKeyQueryString", "ModifyRequestHeader",
"ModifyResponseHeader", "UrlRedirect", "UrlRewrite", "UrlSigning", "OriginGroupOverride".
:type name: str or ~azure.mgmt.cdn.models.DeliveryRuleActionEnum
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
_subtype_map = {
'name': {'CacheExpiration': 'DeliveryRuleCacheExpirationAction', 'CacheKeyQueryString': 'DeliveryRuleCacheKeyQueryStringAction', 'ModifyRequestHeader': 'DeliveryRuleRequestHeaderAction', 'ModifyResponseHeader': 'DeliveryRuleResponseHeaderAction', 'OriginGroupOverride': 'OriginGroupOverrideAction', 'UrlRedirect': 'UrlRedirectAction', 'UrlRewrite': 'UrlRewriteAction', 'UrlSigning': 'UrlSigningAction'}
}
def __init__(
self,
**kwargs
):
super(DeliveryRuleAction, self).__init__(**kwargs)
self.name = None # type: Optional[str]
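# Note on the discriminator: msrest consults _subtype_map when deserializing, so
# a payload whose "name" is "CacheExpiration" is materialized as
# DeliveryRuleCacheExpirationAction rather than this base class. A minimal
# sketch, assuming the generic msrest Model.deserialize entry point behaves this
# way in the installed msrest version:
#
#   raw = {"name": "CacheExpiration", "parameters": {...}}
#   action = DeliveryRuleAction.deserialize(raw)
#   # isinstance(action, DeliveryRuleCacheExpirationAction) -> True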
class DeliveryRuleCacheExpirationAction(DeliveryRuleAction):
"""Defines the cache expiration action for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the action for the delivery rule. Constant filled by server.
Possible values include: "CacheExpiration", "CacheKeyQueryString", "ModifyRequestHeader",
"ModifyResponseHeader", "UrlRedirect", "UrlRewrite", "UrlSigning", "OriginGroupOverride".
:type name: str or ~azure.mgmt.cdn.models.DeliveryRuleActionEnum
:param parameters: Required. Defines the parameters for the action.
:type parameters: ~azure.mgmt.cdn.models.CacheExpirationActionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'CacheExpirationActionParameters'},
}
def __init__(
self,
*,
parameters: "CacheExpirationActionParameters",
**kwargs
):
super(DeliveryRuleCacheExpirationAction, self).__init__(**kwargs)
self.name = 'CacheExpiration' # type: str
self.parameters = parameters
class DeliveryRuleCacheKeyQueryStringAction(DeliveryRuleAction):
"""Defines the cache-key query string action for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the action for the delivery rule. Constant filled by server.
Possible values include: "CacheExpiration", "CacheKeyQueryString", "ModifyRequestHeader",
"ModifyResponseHeader", "UrlRedirect", "UrlRewrite", "UrlSigning", "OriginGroupOverride".
:type name: str or ~azure.mgmt.cdn.models.DeliveryRuleActionEnum
:param parameters: Required. Defines the parameters for the action.
:type parameters: ~azure.mgmt.cdn.models.CacheKeyQueryStringActionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'CacheKeyQueryStringActionParameters'},
}
def __init__(
self,
*,
parameters: "CacheKeyQueryStringActionParameters",
**kwargs
):
super(DeliveryRuleCacheKeyQueryStringAction, self).__init__(**kwargs)
self.name = 'CacheKeyQueryString' # type: str
self.parameters = parameters
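# Illustrative sketch: caching only on selected query-string parameters. The
# CacheKeyQueryStringActionParameters keyword names are assumptions based on
# this models module; query_parameters is a comma-separated list.
#
#   action = DeliveryRuleCacheKeyQueryStringAction(
#       parameters=CacheKeyQueryStringActionParameters(
#           query_string_behavior="Include",
#           query_parameters="version,locale",
#       ),
#   )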
class DeliveryRuleCondition(msrest.serialization.Model):
"""A condition for the delivery rule.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: DeliveryRuleCookiesCondition, DeliveryRuleHttpVersionCondition, DeliveryRuleIsDeviceCondition, DeliveryRulePostArgsCondition, DeliveryRuleQueryStringCondition, DeliveryRuleRemoteAddressCondition, DeliveryRuleRequestBodyCondition, DeliveryRuleRequestHeaderCondition, DeliveryRuleRequestMethodCondition, DeliveryRuleRequestSchemeCondition, DeliveryRuleRequestUriCondition, DeliveryRuleUrlFileExtensionCondition, DeliveryRuleUrlFileNameCondition, DeliveryRuleUrlPathCondition.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
_subtype_map = {
'name': {'Cookies': 'DeliveryRuleCookiesCondition', 'HttpVersion': 'DeliveryRuleHttpVersionCondition', 'IsDevice': 'DeliveryRuleIsDeviceCondition', 'PostArgs': 'DeliveryRulePostArgsCondition', 'QueryString': 'DeliveryRuleQueryStringCondition', 'RemoteAddress': 'DeliveryRuleRemoteAddressCondition', 'RequestBody': 'DeliveryRuleRequestBodyCondition', 'RequestHeader': 'DeliveryRuleRequestHeaderCondition', 'RequestMethod': 'DeliveryRuleRequestMethodCondition', 'RequestScheme': 'DeliveryRuleRequestSchemeCondition', 'RequestUri': 'DeliveryRuleRequestUriCondition', 'UrlFileExtension': 'DeliveryRuleUrlFileExtensionCondition', 'UrlFileName': 'DeliveryRuleUrlFileNameCondition', 'UrlPath': 'DeliveryRuleUrlPathCondition'}
}
def __init__(
self,
**kwargs
):
super(DeliveryRuleCondition, self).__init__(**kwargs)
self.name = None # type: Optional[str]
class DeliveryRuleCookiesCondition(DeliveryRuleCondition):
"""Defines the Cookies condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.CookiesMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'CookiesMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "CookiesMatchConditionParameters",
**kwargs
):
super(DeliveryRuleCookiesCondition, self).__init__(**kwargs)
self.name = 'Cookies' # type: str
self.parameters = parameters
class DeliveryRuleHttpVersionCondition(DeliveryRuleCondition):
"""Defines the HttpVersion condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.HttpVersionMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'HttpVersionMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "HttpVersionMatchConditionParameters",
**kwargs
):
super(DeliveryRuleHttpVersionCondition, self).__init__(**kwargs)
self.name = 'HttpVersion' # type: str
self.parameters = parameters
class DeliveryRuleIsDeviceCondition(DeliveryRuleCondition):
"""Defines the IsDevice condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.IsDeviceMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'IsDeviceMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "IsDeviceMatchConditionParameters",
**kwargs
):
super(DeliveryRuleIsDeviceCondition, self).__init__(**kwargs)
self.name = 'IsDevice' # type: str
self.parameters = parameters
class DeliveryRulePostArgsCondition(DeliveryRuleCondition):
"""Defines the PostArgs condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.PostArgsMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'PostArgsMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "PostArgsMatchConditionParameters",
**kwargs
):
super(DeliveryRulePostArgsCondition, self).__init__(**kwargs)
self.name = 'PostArgs' # type: str
self.parameters = parameters
class DeliveryRuleQueryStringCondition(DeliveryRuleCondition):
"""Defines the QueryString condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.QueryStringMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'QueryStringMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "QueryStringMatchConditionParameters",
**kwargs
):
super(DeliveryRuleQueryStringCondition, self).__init__(**kwargs)
self.name = 'QueryString' # type: str
self.parameters = parameters
class DeliveryRuleRemoteAddressCondition(DeliveryRuleCondition):
"""Defines the RemoteAddress condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.RemoteAddressMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'RemoteAddressMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "RemoteAddressMatchConditionParameters",
**kwargs
):
super(DeliveryRuleRemoteAddressCondition, self).__init__(**kwargs)
self.name = 'RemoteAddress' # type: str
self.parameters = parameters
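# Illustrative sketch: matching requests by caller geography. The
# RemoteAddressMatchConditionParameters keyword names and the "GeoMatch"
# operator value are assumptions based on this models module; match_values
# would hold country codes for GeoMatch and CIDR ranges for IPMatch.
#
#   condition = DeliveryRuleRemoteAddressCondition(
#       parameters=RemoteAddressMatchConditionParameters(
#           operator="GeoMatch",
#           match_values=["DE", "FR"],
#       ),
#   )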
class DeliveryRuleRequestBodyCondition(DeliveryRuleCondition):
"""Defines the RequestBody condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.RequestBodyMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'RequestBodyMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "RequestBodyMatchConditionParameters",
**kwargs
):
super(DeliveryRuleRequestBodyCondition, self).__init__(**kwargs)
self.name = 'RequestBody' # type: str
self.parameters = parameters
class DeliveryRuleRequestHeaderAction(DeliveryRuleAction):
"""Defines the request header action for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the action for the delivery rule. Constant filled by server.
Possible values include: "CacheExpiration", "CacheKeyQueryString", "ModifyRequestHeader",
"ModifyResponseHeader", "UrlRedirect", "UrlRewrite", "UrlSigning", "OriginGroupOverride".
:type name: str or ~azure.mgmt.cdn.models.DeliveryRuleActionEnum
:param parameters: Required. Defines the parameters for the action.
:type parameters: ~azure.mgmt.cdn.models.HeaderActionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'HeaderActionParameters'},
}
def __init__(
self,
*,
parameters: "HeaderActionParameters",
**kwargs
):
super(DeliveryRuleRequestHeaderAction, self).__init__(**kwargs)
self.name = 'ModifyRequestHeader' # type: str
self.parameters = parameters
class DeliveryRuleRequestHeaderCondition(DeliveryRuleCondition):
"""Defines the RequestHeader condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.RequestHeaderMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'RequestHeaderMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "RequestHeaderMatchConditionParameters",
**kwargs
):
super(DeliveryRuleRequestHeaderCondition, self).__init__(**kwargs)
self.name = 'RequestHeader' # type: str
self.parameters = parameters
class DeliveryRuleRequestMethodCondition(DeliveryRuleCondition):
"""Defines the RequestMethod condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.RequestMethodMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'RequestMethodMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "RequestMethodMatchConditionParameters",
**kwargs
):
super(DeliveryRuleRequestMethodCondition, self).__init__(**kwargs)
self.name = 'RequestMethod' # type: str
self.parameters = parameters
class DeliveryRuleRequestSchemeCondition(DeliveryRuleCondition):
"""Defines the RequestScheme condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.RequestSchemeMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'RequestSchemeMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "RequestSchemeMatchConditionParameters",
**kwargs
):
super(DeliveryRuleRequestSchemeCondition, self).__init__(**kwargs)
self.name = 'RequestScheme' # type: str
self.parameters = parameters
class DeliveryRuleRequestUriCondition(DeliveryRuleCondition):
"""Defines the RequestUri condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.RequestUriMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'RequestUriMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "RequestUriMatchConditionParameters",
**kwargs
):
super(DeliveryRuleRequestUriCondition, self).__init__(**kwargs)
self.name = 'RequestUri' # type: str
self.parameters = parameters
class DeliveryRuleResponseHeaderAction(DeliveryRuleAction):
"""Defines the response header action for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the action for the delivery rule. Constant filled by server.
Possible values include: "CacheExpiration", "CacheKeyQueryString", "ModifyRequestHeader",
"ModifyResponseHeader", "UrlRedirect", "UrlRewrite", "UrlSigning", "OriginGroupOverride".
:type name: str or ~azure.mgmt.cdn.models.DeliveryRuleActionEnum
:param parameters: Required. Defines the parameters for the action.
:type parameters: ~azure.mgmt.cdn.models.HeaderActionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'HeaderActionParameters'},
}
def __init__(
self,
*,
parameters: "HeaderActionParameters",
**kwargs
):
super(DeliveryRuleResponseHeaderAction, self).__init__(**kwargs)
self.name = 'ModifyResponseHeader' # type: str
self.parameters = parameters
class DeliveryRuleUrlFileExtensionCondition(DeliveryRuleCondition):
"""Defines the UrlFileExtension condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.UrlFileExtensionMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'UrlFileExtensionMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "UrlFileExtensionMatchConditionParameters",
**kwargs
):
super(DeliveryRuleUrlFileExtensionCondition, self).__init__(**kwargs)
self.name = 'UrlFileExtension' # type: str
self.parameters = parameters
class DeliveryRuleUrlFileNameCondition(DeliveryRuleCondition):
"""Defines the UrlFileName condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.UrlFileNameMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'UrlFileNameMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "UrlFileNameMatchConditionParameters",
**kwargs
):
super(DeliveryRuleUrlFileNameCondition, self).__init__(**kwargs)
self.name = 'UrlFileName' # type: str
self.parameters = parameters
class DeliveryRuleUrlPathCondition(DeliveryRuleCondition):
"""Defines the UrlPath condition for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the condition for the delivery rule. Constant filled by
server. Possible values include: "RemoteAddress", "RequestMethod", "QueryString", "PostArgs",
"RequestUri", "RequestHeader", "RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension",
"UrlFileName", "HttpVersion", "Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type name: str or ~azure.mgmt.cdn.models.MatchVariable
:param parameters: Required. Defines the parameters for the condition.
:type parameters: ~azure.mgmt.cdn.models.UrlPathMatchConditionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'UrlPathMatchConditionParameters'},
}
def __init__(
self,
*,
parameters: "UrlPathMatchConditionParameters",
**kwargs
):
super(DeliveryRuleUrlPathCondition, self).__init__(**kwargs)
self.name = 'UrlPath' # type: str
self.parameters = parameters
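# Illustrative sketch: a path-prefix match, e.g. to scope a rule to /media/.
# The UrlPathMatchConditionParameters keyword names and the "BeginsWith"
# operator value are assumptions based on this models module.
#
#   condition = DeliveryRuleUrlPathCondition(
#       parameters=UrlPathMatchConditionParameters(
#           operator="BeginsWith",
#           match_values=["/media/"],
#       ),
#   )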
class DomainValidationProperties(msrest.serialization.Model):
"""The JSON object that contains the properties to validate a domain.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar validation_token: Challenge used for DNS TXT record or file based validation.
:vartype validation_token: str
:ivar expiration_date: The date and time at which the token expires.
:vartype expiration_date: str
"""
_validation = {
'validation_token': {'readonly': True},
'expiration_date': {'readonly': True},
}
_attribute_map = {
'validation_token': {'key': 'validationToken', 'type': 'str'},
'expiration_date': {'key': 'expirationDate', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(DomainValidationProperties, self).__init__(**kwargs)
self.validation_token = None
self.expiration_date = None
class EdgeNode(Resource):
"""Edgenode is a global Point of Presence (POP) location used to deliver CDN content to end users.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param ip_address_groups: List of IP address groups.
:type ip_address_groups: list[~azure.mgmt.cdn.models.IpAddressGroup]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'ip_address_groups': {'key': 'properties.ipAddressGroups', 'type': '[IpAddressGroup]'},
}
def __init__(
self,
*,
ip_address_groups: Optional[List["IpAddressGroup"]] = None,
**kwargs
):
super(EdgeNode, self).__init__(**kwargs)
self.ip_address_groups = ip_address_groups
class EdgenodeResult(msrest.serialization.Model):
"""Result of the request to list CDN edgenodes. It contains a list of ip address group and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: Edge nodes of the CDN service.
:vartype value: list[~azure.mgmt.cdn.models.EdgeNode]
:param next_link: URL to get the next set of edgenode list results if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[EdgeNode]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(EdgenodeResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
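# Illustrative sketch: edge-node results are normally consumed through the
# client's paged list operation rather than built by hand. The operation group
# and method names below (edge_nodes.list) and the IpAddressGroup attribute
# names are assumptions; verify against the installed CdnManagementClient.
#
#   for edge_node in client.edge_nodes.list():
#       for group in edge_node.ip_address_groups or []:
#           print(group.delivery_region)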
class Endpoint(TrackedResource):
"""CDN endpoint is the entity within a CDN profile containing configuration information such as origin, protocol, content caching and delivery behavior. The CDN endpoint uses the URL format :code:`<endpointname>`.azureedge.net.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param location: Required. Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param origin_path: A directory path on the origin that CDN can use to retrieve content from,
e.g. contoso.cloudapp.net/originpath.
:type origin_path: str
:param content_types_to_compress: List of content types on which compression applies. The value
should be a valid MIME type.
:type content_types_to_compress: list[str]
:param origin_host_header: The host header value sent to the origin with each request. This
property at Endpoint is only allowed when endpoint uses single origin and can be overridden by
the same property specified at origin. If you leave this blank, the request hostname determines
this value. Azure CDN origins, such as Web Apps, Blob Storage, and Cloud Services require this
host header value to match the origin hostname by default.
:type origin_host_header: str
:param is_compression_enabled: Indicates whether content compression is enabled on CDN. Default
value is false. If compression is enabled, content will be served as compressed if the user
requests a compressed version. Content won't be compressed on CDN when the requested content is
smaller than 1 byte or larger than 1 MB.
:type is_compression_enabled: bool
:param is_http_allowed: Indicates whether HTTP traffic is allowed on the endpoint. Default
value is true. At least one protocol (HTTP or HTTPS) must be allowed.
:type is_http_allowed: bool
:param is_https_allowed: Indicates whether HTTPS traffic is allowed on the endpoint. Default
value is true. At least one protocol (HTTP or HTTPS) must be allowed.
:type is_https_allowed: bool
:param query_string_caching_behavior: Defines how CDN caches requests that include query
strings. You can ignore any query strings when caching, bypass caching to prevent requests that
contain query strings from being cached, or cache every request with a unique URL. Possible
values include: "IgnoreQueryString", "BypassCaching", "UseQueryString", "NotSet".
:type query_string_caching_behavior: str or ~azure.mgmt.cdn.models.QueryStringCachingBehavior
:param optimization_type: Specifies what scenario the customer wants this CDN endpoint to
optimize for, e.g. Download, Media services. With this information, CDN can apply scenario
driven optimization. Possible values include: "GeneralWebDelivery", "GeneralMediaStreaming",
"VideoOnDemandMediaStreaming", "LargeFileDownload", "DynamicSiteAcceleration".
:type optimization_type: str or ~azure.mgmt.cdn.models.OptimizationType
:param probe_path: Path to a file hosted on the origin which helps accelerate delivery of the
dynamic content and calculate optimal routes for the CDN. This is relative to the
origin path. This property is only relevant when using a single origin.
:type probe_path: str
:param geo_filters: List of rules defining the user's geo access within a CDN endpoint. Each
geo filter defines an access rule to a specified path or content, e.g. block APAC for path
/pictures/.
:type geo_filters: list[~azure.mgmt.cdn.models.GeoFilter]
:param default_origin_group: A reference to the origin group.
:type default_origin_group: ~azure.mgmt.cdn.models.ResourceReference
:param url_signing_keys: List of keys used to validate the signed URL hashes.
:type url_signing_keys: list[~azure.mgmt.cdn.models.UrlSigningKey]
:param delivery_policy: A policy that specifies the delivery rules to be used for an endpoint.
:type delivery_policy: ~azure.mgmt.cdn.models.EndpointPropertiesUpdateParametersDeliveryPolicy
:param web_application_firewall_policy_link: Defines the Web Application Firewall policy for
the endpoint (if applicable).
:type web_application_firewall_policy_link:
~azure.mgmt.cdn.models.EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink
:ivar host_name: The host name of the endpoint structured as {endpointName}.{DNSZone}, e.g.
contoso.azureedge.net.
:vartype host_name: str
:param origins: The source of the content being delivered via CDN.
:type origins: list[~azure.mgmt.cdn.models.DeepCreatedOrigin]
:param origin_groups: The origin groups comprising origins that are used for load balancing
the traffic based on availability.
:type origin_groups: list[~azure.mgmt.cdn.models.DeepCreatedOriginGroup]
:ivar resource_state: Resource status of the endpoint. Possible values include: "Creating",
"Deleting", "Running", "Starting", "Stopped", "Stopping".
:vartype resource_state: str or ~azure.mgmt.cdn.models.EndpointResourceState
:ivar provisioning_state: Provisioning status of the endpoint.
:vartype provisioning_state: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'location': {'required': True},
'host_name': {'readonly': True},
'resource_state': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'origin_path': {'key': 'properties.originPath', 'type': 'str'},
'content_types_to_compress': {'key': 'properties.contentTypesToCompress', 'type': '[str]'},
'origin_host_header': {'key': 'properties.originHostHeader', 'type': 'str'},
'is_compression_enabled': {'key': 'properties.isCompressionEnabled', 'type': 'bool'},
'is_http_allowed': {'key': 'properties.isHttpAllowed', 'type': 'bool'},
'is_https_allowed': {'key': 'properties.isHttpsAllowed', 'type': 'bool'},
'query_string_caching_behavior': {'key': 'properties.queryStringCachingBehavior', 'type': 'str'},
'optimization_type': {'key': 'properties.optimizationType', 'type': 'str'},
'probe_path': {'key': 'properties.probePath', 'type': 'str'},
'geo_filters': {'key': 'properties.geoFilters', 'type': '[GeoFilter]'},
'default_origin_group': {'key': 'properties.defaultOriginGroup', 'type': 'ResourceReference'},
'url_signing_keys': {'key': 'properties.urlSigningKeys', 'type': '[UrlSigningKey]'},
'delivery_policy': {'key': 'properties.deliveryPolicy', 'type': 'EndpointPropertiesUpdateParametersDeliveryPolicy'},
'web_application_firewall_policy_link': {'key': 'properties.webApplicationFirewallPolicyLink', 'type': 'EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'origins': {'key': 'properties.origins', 'type': '[DeepCreatedOrigin]'},
'origin_groups': {'key': 'properties.originGroups', 'type': '[DeepCreatedOriginGroup]'},
'resource_state': {'key': 'properties.resourceState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
location: str,
tags: Optional[Dict[str, str]] = None,
origin_path: Optional[str] = None,
content_types_to_compress: Optional[List[str]] = None,
origin_host_header: Optional[str] = None,
is_compression_enabled: Optional[bool] = None,
is_http_allowed: Optional[bool] = None,
is_https_allowed: Optional[bool] = None,
query_string_caching_behavior: Optional[Union[str, "QueryStringCachingBehavior"]] = None,
optimization_type: Optional[Union[str, "OptimizationType"]] = None,
probe_path: Optional[str] = None,
geo_filters: Optional[List["GeoFilter"]] = None,
default_origin_group: Optional["ResourceReference"] = None,
url_signing_keys: Optional[List["UrlSigningKey"]] = None,
delivery_policy: Optional["EndpointPropertiesUpdateParametersDeliveryPolicy"] = None,
web_application_firewall_policy_link: Optional["EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink"] = None,
origins: Optional[List["DeepCreatedOrigin"]] = None,
origin_groups: Optional[List["DeepCreatedOriginGroup"]] = None,
**kwargs
):
super(Endpoint, self).__init__(location=location, tags=tags, **kwargs)
self.origin_path = origin_path
self.content_types_to_compress = content_types_to_compress
self.origin_host_header = origin_host_header
self.is_compression_enabled = is_compression_enabled
self.is_http_allowed = is_http_allowed
self.is_https_allowed = is_https_allowed
self.query_string_caching_behavior = query_string_caching_behavior
self.optimization_type = optimization_type
self.probe_path = probe_path
self.geo_filters = geo_filters
self.default_origin_group = default_origin_group
self.url_signing_keys = url_signing_keys
self.delivery_policy = delivery_policy
self.web_application_firewall_policy_link = web_application_firewall_policy_link
self.host_name = None
self.origins = origins
self.origin_groups = origin_groups
self.resource_state = None
self.provisioning_state = None
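# Illustrative sketch: a minimal Endpoint payload for an endpoint create
# operation. The DeepCreatedOrigin keyword names and the begin_create method
# name are assumptions about the operations layer of the installed SDK.
#
#   endpoint = Endpoint(
#       location="WestUs",
#       origins=[DeepCreatedOrigin(name="origin1", host_name="www.contoso.com")],
#       is_http_allowed=False,
#       is_https_allowed=True,
#       is_compression_enabled=True,
#       content_types_to_compress=["text/html", "application/json"],
#   )
#   poller = client.endpoints.begin_create(
#       resource_group_name, profile_name, endpoint_name, endpoint)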
class EndpointListResult(msrest.serialization.Model):
"""Result of the request to list endpoints. It contains a list of endpoint objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of CDN endpoints within a profile.
:vartype value: list[~azure.mgmt.cdn.models.Endpoint]
:param next_link: URL to get the next set of endpoint objects if there is any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Endpoint]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(EndpointListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class EndpointPropertiesUpdateParameters(msrest.serialization.Model):
"""The JSON object containing endpoint update parameters.
:param origin_path: A directory path on the origin that CDN can use to retrieve content from,
e.g. contoso.cloudapp.net/originpath.
:type origin_path: str
:param content_types_to_compress: List of content types on which compression applies. The value
should be a valid MIME type.
:type content_types_to_compress: list[str]
:param origin_host_header: The host header value sent to the origin with each request. This
property at Endpoint is only allowed when endpoint uses single origin and can be overridden by
the same property specified at origin. If you leave this blank, the request hostname determines
this value. Azure CDN origins, such as Web Apps, Blob Storage, and Cloud Services require this
host header value to match the origin hostname by default.
:type origin_host_header: str
:param is_compression_enabled: Indicates whether content compression is enabled on CDN. Default
value is false. If compression is enabled, content will be served as compressed if the user
requests a compressed version. Content won't be compressed on CDN when the requested content is
smaller than 1 byte or larger than 1 MB.
:type is_compression_enabled: bool
:param is_http_allowed: Indicates whether HTTP traffic is allowed on the endpoint. Default
value is true. At least one protocol (HTTP or HTTPS) must be allowed.
:type is_http_allowed: bool
:param is_https_allowed: Indicates whether HTTPS traffic is allowed on the endpoint. Default
value is true. At least one protocol (HTTP or HTTPS) must be allowed.
:type is_https_allowed: bool
:param query_string_caching_behavior: Defines how CDN caches requests that include query
strings. You can ignore any query strings when caching, bypass caching to prevent requests that
contain query strings from being cached, or cache every request with a unique URL. Possible
values include: "IgnoreQueryString", "BypassCaching", "UseQueryString", "NotSet".
:type query_string_caching_behavior: str or ~azure.mgmt.cdn.models.QueryStringCachingBehavior
:param optimization_type: Specifies what scenario the customer wants this CDN endpoint to
optimize for, e.g. Download, Media services. With this information, CDN can apply scenario
driven optimization. Possible values include: "GeneralWebDelivery", "GeneralMediaStreaming",
"VideoOnDemandMediaStreaming", "LargeFileDownload", "DynamicSiteAcceleration".
:type optimization_type: str or ~azure.mgmt.cdn.models.OptimizationType
:param probe_path: Path to a file hosted on the origin which helps accelerate delivery of the
dynamic content and calculate optimal routes for the CDN. This is relative to the
origin path. This property is only relevant when using a single origin.
:type probe_path: str
:param geo_filters: List of rules defining the user's geo access within a CDN endpoint. Each
geo filter defines an access rule to a specified path or content, e.g. block APAC for path
/pictures/.
:type geo_filters: list[~azure.mgmt.cdn.models.GeoFilter]
:param default_origin_group: A reference to the origin group.
:type default_origin_group: ~azure.mgmt.cdn.models.ResourceReference
:param url_signing_keys: List of keys used to validate the signed URL hashes.
:type url_signing_keys: list[~azure.mgmt.cdn.models.UrlSigningKey]
:param delivery_policy: A policy that specifies the delivery rules to be used for an endpoint.
:type delivery_policy: ~azure.mgmt.cdn.models.EndpointPropertiesUpdateParametersDeliveryPolicy
:param web_application_firewall_policy_link: Defines the Web Application Firewall policy for
the endpoint (if applicable).
:type web_application_firewall_policy_link:
~azure.mgmt.cdn.models.EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink
"""
_attribute_map = {
'origin_path': {'key': 'originPath', 'type': 'str'},
'content_types_to_compress': {'key': 'contentTypesToCompress', 'type': '[str]'},
'origin_host_header': {'key': 'originHostHeader', 'type': 'str'},
'is_compression_enabled': {'key': 'isCompressionEnabled', 'type': 'bool'},
'is_http_allowed': {'key': 'isHttpAllowed', 'type': 'bool'},
'is_https_allowed': {'key': 'isHttpsAllowed', 'type': 'bool'},
'query_string_caching_behavior': {'key': 'queryStringCachingBehavior', 'type': 'str'},
'optimization_type': {'key': 'optimizationType', 'type': 'str'},
'probe_path': {'key': 'probePath', 'type': 'str'},
'geo_filters': {'key': 'geoFilters', 'type': '[GeoFilter]'},
'default_origin_group': {'key': 'defaultOriginGroup', 'type': 'ResourceReference'},
'url_signing_keys': {'key': 'urlSigningKeys', 'type': '[UrlSigningKey]'},
'delivery_policy': {'key': 'deliveryPolicy', 'type': 'EndpointPropertiesUpdateParametersDeliveryPolicy'},
'web_application_firewall_policy_link': {'key': 'webApplicationFirewallPolicyLink', 'type': 'EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink'},
}
def __init__(
self,
*,
origin_path: Optional[str] = None,
content_types_to_compress: Optional[List[str]] = None,
origin_host_header: Optional[str] = None,
is_compression_enabled: Optional[bool] = None,
is_http_allowed: Optional[bool] = None,
is_https_allowed: Optional[bool] = None,
query_string_caching_behavior: Optional[Union[str, "QueryStringCachingBehavior"]] = None,
optimization_type: Optional[Union[str, "OptimizationType"]] = None,
probe_path: Optional[str] = None,
geo_filters: Optional[List["GeoFilter"]] = None,
default_origin_group: Optional["ResourceReference"] = None,
url_signing_keys: Optional[List["UrlSigningKey"]] = None,
delivery_policy: Optional["EndpointPropertiesUpdateParametersDeliveryPolicy"] = None,
web_application_firewall_policy_link: Optional["EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink"] = None,
**kwargs
):
super(EndpointPropertiesUpdateParameters, self).__init__(**kwargs)
self.origin_path = origin_path
self.content_types_to_compress = content_types_to_compress
self.origin_host_header = origin_host_header
self.is_compression_enabled = is_compression_enabled
self.is_http_allowed = is_http_allowed
self.is_https_allowed = is_https_allowed
self.query_string_caching_behavior = query_string_caching_behavior
self.optimization_type = optimization_type
self.probe_path = probe_path
self.geo_filters = geo_filters
self.default_origin_group = default_origin_group
self.url_signing_keys = url_signing_keys
self.delivery_policy = delivery_policy
self.web_application_firewall_policy_link = web_application_firewall_policy_link
class EndpointProperties(EndpointPropertiesUpdateParameters):
"""The JSON object that contains the properties required to create an endpoint.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param origin_path: A directory path on the origin that CDN can use to retrieve content from,
e.g. contoso.cloudapp.net/originpath.
:type origin_path: str
:param content_types_to_compress: List of content types on which compression applies. The value
should be a valid MIME type.
:type content_types_to_compress: list[str]
:param origin_host_header: The host header value sent to the origin with each request. This
property at Endpoint is only allowed when endpoint uses single origin and can be overridden by
the same property specified at origin. If you leave this blank, the request hostname determines
this value. Azure CDN origins, such as Web Apps, Blob Storage, and Cloud Services require this
host header value to match the origin hostname by default.
:type origin_host_header: str
:param is_compression_enabled: Indicates whether content compression is enabled on CDN. Default
value is false. If compression is enabled, content will be served as compressed if the user
requests a compressed version. Content won't be compressed on CDN when the requested content is
smaller than 1 byte or larger than 1 MB.
:type is_compression_enabled: bool
:param is_http_allowed: Indicates whether HTTP traffic is allowed on the endpoint. Default
value is true. At least one protocol (HTTP or HTTPS) must be allowed.
:type is_http_allowed: bool
:param is_https_allowed: Indicates whether HTTPS traffic is allowed on the endpoint. Default
value is true. At least one protocol (HTTP or HTTPS) must be allowed.
:type is_https_allowed: bool
:param query_string_caching_behavior: Defines how CDN caches requests that include query
strings. You can ignore any query strings when caching, bypass caching to prevent requests that
contain query strings from being cached, or cache every request with a unique URL. Possible
values include: "IgnoreQueryString", "BypassCaching", "UseQueryString", "NotSet".
:type query_string_caching_behavior: str or ~azure.mgmt.cdn.models.QueryStringCachingBehavior
:param optimization_type: Specifies what scenario the customer wants this CDN endpoint to
optimize for, e.g. Download, Media services. With this information, CDN can apply scenario
driven optimization. Possible values include: "GeneralWebDelivery", "GeneralMediaStreaming",
"VideoOnDemandMediaStreaming", "LargeFileDownload", "DynamicSiteAcceleration".
:type optimization_type: str or ~azure.mgmt.cdn.models.OptimizationType
:param probe_path: Path to a file hosted on the origin which helps accelerate delivery of the
dynamic content and calculate optimal routes for the CDN. This is relative to the
origin path. This property is only relevant when using a single origin.
:type probe_path: str
:param geo_filters: List of rules defining the user's geo access within a CDN endpoint. Each
geo filter defines an access rule to a specified path or content, e.g. block APAC for path
/pictures/.
:type geo_filters: list[~azure.mgmt.cdn.models.GeoFilter]
:param default_origin_group: A reference to the origin group.
:type default_origin_group: ~azure.mgmt.cdn.models.ResourceReference
:param url_signing_keys: List of keys used to validate the signed URL hashes.
:type url_signing_keys: list[~azure.mgmt.cdn.models.UrlSigningKey]
:param delivery_policy: A policy that specifies the delivery rules to be used for an endpoint.
:type delivery_policy: ~azure.mgmt.cdn.models.EndpointPropertiesUpdateParametersDeliveryPolicy
:param web_application_firewall_policy_link: Defines the Web Application Firewall policy for
the endpoint (if applicable).
:type web_application_firewall_policy_link:
~azure.mgmt.cdn.models.EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink
:ivar host_name: The host name of the endpoint structured as {endpointName}.{DNSZone}, e.g.
contoso.azureedge.net.
:vartype host_name: str
:param origins: Required. The source of the content being delivered via CDN.
:type origins: list[~azure.mgmt.cdn.models.DeepCreatedOrigin]
:param origin_groups: The origin groups comprising origins that are used for load balancing
the traffic based on availability.
:type origin_groups: list[~azure.mgmt.cdn.models.DeepCreatedOriginGroup]
:ivar resource_state: Resource status of the endpoint. Possible values include: "Creating",
"Deleting", "Running", "Starting", "Stopped", "Stopping".
:vartype resource_state: str or ~azure.mgmt.cdn.models.EndpointResourceState
:ivar provisioning_state: Provisioning status of the endpoint.
:vartype provisioning_state: str
"""
_validation = {
'host_name': {'readonly': True},
'origins': {'required': True},
'resource_state': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'origin_path': {'key': 'originPath', 'type': 'str'},
'content_types_to_compress': {'key': 'contentTypesToCompress', 'type': '[str]'},
'origin_host_header': {'key': 'originHostHeader', 'type': 'str'},
'is_compression_enabled': {'key': 'isCompressionEnabled', 'type': 'bool'},
'is_http_allowed': {'key': 'isHttpAllowed', 'type': 'bool'},
'is_https_allowed': {'key': 'isHttpsAllowed', 'type': 'bool'},
'query_string_caching_behavior': {'key': 'queryStringCachingBehavior', 'type': 'str'},
'optimization_type': {'key': 'optimizationType', 'type': 'str'},
'probe_path': {'key': 'probePath', 'type': 'str'},
'geo_filters': {'key': 'geoFilters', 'type': '[GeoFilter]'},
'default_origin_group': {'key': 'defaultOriginGroup', 'type': 'ResourceReference'},
'url_signing_keys': {'key': 'urlSigningKeys', 'type': '[UrlSigningKey]'},
'delivery_policy': {'key': 'deliveryPolicy', 'type': 'EndpointPropertiesUpdateParametersDeliveryPolicy'},
'web_application_firewall_policy_link': {'key': 'webApplicationFirewallPolicyLink', 'type': 'EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink'},
'host_name': {'key': 'hostName', 'type': 'str'},
'origins': {'key': 'origins', 'type': '[DeepCreatedOrigin]'},
'origin_groups': {'key': 'originGroups', 'type': '[DeepCreatedOriginGroup]'},
'resource_state': {'key': 'resourceState', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
origins: List["DeepCreatedOrigin"],
origin_path: Optional[str] = None,
content_types_to_compress: Optional[List[str]] = None,
origin_host_header: Optional[str] = None,
is_compression_enabled: Optional[bool] = None,
is_http_allowed: Optional[bool] = None,
is_https_allowed: Optional[bool] = None,
query_string_caching_behavior: Optional[Union[str, "QueryStringCachingBehavior"]] = None,
optimization_type: Optional[Union[str, "OptimizationType"]] = None,
probe_path: Optional[str] = None,
geo_filters: Optional[List["GeoFilter"]] = None,
default_origin_group: Optional["ResourceReference"] = None,
url_signing_keys: Optional[List["UrlSigningKey"]] = None,
delivery_policy: Optional["EndpointPropertiesUpdateParametersDeliveryPolicy"] = None,
web_application_firewall_policy_link: Optional["EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink"] = None,
origin_groups: Optional[List["DeepCreatedOriginGroup"]] = None,
**kwargs
):
super(EndpointProperties, self).__init__(origin_path=origin_path, content_types_to_compress=content_types_to_compress, origin_host_header=origin_host_header, is_compression_enabled=is_compression_enabled, is_http_allowed=is_http_allowed, is_https_allowed=is_https_allowed, query_string_caching_behavior=query_string_caching_behavior, optimization_type=optimization_type, probe_path=probe_path, geo_filters=geo_filters, default_origin_group=default_origin_group, url_signing_keys=url_signing_keys, delivery_policy=delivery_policy, web_application_firewall_policy_link=web_application_firewall_policy_link, **kwargs)
self.host_name = None
self.origins = origins
self.origin_groups = origin_groups
self.resource_state = None
self.provisioning_state = None
class EndpointPropertiesUpdateParametersDeliveryPolicy(msrest.serialization.Model):
"""A policy that specifies the delivery rules to be used for an endpoint.
All required parameters must be populated in order to send to Azure.
:param description: User-friendly description of the policy.
:type description: str
:param rules: Required. A list of the delivery rules.
:type rules: list[~azure.mgmt.cdn.models.DeliveryRule]
"""
_validation = {
'rules': {'required': True},
}
_attribute_map = {
'description': {'key': 'description', 'type': 'str'},
'rules': {'key': 'rules', 'type': '[DeliveryRule]'},
}
def __init__(
self,
*,
rules: List["DeliveryRule"],
description: Optional[str] = None,
**kwargs
):
super(EndpointPropertiesUpdateParametersDeliveryPolicy, self).__init__(**kwargs)
self.description = description
self.rules = rules
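# Illustrative sketch: attaching a delivery policy during an endpoint update.
# The rule below would be built as in the DeliveryRule example earlier in this
# module; the begin_update method name is an assumption about the operations
# layer of the installed SDK.
#
#   update = EndpointUpdateParameters(
#       delivery_policy=EndpointPropertiesUpdateParametersDeliveryPolicy(
#           description="Cache static images for a week",
#           rules=[rule],
#       ),
#   )
#   client.endpoints.begin_update(
#       resource_group_name, profile_name, endpoint_name, update)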
class EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink(msrest.serialization.Model):
"""Defines the Web Application Firewall policy for the endpoint (if applicable).
:param id: Resource ID.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
**kwargs
):
super(EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink, self).__init__(**kwargs)
self.id = id
class EndpointUpdateParameters(msrest.serialization.Model):
"""Properties required to create or update an endpoint.
:param tags: A set of tags. Endpoint tags.
:type tags: dict[str, str]
:param origin_path: A directory path on the origin that CDN can use to retrieve content from,
e.g. contoso.cloudapp.net/originpath.
:type origin_path: str
:param content_types_to_compress: List of content types on which compression applies. The value
should be a valid MIME type.
:type content_types_to_compress: list[str]
:param origin_host_header: The host header value sent to the origin with each request. This
property at Endpoint is only allowed when endpoint uses single origin and can be overridden by
the same property specified at origin. If you leave this blank, the request hostname determines
this value. Azure CDN origins, such as Web Apps, Blob Storage, and Cloud Services require this
host header value to match the origin hostname by default.
:type origin_host_header: str
:param is_compression_enabled: Indicates whether content compression is enabled on CDN. Default
value is false. If compression is enabled, content will be served as compressed if the user
requests a compressed version. Content won't be compressed on CDN when the requested content is
smaller than 1 byte or larger than 1 MB.
:type is_compression_enabled: bool
:param is_http_allowed: Indicates whether HTTP traffic is allowed on the endpoint. Default
value is true. At least one protocol (HTTP or HTTPS) must be allowed.
:type is_http_allowed: bool
:param is_https_allowed: Indicates whether HTTPS traffic is allowed on the endpoint. Default
value is true. At least one protocol (HTTP or HTTPS) must be allowed.
:type is_https_allowed: bool
:param query_string_caching_behavior: Defines how CDN caches requests that include query
strings. You can ignore any query strings when caching, bypass caching to prevent requests that
contain query strings from being cached, or cache every request with a unique URL. Possible
values include: "IgnoreQueryString", "BypassCaching", "UseQueryString", "NotSet".
:type query_string_caching_behavior: str or ~azure.mgmt.cdn.models.QueryStringCachingBehavior
:param optimization_type: Specifies what scenario the customer wants this CDN endpoint to
optimize for, e.g. Download, Media services. With this information, CDN can apply scenario
driven optimization. Possible values include: "GeneralWebDelivery", "GeneralMediaStreaming",
"VideoOnDemandMediaStreaming", "LargeFileDownload", "DynamicSiteAcceleration".
:type optimization_type: str or ~azure.mgmt.cdn.models.OptimizationType
:param probe_path: Path to a file hosted on the origin which helps accelerate delivery of the
dynamic content and calculate optimal routes for the CDN. This is relative to the
origin path. This property is only relevant when using a single origin.
:type probe_path: str
:param geo_filters: List of rules defining the user's geo access within a CDN endpoint. Each
geo filter defines an access rule to a specified path or content, e.g. block APAC for path
/pictures/.
:type geo_filters: list[~azure.mgmt.cdn.models.GeoFilter]
:param default_origin_group: A reference to the origin group.
:type default_origin_group: ~azure.mgmt.cdn.models.ResourceReference
:param url_signing_keys: List of keys used to validate the signed URL hashes.
:type url_signing_keys: list[~azure.mgmt.cdn.models.UrlSigningKey]
:param delivery_policy: A policy that specifies the delivery rules to be used for an endpoint.
:type delivery_policy: ~azure.mgmt.cdn.models.EndpointPropertiesUpdateParametersDeliveryPolicy
:param web_application_firewall_policy_link: Defines the Web Application Firewall policy for
the endpoint (if applicable).
:type web_application_firewall_policy_link:
~azure.mgmt.cdn.models.EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'origin_path': {'key': 'properties.originPath', 'type': 'str'},
'content_types_to_compress': {'key': 'properties.contentTypesToCompress', 'type': '[str]'},
'origin_host_header': {'key': 'properties.originHostHeader', 'type': 'str'},
'is_compression_enabled': {'key': 'properties.isCompressionEnabled', 'type': 'bool'},
'is_http_allowed': {'key': 'properties.isHttpAllowed', 'type': 'bool'},
'is_https_allowed': {'key': 'properties.isHttpsAllowed', 'type': 'bool'},
'query_string_caching_behavior': {'key': 'properties.queryStringCachingBehavior', 'type': 'str'},
'optimization_type': {'key': 'properties.optimizationType', 'type': 'str'},
'probe_path': {'key': 'properties.probePath', 'type': 'str'},
'geo_filters': {'key': 'properties.geoFilters', 'type': '[GeoFilter]'},
'default_origin_group': {'key': 'properties.defaultOriginGroup', 'type': 'ResourceReference'},
'url_signing_keys': {'key': 'properties.urlSigningKeys', 'type': '[UrlSigningKey]'},
'delivery_policy': {'key': 'properties.deliveryPolicy', 'type': 'EndpointPropertiesUpdateParametersDeliveryPolicy'},
'web_application_firewall_policy_link': {'key': 'properties.webApplicationFirewallPolicyLink', 'type': 'EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
origin_path: Optional[str] = None,
content_types_to_compress: Optional[List[str]] = None,
origin_host_header: Optional[str] = None,
is_compression_enabled: Optional[bool] = None,
is_http_allowed: Optional[bool] = None,
is_https_allowed: Optional[bool] = None,
query_string_caching_behavior: Optional[Union[str, "QueryStringCachingBehavior"]] = None,
optimization_type: Optional[Union[str, "OptimizationType"]] = None,
probe_path: Optional[str] = None,
geo_filters: Optional[List["GeoFilter"]] = None,
default_origin_group: Optional["ResourceReference"] = None,
url_signing_keys: Optional[List["UrlSigningKey"]] = None,
delivery_policy: Optional["EndpointPropertiesUpdateParametersDeliveryPolicy"] = None,
web_application_firewall_policy_link: Optional["EndpointPropertiesUpdateParametersWebApplicationFirewallPolicyLink"] = None,
**kwargs
):
super(EndpointUpdateParameters, self).__init__(**kwargs)
self.tags = tags
self.origin_path = origin_path
self.content_types_to_compress = content_types_to_compress
self.origin_host_header = origin_host_header
self.is_compression_enabled = is_compression_enabled
self.is_http_allowed = is_http_allowed
self.is_https_allowed = is_https_allowed
self.query_string_caching_behavior = query_string_caching_behavior
self.optimization_type = optimization_type
self.probe_path = probe_path
self.geo_filters = geo_filters
self.default_origin_group = default_origin_group
self.url_signing_keys = url_signing_keys
self.delivery_policy = delivery_policy
self.web_application_firewall_policy_link = web_application_firewall_policy_link
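# Hedged usage sketch (not part of the generated model code): building an
# EndpointUpdateParameters payload. The field values below are illustrative
# assumptions, not service defaults.
#
#     update = EndpointUpdateParameters(
#         origin_host_header='www.contoso.com',
#         is_compression_enabled=True,
#         content_types_to_compress=['text/html', 'application/json'],
#         query_string_caching_behavior='IgnoreQueryString',
#     )
#     # Per _attribute_map, these fields serialize under 'properties.*' keys
#     # when the object is sent to the service.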
class ErrorResponse(msrest.serialization.Model):
"""Error response indicates CDN service is not able to process the incoming request. The reason is provided in the error message.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code: Error code.
:vartype code: str
:ivar message: Error message indicating why the operation failed.
:vartype message: str
"""
_validation = {
'code': {'readonly': True},
'message': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ErrorResponse, self).__init__(**kwargs)
self.code = None
self.message = None
class GeoFilter(msrest.serialization.Model):
"""Rules defining user's geo access within a CDN endpoint.
All required parameters must be populated in order to send to Azure.
:param relative_path: Required. Relative path applicable to geo filter. (e.g. '/mypictures',
'/mypicture/kitty.jpg', etc.).
:type relative_path: str
:param action: Required. Action of the geo filter, i.e. allow or block access. Possible values
include: "Block", "Allow".
:type action: str or ~azure.mgmt.cdn.models.GeoFilterActions
:param country_codes: Required. Two letter country codes defining user country access in a geo
filter, e.g. AU, MX, US.
:type country_codes: list[str]
"""
_validation = {
'relative_path': {'required': True},
'action': {'required': True},
'country_codes': {'required': True},
}
_attribute_map = {
'relative_path': {'key': 'relativePath', 'type': 'str'},
'action': {'key': 'action', 'type': 'str'},
'country_codes': {'key': 'countryCodes', 'type': '[str]'},
}
def __init__(
self,
*,
relative_path: str,
action: Union[str, "GeoFilterActions"],
country_codes: List[str],
**kwargs
):
super(GeoFilter, self).__init__(**kwargs)
self.relative_path = relative_path
self.action = action
self.country_codes = country_codes
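# Hedged usage sketch: a GeoFilter that blocks the '/pictures/' path for two
# illustrative country codes. The values are assumptions for demonstration only;
# string values from the documented set are accepted for 'action'.
#
#     geo_filter = GeoFilter(
#         relative_path='/pictures/',
#         action='Block',
#         country_codes=['AU', 'NZ'],
#     )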
class HeaderActionParameters(msrest.serialization.Model):
"""Defines the parameters for the request header action.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleHeaderActionParameters".
:vartype odata_type: str
:param header_action: Required. Action to perform. Possible values include: "Append",
"Overwrite", "Delete".
:type header_action: str or ~azure.mgmt.cdn.models.HeaderAction
:param header_name: Required. Name of the header to modify.
:type header_name: str
:param value: Value for the specified action.
:type value: str
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'header_action': {'required': True},
'header_name': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'header_action': {'key': 'headerAction', 'type': 'str'},
'header_name': {'key': 'headerName', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleHeaderActionParameters"
def __init__(
self,
*,
header_action: Union[str, "HeaderAction"],
header_name: str,
value: Optional[str] = None,
**kwargs
):
super(HeaderActionParameters, self).__init__(**kwargs)
self.header_action = header_action
self.header_name = header_name
self.value = value
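# Hedged usage sketch: a header action that overwrites a header on matching
# requests. The header name and value below are illustrative assumptions.
#
#     header_action = HeaderActionParameters(
#         header_action='Overwrite',
#         header_name='X-Custom-Header',
#         value='my-value',
#     )
#     # odata_type is a class-level constant and does not need to be passed.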
class HealthProbeParameters(msrest.serialization.Model):
"""The JSON object that contains the properties to send health probes to origin.
:param probe_path: The path relative to the origin that is used to determine the health of the
origin.
:type probe_path: str
:param probe_request_type: The type of health probe request that is made. Possible values
include: "NotSet", "GET", "HEAD".
:type probe_request_type: str or ~azure.mgmt.cdn.models.HealthProbeRequestType
:param probe_protocol: Protocol to use for health probe. Possible values include: "NotSet",
"Http", "Https".
:type probe_protocol: str or ~azure.mgmt.cdn.models.ProbeProtocol
:param probe_interval_in_seconds: The number of seconds between health probes. Default is
240 seconds.
:type probe_interval_in_seconds: int
"""
_validation = {
'probe_interval_in_seconds': {'maximum': 255, 'minimum': 1},
}
_attribute_map = {
'probe_path': {'key': 'probePath', 'type': 'str'},
'probe_request_type': {'key': 'probeRequestType', 'type': 'str'},
'probe_protocol': {'key': 'probeProtocol', 'type': 'str'},
'probe_interval_in_seconds': {'key': 'probeIntervalInSeconds', 'type': 'int'},
}
def __init__(
self,
*,
probe_path: Optional[str] = None,
probe_request_type: Optional[Union[str, "HealthProbeRequestType"]] = None,
probe_protocol: Optional[Union[str, "ProbeProtocol"]] = None,
probe_interval_in_seconds: Optional[int] = None,
**kwargs
):
super(HealthProbeParameters, self).__init__(**kwargs)
self.probe_path = probe_path
self.probe_request_type = probe_request_type
self.probe_protocol = probe_protocol
self.probe_interval_in_seconds = probe_interval_in_seconds
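# Hedged usage sketch: health probe settings issuing a HEAD request over HTTPS
# every 120 seconds (within the documented 1-255 second range). The probe path
# is an illustrative assumption.
#
#     probe = HealthProbeParameters(
#         probe_path='/healthz',
#         probe_request_type='HEAD',
#         probe_protocol='Https',
#         probe_interval_in_seconds=120,
#     )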
class HttpErrorRangeParameters(msrest.serialization.Model):
"""The JSON object that represents the range for http status codes.
:param begin: The inclusive start of the http status code range.
:type begin: int
:param end: The inclusive end of the http status code range.
:type end: int
"""
_validation = {
'begin': {'maximum': 999, 'minimum': 100},
'end': {'maximum': 999, 'minimum': 100},
}
_attribute_map = {
'begin': {'key': 'begin', 'type': 'int'},
'end': {'key': 'end', 'type': 'int'},
}
def __init__(
self,
*,
begin: Optional[int] = None,
end: Optional[int] = None,
**kwargs
):
super(HttpErrorRangeParameters, self).__init__(**kwargs)
self.begin = begin
self.end = end
class HttpVersionMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for HttpVersion match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleHttpVersionConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Equal".
:type operator: str or ~azure.mgmt.cdn.models.HttpVersionOperator
:param negate_condition: Describes whether this is a negate condition.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleHttpVersionConditionParameters"
def __init__(
self,
*,
operator: Union[str, "HttpVersionOperator"],
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
**kwargs
):
super(HttpVersionMatchConditionParameters, self).__init__(**kwargs)
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
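# Hedged usage sketch: match requests made over HTTP/2. The version string is
# an illustrative assumption about the values the service accepts.
#
#     http_version_condition = HttpVersionMatchConditionParameters(
#         operator='Equal',
#         match_values=['2.0'],
#     )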
class IpAddressGroup(msrest.serialization.Model):
"""CDN Ip address group.
:param delivery_region: The delivery region of the ip address group.
:type delivery_region: str
:param ipv4_addresses: The list of ip v4 addresses.
:type ipv4_addresses: list[~azure.mgmt.cdn.models.CidrIpAddress]
:param ipv6_addresses: The list of ip v6 addresses.
:type ipv6_addresses: list[~azure.mgmt.cdn.models.CidrIpAddress]
"""
_attribute_map = {
'delivery_region': {'key': 'deliveryRegion', 'type': 'str'},
'ipv4_addresses': {'key': 'ipv4Addresses', 'type': '[CidrIpAddress]'},
'ipv6_addresses': {'key': 'ipv6Addresses', 'type': '[CidrIpAddress]'},
}
def __init__(
self,
*,
delivery_region: Optional[str] = None,
ipv4_addresses: Optional[List["CidrIpAddress"]] = None,
ipv6_addresses: Optional[List["CidrIpAddress"]] = None,
**kwargs
):
super(IpAddressGroup, self).__init__(**kwargs)
self.delivery_region = delivery_region
self.ipv4_addresses = ipv4_addresses
self.ipv6_addresses = ipv6_addresses
class IsDeviceMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for IsDevice match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleIsDeviceConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Equal".
:type operator: str or ~azure.mgmt.cdn.models.IsDeviceOperator
:param negate_condition: Describes whether this is a negate condition.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str or
~azure.mgmt.cdn.models.IsDeviceMatchConditionParametersMatchValuesItem]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleIsDeviceConditionParameters"
def __init__(
self,
*,
operator: Union[str, "IsDeviceOperator"],
negate_condition: Optional[bool] = None,
match_values: Optional[List[Union[str, "IsDeviceMatchConditionParametersMatchValuesItem"]]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(IsDeviceMatchConditionParameters, self).__init__(**kwargs)
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
class KeyVaultCertificateSourceParameters(msrest.serialization.Model):
"""Describes the parameters for using a user's KeyVault certificate for securing custom domain.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.KeyVaultCertificateSourceParameters".
:vartype odata_type: str
:param subscription_id: Required. Subscription Id of the user's Key Vault containing the SSL
certificate.
:type subscription_id: str
:param resource_group_name: Required. Resource group of the user's Key Vault containing the SSL
certificate.
:type resource_group_name: str
:param vault_name: Required. The name of the user's Key Vault containing the SSL certificate.
:type vault_name: str
:param secret_name: Required. The name of Key Vault Secret (representing the full certificate
PFX) in Key Vault.
:type secret_name: str
:param secret_version: The version (GUID) of the Key Vault Secret.
:type secret_version: str
:param update_rule: Required. Describes the action that shall be taken when the certificate is
updated in Key Vault. Possible values include: "NoAction".
:type update_rule: str or ~azure.mgmt.cdn.models.UpdateRule
:param delete_rule: Required. Describes the action that shall be taken when the certificate is
removed from Key Vault. Possible values include: "NoAction".
:type delete_rule: str or ~azure.mgmt.cdn.models.DeleteRule
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'subscription_id': {'required': True},
'resource_group_name': {'required': True},
'vault_name': {'required': True},
'secret_name': {'required': True},
'update_rule': {'required': True},
'delete_rule': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'secret_name': {'key': 'secretName', 'type': 'str'},
'secret_version': {'key': 'secretVersion', 'type': 'str'},
'update_rule': {'key': 'updateRule', 'type': 'str'},
'delete_rule': {'key': 'deleteRule', 'type': 'str'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.KeyVaultCertificateSourceParameters"
def __init__(
self,
*,
subscription_id: str,
resource_group_name: str,
vault_name: str,
secret_name: str,
update_rule: Union[str, "UpdateRule"],
delete_rule: Union[str, "DeleteRule"],
secret_version: Optional[str] = None,
**kwargs
):
super(KeyVaultCertificateSourceParameters, self).__init__(**kwargs)
self.subscription_id = subscription_id
self.resource_group_name = resource_group_name
self.vault_name = vault_name
self.secret_name = secret_name
self.secret_version = secret_version
self.update_rule = update_rule
self.delete_rule = delete_rule
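# Hedged usage sketch: referencing a customer-managed certificate in Key Vault.
# All identifiers below (subscription, resource group, vault, secret) are
# placeholder assumptions; 'NoAction' is the documented rule value.
#
#     cert_source = KeyVaultCertificateSourceParameters(
#         subscription_id='00000000-0000-0000-0000-000000000000',
#         resource_group_name='my-resource-group',
#         vault_name='my-key-vault',
#         secret_name='my-certificate',
#         update_rule='NoAction',
#         delete_rule='NoAction',
#     )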
class KeyVaultSigningKeyParameters(msrest.serialization.Model):
"""Describes the parameters for using a user's KeyVault for URL Signing Key.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.KeyVaultSigningKeyParameters".
:vartype odata_type: str
:param subscription_id: Required. Subscription Id of the user's Key Vault containing the
secret.
:type subscription_id: str
:param resource_group_name: Required. Resource group of the user's Key Vault containing the
secret.
:type resource_group_name: str
:param vault_name: Required. The name of the user's Key Vault containing the secret.
:type vault_name: str
:param secret_name: Required. The name of secret in Key Vault.
:type secret_name: str
:param secret_version: Required. The version (GUID) of the secret in Key Vault.
:type secret_version: str
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'subscription_id': {'required': True},
'resource_group_name': {'required': True},
'vault_name': {'required': True},
'secret_name': {'required': True},
'secret_version': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'},
'vault_name': {'key': 'vaultName', 'type': 'str'},
'secret_name': {'key': 'secretName', 'type': 'str'},
'secret_version': {'key': 'secretVersion', 'type': 'str'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.KeyVaultSigningKeyParameters"
def __init__(
self,
*,
subscription_id: str,
resource_group_name: str,
vault_name: str,
secret_name: str,
secret_version: str,
**kwargs
):
super(KeyVaultSigningKeyParameters, self).__init__(**kwargs)
self.subscription_id = subscription_id
self.resource_group_name = resource_group_name
self.vault_name = vault_name
self.secret_name = secret_name
self.secret_version = secret_version
class LoadBalancingSettingsParameters(msrest.serialization.Model):
"""Round-Robin load balancing settings for a backend pool.
:param sample_size: The number of samples to consider for load balancing decisions.
:type sample_size: int
:param successful_samples_required: The number of samples within the sample period that must
succeed.
:type successful_samples_required: int
:param additional_latency_in_milliseconds: The additional latency in milliseconds for probes to
fall into the lowest latency bucket.
:type additional_latency_in_milliseconds: int
"""
_attribute_map = {
'sample_size': {'key': 'sampleSize', 'type': 'int'},
'successful_samples_required': {'key': 'successfulSamplesRequired', 'type': 'int'},
'additional_latency_in_milliseconds': {'key': 'additionalLatencyInMilliseconds', 'type': 'int'},
}
def __init__(
self,
*,
sample_size: Optional[int] = None,
successful_samples_required: Optional[int] = None,
additional_latency_in_milliseconds: Optional[int] = None,
**kwargs
):
super(LoadBalancingSettingsParameters, self).__init__(**kwargs)
self.sample_size = sample_size
self.successful_samples_required = successful_samples_required
self.additional_latency_in_milliseconds = additional_latency_in_milliseconds
class LoadParameters(msrest.serialization.Model):
"""Parameters required for content load.
All required parameters must be populated in order to send to Azure.
:param content_paths: Required. The path to the content to be loaded. Path should be a relative
file URL of the origin.
:type content_paths: list[str]
"""
_validation = {
'content_paths': {'required': True},
}
_attribute_map = {
'content_paths': {'key': 'contentPaths', 'type': '[str]'},
}
def __init__(
self,
*,
content_paths: List[str],
**kwargs
):
super(LoadParameters, self).__init__(**kwargs)
self.content_paths = content_paths
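# Hedged usage sketch: pre-loading a single content path. The path is an
# illustrative assumption; paths must be relative file URLs on the origin.
#
#     load_parameters = LoadParameters(content_paths=['/pictures/city.png'])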
class ManagedCertificate(Certificate):
"""Managed Certificate used for https.
:param subject: Subject name in the certificate.
:type subject: str
:param expiration_date: Certificate expiration date.
:type expiration_date: str
:param thumbprint: Certificate thumbprint.
:type thumbprint: str
"""
_attribute_map = {
'subject': {'key': 'subject', 'type': 'str'},
'expiration_date': {'key': 'expirationDate', 'type': 'str'},
'thumbprint': {'key': 'thumbprint', 'type': 'str'},
}
def __init__(
self,
*,
subject: Optional[str] = None,
expiration_date: Optional[str] = None,
thumbprint: Optional[str] = None,
**kwargs
):
super(ManagedCertificate, self).__init__(subject=subject, expiration_date=expiration_date, thumbprint=thumbprint, **kwargs)
class ManagedCertificateParameters(SecretParameters):
"""Managed Certificate used for https.
All required parameters must be populated in order to send to Azure.
:param type: Required. The type of the Secret to create. Constant filled by server. Possible
values include: "UrlSigningKey", "CustomerCertificate", "ManagedCertificate".
:type type: str or ~azure.mgmt.cdn.models.SecretType
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ManagedCertificateParameters, self).__init__(**kwargs)
self.type = 'ManagedCertificate' # type: str
class ManagedRuleDefinition(msrest.serialization.Model):
"""Describes a managed rule definition.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar rule_id: Identifier for the managed rule.
:vartype rule_id: str
:ivar description: Describes the functionality of the managed rule.
:vartype description: str
"""
_validation = {
'rule_id': {'readonly': True},
'description': {'readonly': True},
}
_attribute_map = {
'rule_id': {'key': 'ruleId', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ManagedRuleDefinition, self).__init__(**kwargs)
self.rule_id = None
self.description = None
class ManagedRuleGroupDefinition(msrest.serialization.Model):
"""Describes a managed rule group.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar rule_group_name: Name of the managed rule group.
:vartype rule_group_name: str
:ivar description: Description of the managed rule group.
:vartype description: str
:ivar rules: List of rules within the managed rule group.
:vartype rules: list[~azure.mgmt.cdn.models.ManagedRuleDefinition]
"""
_validation = {
'rule_group_name': {'readonly': True},
'description': {'readonly': True},
'rules': {'readonly': True},
}
_attribute_map = {
'rule_group_name': {'key': 'ruleGroupName', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'rules': {'key': 'rules', 'type': '[ManagedRuleDefinition]'},
}
def __init__(
self,
**kwargs
):
super(ManagedRuleGroupDefinition, self).__init__(**kwargs)
self.rule_group_name = None
self.description = None
self.rules = None
class ManagedRuleGroupOverride(msrest.serialization.Model):
"""Defines a managed rule group override setting.
All required parameters must be populated in order to send to Azure.
:param rule_group_name: Required. Describes the managed rule group within the rule set to
override.
:type rule_group_name: str
:param rules: List of rules that will be disabled. If none specified, all rules in the group
will be disabled.
:type rules: list[~azure.mgmt.cdn.models.ManagedRuleOverride]
"""
_validation = {
'rule_group_name': {'required': True},
}
_attribute_map = {
'rule_group_name': {'key': 'ruleGroupName', 'type': 'str'},
'rules': {'key': 'rules', 'type': '[ManagedRuleOverride]'},
}
def __init__(
self,
*,
rule_group_name: str,
rules: Optional[List["ManagedRuleOverride"]] = None,
**kwargs
):
super(ManagedRuleGroupOverride, self).__init__(**kwargs)
self.rule_group_name = rule_group_name
self.rules = rules
class ManagedRuleOverride(msrest.serialization.Model):
"""Defines a managed rule group override setting.
All required parameters must be populated in order to send to Azure.
:param rule_id: Required. Identifier for the managed rule.
:type rule_id: str
:param enabled_state: Describes if the managed rule is in enabled or disabled state. Defaults
to Disabled if not specified. Possible values include: "Disabled", "Enabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.ManagedRuleEnabledState
:param action: Describes the override action to be applied when rule matches. Possible values
include: "Allow", "Block", "Log", "Redirect".
:type action: str or ~azure.mgmt.cdn.models.ActionType
"""
_validation = {
'rule_id': {'required': True},
}
_attribute_map = {
'rule_id': {'key': 'ruleId', 'type': 'str'},
'enabled_state': {'key': 'enabledState', 'type': 'str'},
'action': {'key': 'action', 'type': 'str'},
}
def __init__(
self,
*,
rule_id: str,
enabled_state: Optional[Union[str, "ManagedRuleEnabledState"]] = None,
action: Optional[Union[str, "ActionType"]] = None,
**kwargs
):
super(ManagedRuleOverride, self).__init__(**kwargs)
self.rule_id = rule_id
self.enabled_state = enabled_state
self.action = action
class ManagedRuleSet(msrest.serialization.Model):
"""Defines a managed rule set.
All required parameters must be populated in order to send to Azure.
:param rule_set_type: Required. Defines the rule set type to use.
:type rule_set_type: str
:param rule_set_version: Required. Defines the version of the rule set to use.
:type rule_set_version: str
:param anomaly_score: Verizon only: if the rule set supports anomaly detection mode, this
describes the threshold for blocking requests.
:type anomaly_score: int
:param rule_group_overrides: Defines the rule overrides to apply to the rule set.
:type rule_group_overrides: list[~azure.mgmt.cdn.models.ManagedRuleGroupOverride]
"""
_validation = {
'rule_set_type': {'required': True},
'rule_set_version': {'required': True},
'anomaly_score': {'maximum': 20, 'minimum': 0},
}
_attribute_map = {
'rule_set_type': {'key': 'ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'ruleSetVersion', 'type': 'str'},
'anomaly_score': {'key': 'anomalyScore', 'type': 'int'},
'rule_group_overrides': {'key': 'ruleGroupOverrides', 'type': '[ManagedRuleGroupOverride]'},
}
def __init__(
self,
*,
rule_set_type: str,
rule_set_version: str,
anomaly_score: Optional[int] = None,
rule_group_overrides: Optional[List["ManagedRuleGroupOverride"]] = None,
**kwargs
):
super(ManagedRuleSet, self).__init__(**kwargs)
self.rule_set_type = rule_set_type
self.rule_set_version = rule_set_version
self.anomaly_score = anomaly_score
self.rule_group_overrides = rule_group_overrides
class ManagedRuleSetDefinition(Resource):
"""Describes a managed rule set definition.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param sku: The pricing tier (defines a CDN provider, feature list and rate) of the
CdnWebApplicationFirewallPolicy.
:type sku: ~azure.mgmt.cdn.models.Sku
:ivar provisioning_state: Provisioning state of the managed rule set.
:vartype provisioning_state: str
:ivar rule_set_type: Type of the managed rule set.
:vartype rule_set_type: str
:ivar rule_set_version: Version of the managed rule set type.
:vartype rule_set_version: str
:ivar rule_groups: Rule groups of the managed rule set.
:vartype rule_groups: list[~azure.mgmt.cdn.models.ManagedRuleGroupDefinition]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'rule_set_type': {'readonly': True},
'rule_set_version': {'readonly': True},
'rule_groups': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'sku': {'key': 'sku', 'type': 'Sku'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'rule_set_type': {'key': 'properties.ruleSetType', 'type': 'str'},
'rule_set_version': {'key': 'properties.ruleSetVersion', 'type': 'str'},
'rule_groups': {'key': 'properties.ruleGroups', 'type': '[ManagedRuleGroupDefinition]'},
}
def __init__(
self,
*,
sku: Optional["Sku"] = None,
**kwargs
):
super(ManagedRuleSetDefinition, self).__init__(**kwargs)
self.sku = sku
self.provisioning_state = None
self.rule_set_type = None
self.rule_set_version = None
self.rule_groups = None
class ManagedRuleSetDefinitionList(msrest.serialization.Model):
"""List of managed rule set definitions available for use in a policy.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of managed rule set definitions.
:vartype value: list[~azure.mgmt.cdn.models.ManagedRuleSetDefinition]
:param next_link: URL to retrieve next set of managed rule set definitions.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ManagedRuleSetDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(ManagedRuleSetDefinitionList, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class ManagedRuleSetList(msrest.serialization.Model):
"""Defines the list of managed rule sets for the policy.
:param managed_rule_sets: List of rule sets.
:type managed_rule_sets: list[~azure.mgmt.cdn.models.ManagedRuleSet]
"""
_attribute_map = {
'managed_rule_sets': {'key': 'managedRuleSets', 'type': '[ManagedRuleSet]'},
}
def __init__(
self,
*,
managed_rule_sets: Optional[List["ManagedRuleSet"]] = None,
**kwargs
):
super(ManagedRuleSetList, self).__init__(**kwargs)
self.managed_rule_sets = managed_rule_sets
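# Hedged usage sketch: composing a managed rule set with a group override and
# attaching it to a ManagedRuleSetList. The rule set type/version, group name
# and rule id are placeholder assumptions, not values guaranteed by the service.
#
#     override = ManagedRuleGroupOverride(
#         rule_group_name='SQLI',
#         rules=[ManagedRuleOverride(rule_id='942100', enabled_state='Disabled')],
#     )
#     rule_set = ManagedRuleSet(
#         rule_set_type='DefaultRuleSet',
#         rule_set_version='1.0',
#         rule_group_overrides=[override],
#     )
#     managed_rules = ManagedRuleSetList(managed_rule_sets=[rule_set])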
class MatchCondition(msrest.serialization.Model):
"""Define match conditions.
All required parameters must be populated in order to send to Azure.
:param match_variable: Required. Match variable to compare against. Possible values include:
"RemoteAddress", "RequestMethod", "QueryString", "PostArgs", "RequestUri", "RequestHeader",
"RequestBody", "RequestScheme", "UrlPath", "UrlFileExtension", "UrlFileName", "HttpVersion",
"Cookies", "IsDevice", "RemoteAddr", "SocketAddr".
:type match_variable: str or ~azure.mgmt.cdn.models.MatchVariable
:param selector: Selector can be used to match a specific key for QueryString, Cookies,
RequestHeader or PostArgs.
:type selector: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"IPMatch", "GeoMatch", "Equal", "Contains", "LessThan", "GreaterThan", "LessThanOrEqual",
"GreaterThanOrEqual", "BeginsWith", "EndsWith", "RegEx".
:type operator: str or ~azure.mgmt.cdn.models.Operator
:param negate_condition: Describes if the result of this condition should be negated.
:type negate_condition: bool
:param match_value: Required. List of possible match values.
:type match_value: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.TransformType]
"""
_validation = {
'match_variable': {'required': True},
'operator': {'required': True},
'match_value': {'required': True},
}
_attribute_map = {
'match_variable': {'key': 'matchVariable', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_value': {'key': 'matchValue', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
def __init__(
self,
*,
match_variable: Union[str, "MatchVariable"],
operator: Union[str, "Operator"],
match_value: List[str],
selector: Optional[str] = None,
negate_condition: Optional[bool] = None,
transforms: Optional[List[Union[str, "TransformType"]]] = None,
**kwargs
):
super(MatchCondition, self).__init__(**kwargs)
self.match_variable = match_variable
self.selector = selector
self.operator = operator
self.negate_condition = negate_condition
self.match_value = match_value
self.transforms = transforms
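# Hedged usage sketch: a match condition on a request header. The header name,
# match values and transform are illustrative assumptions.
#
#     condition = MatchCondition(
#         match_variable='RequestHeader',
#         selector='User-Agent',
#         operator='Contains',
#         match_value=['bot'],
#         transforms=['Lowercase'],
#     )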
class MetricsResponse(msrest.serialization.Model):
"""Metrics Response.
:param date_time_begin:
:type date_time_begin: ~datetime.datetime
:param date_time_end:
:type date_time_end: ~datetime.datetime
:param granularity: Possible values include: "PT5M", "PT1H", "P1D".
:type granularity: str or ~azure.mgmt.cdn.models.MetricsResponseGranularity
:param series:
:type series: list[~azure.mgmt.cdn.models.MetricsResponseSeriesItem]
"""
_attribute_map = {
'date_time_begin': {'key': 'dateTimeBegin', 'type': 'iso-8601'},
'date_time_end': {'key': 'dateTimeEnd', 'type': 'iso-8601'},
'granularity': {'key': 'granularity', 'type': 'str'},
'series': {'key': 'series', 'type': '[MetricsResponseSeriesItem]'},
}
def __init__(
self,
*,
date_time_begin: Optional[datetime.datetime] = None,
date_time_end: Optional[datetime.datetime] = None,
granularity: Optional[Union[str, "MetricsResponseGranularity"]] = None,
series: Optional[List["MetricsResponseSeriesItem"]] = None,
**kwargs
):
super(MetricsResponse, self).__init__(**kwargs)
self.date_time_begin = date_time_begin
self.date_time_end = date_time_end
self.granularity = granularity
self.series = series
class MetricsResponseSeriesItem(msrest.serialization.Model):
"""MetricsResponseSeriesItem.
:param metric:
:type metric: str
:param unit: Possible values include: "count", "bytes", "bitsPerSecond".
:type unit: str or ~azure.mgmt.cdn.models.MetricsResponseSeriesItemUnit
:param groups:
:type groups: list[~azure.mgmt.cdn.models.MetricsResponseSeriesPropertiesItemsItem]
:param data:
:type data:
list[~azure.mgmt.cdn.models.Components1Gs0LlpSchemasMetricsresponsePropertiesSeriesItemsPropertiesDataItems]
"""
_attribute_map = {
'metric': {'key': 'metric', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'groups': {'key': 'groups', 'type': '[MetricsResponseSeriesPropertiesItemsItem]'},
'data': {'key': 'data', 'type': '[Components1Gs0LlpSchemasMetricsresponsePropertiesSeriesItemsPropertiesDataItems]'},
}
def __init__(
self,
*,
metric: Optional[str] = None,
unit: Optional[Union[str, "MetricsResponseSeriesItemUnit"]] = None,
groups: Optional[List["MetricsResponseSeriesPropertiesItemsItem"]] = None,
data: Optional[List["Components1Gs0LlpSchemasMetricsresponsePropertiesSeriesItemsPropertiesDataItems"]] = None,
**kwargs
):
super(MetricsResponseSeriesItem, self).__init__(**kwargs)
self.metric = metric
self.unit = unit
self.groups = groups
self.data = data
class MetricsResponseSeriesPropertiesItemsItem(msrest.serialization.Model):
"""MetricsResponseSeriesPropertiesItemsItem.
:param name:
:type name: str
:param value:
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
super(MetricsResponseSeriesPropertiesItemsItem, self).__init__(**kwargs)
self.name = name
self.value = value
class Operation(msrest.serialization.Model):
"""CDN REST API operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: Operation name: {provider}/{resource}/{operation}.
:vartype name: str
:param display: The object that represents the operation.
:type display: ~azure.mgmt.cdn.models.OperationDisplay
"""
_validation = {
'name': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'OperationDisplay'},
}
def __init__(
self,
*,
display: Optional["OperationDisplay"] = None,
**kwargs
):
super(Operation, self).__init__(**kwargs)
self.name = None
self.display = display
class OperationDisplay(msrest.serialization.Model):
"""The object that represents the operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar provider: Service provider: Microsoft.Cdn.
:vartype provider: str
:ivar resource: Resource on which the operation is performed: Profile, endpoint, etc.
:vartype resource: str
:ivar operation: Operation type: Read, write, delete, etc.
:vartype operation: str
"""
_validation = {
'provider': {'readonly': True},
'resource': {'readonly': True},
'operation': {'readonly': True},
}
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(OperationDisplay, self).__init__(**kwargs)
self.provider = None
self.resource = None
self.operation = None
class OperationsListResult(msrest.serialization.Model):
"""Result of the request to list CDN operations. It contains a list of operations and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of CDN operations supported by the CDN resource provider.
:vartype value: list[~azure.mgmt.cdn.models.Operation]
:param next_link: URL to get the next set of operation list results if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Operation]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(OperationsListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class Origin(Resource):
"""CDN origin is the source of the content being delivered via CDN. When the edge nodes represented by an endpoint do not have the requested content cached, they attempt to fetch it from one or more of the configured origins.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param host_name: The address of the origin. Domain names, IPv4 addresses, and IPv6 addresses
are supported. This should be unique across all origins in an endpoint.
:type host_name: str
:param http_port: The value of the HTTP port. Must be between 1 and 65535.
:type http_port: int
:param https_port: The value of the HTTPS port. Must be between 1 and 65535.
:type https_port: int
:param origin_host_header: The host header value sent to the origin with each request. If you
leave this blank, the request hostname determines this value. Azure CDN origins, such as Web
Apps, Blob Storage, and Cloud Services require this host header value to match the origin
hostname by default. This overrides the host header defined at Endpoint.
:type origin_host_header: str
:param priority: Priority of origin in given origin group for load balancing. Higher priorities
will not be used for load balancing if any lower priority origin is healthy. Must be between 1
and 5.
:type priority: int
:param weight: Weight of the origin in given origin group for load balancing. Must be between 1
and 1000.
:type weight: int
:param enabled: Origin is enabled for load balancing or not.
:type enabled: bool
:param private_link_alias: The Alias of the Private Link resource. Populating this optional
field indicates that this origin is 'Private'.
:type private_link_alias: str
:param private_link_resource_id: The Resource Id of the Private Link resource. Populating this
optional field indicates that this backend is 'Private'.
:type private_link_resource_id: str
:param private_link_location: The location of the Private Link resource. Required only if
'privateLinkResourceId' is populated.
:type private_link_location: str
:param private_link_approval_message: A custom message to be included in the approval request
to connect to the Private Link.
:type private_link_approval_message: str
:ivar resource_state: Resource status of the origin. Possible values include: "Creating",
"Active", "Deleting".
:vartype resource_state: str or ~azure.mgmt.cdn.models.OriginResourceState
:ivar provisioning_state: Provisioning status of the origin.
:vartype provisioning_state: str
:ivar private_endpoint_status: The approval status for the connection to the Private Link.
Possible values include: "Pending", "Approved", "Rejected", "Disconnected", "Timeout".
:vartype private_endpoint_status: str or ~azure.mgmt.cdn.models.PrivateEndpointStatus
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'http_port': {'maximum': 65535, 'minimum': 1},
'https_port': {'maximum': 65535, 'minimum': 1},
'priority': {'maximum': 5, 'minimum': 1},
'weight': {'maximum': 1000, 'minimum': 1},
'resource_state': {'readonly': True},
'provisioning_state': {'readonly': True},
'private_endpoint_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'http_port': {'key': 'properties.httpPort', 'type': 'int'},
'https_port': {'key': 'properties.httpsPort', 'type': 'int'},
'origin_host_header': {'key': 'properties.originHostHeader', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'weight': {'key': 'properties.weight', 'type': 'int'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'private_link_alias': {'key': 'properties.privateLinkAlias', 'type': 'str'},
'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
'private_link_location': {'key': 'properties.privateLinkLocation', 'type': 'str'},
'private_link_approval_message': {'key': 'properties.privateLinkApprovalMessage', 'type': 'str'},
'resource_state': {'key': 'properties.resourceState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'private_endpoint_status': {'key': 'properties.privateEndpointStatus', 'type': 'str'},
}
def __init__(
self,
*,
host_name: Optional[str] = None,
http_port: Optional[int] = None,
https_port: Optional[int] = None,
origin_host_header: Optional[str] = None,
priority: Optional[int] = None,
weight: Optional[int] = None,
enabled: Optional[bool] = None,
private_link_alias: Optional[str] = None,
private_link_resource_id: Optional[str] = None,
private_link_location: Optional[str] = None,
private_link_approval_message: Optional[str] = None,
**kwargs
):
super(Origin, self).__init__(**kwargs)
self.host_name = host_name
self.http_port = http_port
self.https_port = https_port
self.origin_host_header = origin_host_header
self.priority = priority
self.weight = weight
self.enabled = enabled
self.private_link_alias = private_link_alias
self.private_link_resource_id = private_link_resource_id
self.private_link_location = private_link_location
self.private_link_approval_message = private_link_approval_message
self.resource_state = None
self.provisioning_state = None
self.private_endpoint_status = None
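# Hedged usage sketch: an origin pointing at an illustrative storage account
# host. Only writable fields are set; read-only fields (resource_state,
# provisioning_state, private_endpoint_status) are populated by the service.
#
#     origin = Origin(
#         host_name='contoso.blob.core.windows.net',
#         http_port=80,
#         https_port=443,
#         origin_host_header='contoso.blob.core.windows.net',
#         priority=1,
#         weight=1000,
#         enabled=True,
#     )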
class OriginGroup(Resource):
"""Origin group comprising of origins is used for load balancing to origins when the content cannot be served from CDN.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param health_probe_settings: Health probe settings for the origin, used to determine the
health of the origin.
:type health_probe_settings: ~azure.mgmt.cdn.models.HealthProbeParameters
:param origins: The source of the content being delivered via CDN within given origin group.
:type origins: list[~azure.mgmt.cdn.models.ResourceReference]
:param traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Time in minutes to shift
the traffic to the endpoint gradually when an unhealthy endpoint becomes healthy or a new
endpoint is added. Default is 10 mins. This property is currently not supported.
:type traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: int
:param response_based_origin_error_detection_settings: The JSON object that contains the
properties to determine origin health using real requests/responses. This property is currently
not supported.
:type response_based_origin_error_detection_settings:
~azure.mgmt.cdn.models.ResponseBasedOriginErrorDetectionParameters
:ivar resource_state: Resource status of the origin group. Possible values include: "Creating",
"Active", "Deleting".
:vartype resource_state: str or ~azure.mgmt.cdn.models.OriginGroupResourceState
:ivar provisioning_state: Provisioning status of the origin group.
:vartype provisioning_state: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'maximum': 50, 'minimum': 0},
'resource_state': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'health_probe_settings': {'key': 'properties.healthProbeSettings', 'type': 'HealthProbeParameters'},
'origins': {'key': 'properties.origins', 'type': '[ResourceReference]'},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'key': 'properties.trafficRestorationTimeToHealedOrNewEndpointsInMinutes', 'type': 'int'},
'response_based_origin_error_detection_settings': {'key': 'properties.responseBasedOriginErrorDetectionSettings', 'type': 'ResponseBasedOriginErrorDetectionParameters'},
'resource_state': {'key': 'properties.resourceState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
health_probe_settings: Optional["HealthProbeParameters"] = None,
origins: Optional[List["ResourceReference"]] = None,
traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Optional[int] = None,
response_based_origin_error_detection_settings: Optional["ResponseBasedOriginErrorDetectionParameters"] = None,
**kwargs
):
super(OriginGroup, self).__init__(**kwargs)
self.health_probe_settings = health_probe_settings
self.origins = origins
self.traffic_restoration_time_to_healed_or_new_endpoints_in_minutes = traffic_restoration_time_to_healed_or_new_endpoints_in_minutes
self.response_based_origin_error_detection_settings = response_based_origin_error_detection_settings
self.resource_state = None
self.provisioning_state = None
class OriginGroupListResult(msrest.serialization.Model):
"""Result of the request to list origin groups. It contains a list of origin groups objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of CDN origin groups within an endpoint.
:vartype value: list[~azure.mgmt.cdn.models.OriginGroup]
:param next_link: URL to get the next set of origin objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[OriginGroup]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(OriginGroupListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class OriginGroupOverrideAction(DeliveryRuleAction):
"""Defines the origin group override action for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the action for the delivery rule. Constant filled by server.
Possible values include: "CacheExpiration", "CacheKeyQueryString", "ModifyRequestHeader",
"ModifyResponseHeader", "UrlRedirect", "UrlRewrite", "UrlSigning", "OriginGroupOverride".
:type name: str or ~azure.mgmt.cdn.models.DeliveryRuleActionEnum
:param parameters: Required. Defines the parameters for the action.
:type parameters: ~azure.mgmt.cdn.models.OriginGroupOverrideActionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'OriginGroupOverrideActionParameters'},
}
def __init__(
self,
*,
parameters: "OriginGroupOverrideActionParameters",
**kwargs
):
super(OriginGroupOverrideAction, self).__init__(**kwargs)
self.name = 'OriginGroupOverride' # type: str
self.parameters = parameters
class OriginGroupOverrideActionParameters(msrest.serialization.Model):
"""Defines the parameters for the origin group override action.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleOriginGroupOverrideActionParameters".
:vartype odata_type: str
:param origin_group: Required. Defines the OriginGroup that would override the
DefaultOriginGroup.
:type origin_group: ~azure.mgmt.cdn.models.ResourceReference
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'origin_group': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'origin_group': {'key': 'originGroup', 'type': 'ResourceReference'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleOriginGroupOverrideActionParameters"
def __init__(
self,
*,
origin_group: "ResourceReference",
**kwargs
):
super(OriginGroupOverrideActionParameters, self).__init__(**kwargs)
self.origin_group = origin_group
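# Hedged usage sketch: overriding the default origin group from a delivery
# rule action. ResourceReference is assumed to be the id-wrapper model defined
# elsewhere in this module; the resource id below is a placeholder.
#
#     override_action = OriginGroupOverrideAction(
#         parameters=OriginGroupOverrideActionParameters(
#             origin_group=ResourceReference(
#                 id='/subscriptions/.../originGroups/my-origin-group',
#             ),
#         ),
#     )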
class OriginGroupUpdatePropertiesParameters(msrest.serialization.Model):
"""The JSON object that contains the properties of the origin group.
:param health_probe_settings: Health probe settings for the origin, used to determine the
health of the origin.
:type health_probe_settings: ~azure.mgmt.cdn.models.HealthProbeParameters
:param origins: The source of the content being delivered via CDN within given origin group.
:type origins: list[~azure.mgmt.cdn.models.ResourceReference]
:param traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Time in minutes to shift
the traffic to the endpoint gradually when an unhealthy endpoint becomes healthy or a new
endpoint is added. Default is 10 mins. This property is currently not supported.
:type traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: int
:param response_based_origin_error_detection_settings: The JSON object that contains the
properties to determine origin health using real requests/responses. This property is currently
not supported.
:type response_based_origin_error_detection_settings:
~azure.mgmt.cdn.models.ResponseBasedOriginErrorDetectionParameters
"""
_validation = {
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'maximum': 50, 'minimum': 0},
}
_attribute_map = {
'health_probe_settings': {'key': 'healthProbeSettings', 'type': 'HealthProbeParameters'},
'origins': {'key': 'origins', 'type': '[ResourceReference]'},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'key': 'trafficRestorationTimeToHealedOrNewEndpointsInMinutes', 'type': 'int'},
'response_based_origin_error_detection_settings': {'key': 'responseBasedOriginErrorDetectionSettings', 'type': 'ResponseBasedOriginErrorDetectionParameters'},
}
def __init__(
self,
*,
health_probe_settings: Optional["HealthProbeParameters"] = None,
origins: Optional[List["ResourceReference"]] = None,
traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Optional[int] = None,
response_based_origin_error_detection_settings: Optional["ResponseBasedOriginErrorDetectionParameters"] = None,
**kwargs
):
super(OriginGroupUpdatePropertiesParameters, self).__init__(**kwargs)
self.health_probe_settings = health_probe_settings
self.origins = origins
self.traffic_restoration_time_to_healed_or_new_endpoints_in_minutes = traffic_restoration_time_to_healed_or_new_endpoints_in_minutes
self.response_based_origin_error_detection_settings = response_based_origin_error_detection_settings
class OriginGroupProperties(OriginGroupUpdatePropertiesParameters):
"""The JSON object that contains the properties of the origin group.
Variables are only populated by the server, and will be ignored when sending a request.
:param health_probe_settings: Health probe settings for the origin, used to determine the
health of the origin.
:type health_probe_settings: ~azure.mgmt.cdn.models.HealthProbeParameters
:param origins: The source of the content being delivered via CDN within given origin group.
:type origins: list[~azure.mgmt.cdn.models.ResourceReference]
:param traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Time in minutes to shift
the traffic to the endpoint gradually when an unhealthy endpoint becomes healthy or a new
endpoint is added. Default is 10 mins. This property is currently not supported.
:type traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: int
:param response_based_origin_error_detection_settings: The JSON object that contains the
properties to determine origin health using real requests/responses. This property is currently
not supported.
:type response_based_origin_error_detection_settings:
~azure.mgmt.cdn.models.ResponseBasedOriginErrorDetectionParameters
:ivar resource_state: Resource status of the origin group. Possible values include: "Creating",
"Active", "Deleting".
:vartype resource_state: str or ~azure.mgmt.cdn.models.OriginGroupResourceState
:ivar provisioning_state: Provisioning status of the origin group.
:vartype provisioning_state: str
"""
_validation = {
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'maximum': 50, 'minimum': 0},
'resource_state': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'health_probe_settings': {'key': 'healthProbeSettings', 'type': 'HealthProbeParameters'},
'origins': {'key': 'origins', 'type': '[ResourceReference]'},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'key': 'trafficRestorationTimeToHealedOrNewEndpointsInMinutes', 'type': 'int'},
'response_based_origin_error_detection_settings': {'key': 'responseBasedOriginErrorDetectionSettings', 'type': 'ResponseBasedOriginErrorDetectionParameters'},
'resource_state': {'key': 'resourceState', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
}
def __init__(
self,
*,
health_probe_settings: Optional["HealthProbeParameters"] = None,
origins: Optional[List["ResourceReference"]] = None,
traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Optional[int] = None,
response_based_origin_error_detection_settings: Optional["ResponseBasedOriginErrorDetectionParameters"] = None,
**kwargs
):
super(OriginGroupProperties, self).__init__(health_probe_settings=health_probe_settings, origins=origins, traffic_restoration_time_to_healed_or_new_endpoints_in_minutes=traffic_restoration_time_to_healed_or_new_endpoints_in_minutes, response_based_origin_error_detection_settings=response_based_origin_error_detection_settings, **kwargs)
self.resource_state = None
self.provisioning_state = None
class OriginGroupUpdateParameters(msrest.serialization.Model):
"""Origin group properties needed for origin group creation or update.
:param health_probe_settings: Health probe settings for the origin, used to determine the
health of the origin.
:type health_probe_settings: ~azure.mgmt.cdn.models.HealthProbeParameters
:param origins: The source of the content being delivered via CDN within given origin group.
:type origins: list[~azure.mgmt.cdn.models.ResourceReference]
:param traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Time in minutes to shift
the traffic to the endpoint gradually when an unhealthy endpoint becomes healthy or a new
endpoint is added. Default is 10 mins. This property is currently not supported.
:type traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: int
:param response_based_origin_error_detection_settings: The JSON object that contains the
properties to determine origin health using real requests/responses. This property is currently
not supported.
:type response_based_origin_error_detection_settings:
~azure.mgmt.cdn.models.ResponseBasedOriginErrorDetectionParameters
"""
_validation = {
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'maximum': 50, 'minimum': 0},
}
_attribute_map = {
'health_probe_settings': {'key': 'properties.healthProbeSettings', 'type': 'HealthProbeParameters'},
'origins': {'key': 'properties.origins', 'type': '[ResourceReference]'},
'traffic_restoration_time_to_healed_or_new_endpoints_in_minutes': {'key': 'properties.trafficRestorationTimeToHealedOrNewEndpointsInMinutes', 'type': 'int'},
'response_based_origin_error_detection_settings': {'key': 'properties.responseBasedOriginErrorDetectionSettings', 'type': 'ResponseBasedOriginErrorDetectionParameters'},
}
def __init__(
self,
*,
health_probe_settings: Optional["HealthProbeParameters"] = None,
origins: Optional[List["ResourceReference"]] = None,
traffic_restoration_time_to_healed_or_new_endpoints_in_minutes: Optional[int] = None,
response_based_origin_error_detection_settings: Optional["ResponseBasedOriginErrorDetectionParameters"] = None,
**kwargs
):
super(OriginGroupUpdateParameters, self).__init__(**kwargs)
self.health_probe_settings = health_probe_settings
self.origins = origins
self.traffic_restoration_time_to_healed_or_new_endpoints_in_minutes = traffic_restoration_time_to_healed_or_new_endpoints_in_minutes
self.response_based_origin_error_detection_settings = response_based_origin_error_detection_settings
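# Illustrative sketch (not part of the generated code): building an origin group update
# payload with the keyword-only constructor above. The resource ID is a placeholder, and
# the traffic restoration time must stay within the 0-50 minute validation range.
#
#   update = OriginGroupUpdateParameters(
#       origins=[ResourceReference(id="<origin-resource-id>")],
#       traffic_restoration_time_to_healed_or_new_endpoints_in_minutes=10,
#   )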
class OriginListResult(msrest.serialization.Model):
"""Result of the request to list origins. It contains a list of origin objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of CDN origins within an endpoint.
:vartype value: list[~azure.mgmt.cdn.models.Origin]
:param next_link: URL to get the next set of origin objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Origin]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(OriginListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class OriginUpdatePropertiesParameters(msrest.serialization.Model):
"""The JSON object that contains the properties of the origin.
:param host_name: The address of the origin. Domain names, IPv4 addresses, and IPv6 addresses
are supported. This should be unique across all origins in an endpoint.
:type host_name: str
:param http_port: The value of the HTTP port. Must be between 1 and 65535.
:type http_port: int
:param https_port: The value of the HTTPS port. Must be between 1 and 65535.
:type https_port: int
:param origin_host_header: The host header value sent to the origin with each request. If you
leave this blank, the request hostname determines this value. Azure CDN origins, such as Web
Apps, Blob Storage, and Cloud Services require this host header value to match the origin
hostname by default. This overrides the host header defined at Endpoint.
:type origin_host_header: str
:param priority: Priority of origin in given origin group for load balancing. Higher priorities
will not be used for load balancing if any lower priority origin is healthy. Must be between 1
and 5.
:type priority: int
:param weight: Weight of the origin in given origin group for load balancing. Must be between 1
and 1000.
:type weight: int
:param enabled: Origin is enabled for load balancing or not.
:type enabled: bool
:param private_link_alias: The Alias of the Private Link resource. Populating this optional
field indicates that this origin is 'Private'.
:type private_link_alias: str
:param private_link_resource_id: The Resource Id of the Private Link resource. Populating this
optional field indicates that this backend is 'Private'.
:type private_link_resource_id: str
:param private_link_location: The location of the Private Link resource. Required only if
'privateLinkResourceId' is populated.
:type private_link_location: str
:param private_link_approval_message: A custom message to be included in the approval request
to connect to the Private Link.
:type private_link_approval_message: str
"""
_validation = {
'http_port': {'maximum': 65535, 'minimum': 1},
'https_port': {'maximum': 65535, 'minimum': 1},
'priority': {'maximum': 5, 'minimum': 1},
'weight': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'host_name': {'key': 'hostName', 'type': 'str'},
'http_port': {'key': 'httpPort', 'type': 'int'},
'https_port': {'key': 'httpsPort', 'type': 'int'},
'origin_host_header': {'key': 'originHostHeader', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'weight': {'key': 'weight', 'type': 'int'},
'enabled': {'key': 'enabled', 'type': 'bool'},
'private_link_alias': {'key': 'privateLinkAlias', 'type': 'str'},
'private_link_resource_id': {'key': 'privateLinkResourceId', 'type': 'str'},
'private_link_location': {'key': 'privateLinkLocation', 'type': 'str'},
'private_link_approval_message': {'key': 'privateLinkApprovalMessage', 'type': 'str'},
}
def __init__(
self,
*,
host_name: Optional[str] = None,
http_port: Optional[int] = None,
https_port: Optional[int] = None,
origin_host_header: Optional[str] = None,
priority: Optional[int] = None,
weight: Optional[int] = None,
enabled: Optional[bool] = None,
private_link_alias: Optional[str] = None,
private_link_resource_id: Optional[str] = None,
private_link_location: Optional[str] = None,
private_link_approval_message: Optional[str] = None,
**kwargs
):
super(OriginUpdatePropertiesParameters, self).__init__(**kwargs)
self.host_name = host_name
self.http_port = http_port
self.https_port = https_port
self.origin_host_header = origin_host_header
self.priority = priority
self.weight = weight
self.enabled = enabled
self.private_link_alias = private_link_alias
self.private_link_resource_id = private_link_resource_id
self.private_link_location = private_link_location
self.private_link_approval_message = private_link_approval_message
class OriginProperties(OriginUpdatePropertiesParameters):
"""The JSON object that contains the properties of the origin.
Variables are only populated by the server, and will be ignored when sending a request.
:param host_name: The address of the origin. Domain names, IPv4 addresses, and IPv6 addresses
are supported. This should be unique across all origins in an endpoint.
:type host_name: str
:param http_port: The value of the HTTP port. Must be between 1 and 65535.
:type http_port: int
:param https_port: The value of the HTTPS port. Must be between 1 and 65535.
:type https_port: int
:param origin_host_header: The host header value sent to the origin with each request. If you
leave this blank, the request hostname determines this value. Azure CDN origins, such as Web
Apps, Blob Storage, and Cloud Services require this host header value to match the origin
hostname by default. This overrides the host header defined at Endpoint.
:type origin_host_header: str
:param priority: Priority of origin in given origin group for load balancing. Higher priorities
will not be used for load balancing if any lower priority origin is healthy. Must be between 1
and 5.
:type priority: int
:param weight: Weight of the origin in given origin group for load balancing. Must be between 1
and 1000.
:type weight: int
:param enabled: Origin is enabled for load balancing or not.
:type enabled: bool
:param private_link_alias: The Alias of the Private Link resource. Populating this optional
field indicates that this origin is 'Private'.
:type private_link_alias: str
:param private_link_resource_id: The Resource Id of the Private Link resource. Populating this
optional field indicates that this backend is 'Private'.
:type private_link_resource_id: str
:param private_link_location: The location of the Private Link resource. Required only if
'privateLinkResourceId' is populated.
:type private_link_location: str
:param private_link_approval_message: A custom message to be included in the approval request
to connect to the Private Link.
:type private_link_approval_message: str
:ivar resource_state: Resource status of the origin. Possible values include: "Creating",
"Active", "Deleting".
:vartype resource_state: str or ~azure.mgmt.cdn.models.OriginResourceState
:ivar provisioning_state: Provisioning status of the origin.
:vartype provisioning_state: str
:ivar private_endpoint_status: The approval status for the connection to the Private Link.
Possible values include: "Pending", "Approved", "Rejected", "Disconnected", "Timeout".
:vartype private_endpoint_status: str or ~azure.mgmt.cdn.models.PrivateEndpointStatus
"""
_validation = {
'http_port': {'maximum': 65535, 'minimum': 1},
'https_port': {'maximum': 65535, 'minimum': 1},
'priority': {'maximum': 5, 'minimum': 1},
'weight': {'maximum': 1000, 'minimum': 1},
'resource_state': {'readonly': True},
'provisioning_state': {'readonly': True},
'private_endpoint_status': {'readonly': True},
}
_attribute_map = {
'host_name': {'key': 'hostName', 'type': 'str'},
'http_port': {'key': 'httpPort', 'type': 'int'},
'https_port': {'key': 'httpsPort', 'type': 'int'},
'origin_host_header': {'key': 'originHostHeader', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'weight': {'key': 'weight', 'type': 'int'},
'enabled': {'key': 'enabled', 'type': 'bool'},
'private_link_alias': {'key': 'privateLinkAlias', 'type': 'str'},
'private_link_resource_id': {'key': 'privateLinkResourceId', 'type': 'str'},
'private_link_location': {'key': 'privateLinkLocation', 'type': 'str'},
'private_link_approval_message': {'key': 'privateLinkApprovalMessage', 'type': 'str'},
'resource_state': {'key': 'resourceState', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'private_endpoint_status': {'key': 'privateEndpointStatus', 'type': 'str'},
}
def __init__(
self,
*,
host_name: Optional[str] = None,
http_port: Optional[int] = None,
https_port: Optional[int] = None,
origin_host_header: Optional[str] = None,
priority: Optional[int] = None,
weight: Optional[int] = None,
enabled: Optional[bool] = None,
private_link_alias: Optional[str] = None,
private_link_resource_id: Optional[str] = None,
private_link_location: Optional[str] = None,
private_link_approval_message: Optional[str] = None,
**kwargs
):
super(OriginProperties, self).__init__(host_name=host_name, http_port=http_port, https_port=https_port, origin_host_header=origin_host_header, priority=priority, weight=weight, enabled=enabled, private_link_alias=private_link_alias, private_link_resource_id=private_link_resource_id, private_link_location=private_link_location, private_link_approval_message=private_link_approval_message, **kwargs)
self.resource_state = None
self.provisioning_state = None
self.private_endpoint_status = None
class OriginUpdateParameters(msrest.serialization.Model):
"""Origin properties needed for origin update.
:param host_name: The address of the origin. Domain names, IPv4 addresses, and IPv6 addresses
are supported. This should be unique across all origins in an endpoint.
:type host_name: str
:param http_port: The value of the HTTP port. Must be between 1 and 65535.
:type http_port: int
:param https_port: The value of the HTTPS port. Must be between 1 and 65535.
:type https_port: int
:param origin_host_header: The host header value sent to the origin with each request. If you
leave this blank, the request hostname determines this value. Azure CDN origins, such as Web
Apps, Blob Storage, and Cloud Services require this host header value to match the origin
hostname by default. This overrides the host header defined at Endpoint.
:type origin_host_header: str
:param priority: Priority of origin in given origin group for load balancing. Higher priorities
will not be used for load balancing if any lower priority origin is healthy. Must be between 1
and 5.
:type priority: int
:param weight: Weight of the origin in given origin group for load balancing. Must be between 1
and 1000.
:type weight: int
:param enabled: Origin is enabled for load balancing or not.
:type enabled: bool
:param private_link_alias: The Alias of the Private Link resource. Populating this optional
field indicates that this origin is 'Private'.
:type private_link_alias: str
:param private_link_resource_id: The Resource Id of the Private Link resource. Populating this
optional field indicates that this backend is 'Private'.
:type private_link_resource_id: str
:param private_link_location: The location of the Private Link resource. Required only if
'privateLinkResourceId' is populated.
:type private_link_location: str
:param private_link_approval_message: A custom message to be included in the approval request
to connect to the Private Link.
:type private_link_approval_message: str
"""
_validation = {
'http_port': {'maximum': 65535, 'minimum': 1},
'https_port': {'maximum': 65535, 'minimum': 1},
'priority': {'maximum': 5, 'minimum': 1},
'weight': {'maximum': 1000, 'minimum': 1},
}
_attribute_map = {
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'http_port': {'key': 'properties.httpPort', 'type': 'int'},
'https_port': {'key': 'properties.httpsPort', 'type': 'int'},
'origin_host_header': {'key': 'properties.originHostHeader', 'type': 'str'},
'priority': {'key': 'properties.priority', 'type': 'int'},
'weight': {'key': 'properties.weight', 'type': 'int'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'private_link_alias': {'key': 'properties.privateLinkAlias', 'type': 'str'},
'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
'private_link_location': {'key': 'properties.privateLinkLocation', 'type': 'str'},
'private_link_approval_message': {'key': 'properties.privateLinkApprovalMessage', 'type': 'str'},
}
def __init__(
self,
*,
host_name: Optional[str] = None,
http_port: Optional[int] = None,
https_port: Optional[int] = None,
origin_host_header: Optional[str] = None,
priority: Optional[int] = None,
weight: Optional[int] = None,
enabled: Optional[bool] = None,
private_link_alias: Optional[str] = None,
private_link_resource_id: Optional[str] = None,
private_link_location: Optional[str] = None,
private_link_approval_message: Optional[str] = None,
**kwargs
):
super(OriginUpdateParameters, self).__init__(**kwargs)
self.host_name = host_name
self.http_port = http_port
self.https_port = https_port
self.origin_host_header = origin_host_header
self.priority = priority
self.weight = weight
self.enabled = enabled
self.private_link_alias = private_link_alias
self.private_link_resource_id = private_link_resource_id
self.private_link_location = private_link_location
self.private_link_approval_message = private_link_approval_message
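# Illustrative sketch (not part of the generated code): an origin update that stays within
# the declared validation ranges (ports 1-65535, priority 1-5, weight 1-1000). The host
# name is a placeholder.
#
#   origin_update = OriginUpdateParameters(
#       host_name="contoso.blob.core.windows.net",
#       https_port=443,
#       priority=1,
#       weight=1000,
#       enabled=True,
#   )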
class PolicySettings(msrest.serialization.Model):
"""Defines contents of a web application firewall global configuration.
:param enabled_state: Describes if the policy is in an enabled or disabled state. Possible
values include: "Disabled", "Enabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.PolicyEnabledState
:param mode: Describes if it is in detection mode or prevention mode at policy level. Possible
values include: "Prevention", "Detection".
:type mode: str or ~azure.mgmt.cdn.models.PolicyMode
:param default_redirect_url: If action type is redirect, this field represents the default
redirect URL for the client.
:type default_redirect_url: str
:param default_custom_block_response_status_code: If the action type is block, this field
defines the default customer overridable http response status code. Possible values include:
200, 403, 405, 406, 429.
:type default_custom_block_response_status_code: int or ~azure.mgmt.cdn.models.Enum46
:param default_custom_block_response_body: If the action type is block, customer can override
the response body. The body must be specified in base64 encoding.
:type default_custom_block_response_body: str
"""
_validation = {
'default_custom_block_response_body': {'pattern': r'^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{4})$'},
}
_attribute_map = {
'enabled_state': {'key': 'enabledState', 'type': 'str'},
'mode': {'key': 'mode', 'type': 'str'},
'default_redirect_url': {'key': 'defaultRedirectUrl', 'type': 'str'},
'default_custom_block_response_status_code': {'key': 'defaultCustomBlockResponseStatusCode', 'type': 'int'},
'default_custom_block_response_body': {'key': 'defaultCustomBlockResponseBody', 'type': 'str'},
}
def __init__(
self,
*,
enabled_state: Optional[Union[str, "PolicyEnabledState"]] = None,
mode: Optional[Union[str, "PolicyMode"]] = None,
default_redirect_url: Optional[str] = None,
default_custom_block_response_status_code: Optional[Union[int, "Enum46"]] = None,
default_custom_block_response_body: Optional[str] = None,
**kwargs
):
super(PolicySettings, self).__init__(**kwargs)
self.enabled_state = enabled_state
self.mode = mode
self.default_redirect_url = default_redirect_url
self.default_custom_block_response_status_code = default_custom_block_response_status_code
self.default_custom_block_response_body = default_custom_block_response_body
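# Illustrative sketch (not part of the generated code): a WAF policy settings object. The
# status code 403 is one of the documented values, and the block response body must be
# base64-encoded to satisfy the pattern validation above ("Blocked" is encoded here).
#
#   settings = PolicySettings(
#       enabled_state="Enabled",
#       mode="Prevention",
#       default_custom_block_response_status_code=403,
#       default_custom_block_response_body="QmxvY2tlZA==",
#   )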
class PostArgsMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for PostArgs match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRulePostArgsConditionParameters".
:vartype odata_type: str
:param selector: Name of PostArg to be matched.
:type selector: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"Equal", "Contains", "BeginsWith", "EndsWith", "LessThan", "LessThanOrEqual", "GreaterThan",
"GreaterThanOrEqual", "RegEx".
:type operator: str or ~azure.mgmt.cdn.models.PostArgsOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRulePostArgsConditionParameters"
def __init__(
self,
*,
operator: Union[str, "PostArgsOperator"],
selector: Optional[str] = None,
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(PostArgsMatchConditionParameters, self).__init__(**kwargs)
self.selector = selector
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
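# Illustrative sketch (not part of the generated code): matching a POST argument named
# "customerType". The operator string comes from the docstring above; "Lowercase" is
# assumed to be a valid Transform member, and odata_type is a class-level constant that
# is not passed to the constructor.
#
#   condition = PostArgsMatchConditionParameters(
#       operator="Contains",
#       selector="customerType",
#       match_values=["commercial"],
#       transforms=["Lowercase"],
#   )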
class Profile(TrackedResource):
"""CDN profile is a logical grouping of endpoints that share the same settings, such as CDN provider and pricing tier.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param location: Required. Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: Required. The pricing tier (defines a CDN provider, feature list and rate) of the
CDN profile.
:type sku: ~azure.mgmt.cdn.models.Sku
:ivar resource_state: Resource status of the profile. Possible values include: "Creating",
"Active", "Deleting", "Disabled".
:vartype resource_state: str or ~azure.mgmt.cdn.models.ProfileResourceState
:ivar provisioning_state: Provisioning status of the profile.
:vartype provisioning_state: str
:ivar frontdoor_id: The Id of the frontdoor.
:vartype frontdoor_id: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'location': {'required': True},
'sku': {'required': True},
'resource_state': {'readonly': True},
'provisioning_state': {'readonly': True},
'frontdoor_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'sku', 'type': 'Sku'},
'resource_state': {'key': 'properties.resourceState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'frontdoor_id': {'key': 'properties.frontdoorId', 'type': 'str'},
}
def __init__(
self,
*,
location: str,
sku: "Sku",
tags: Optional[Dict[str, str]] = None,
**kwargs
):
super(Profile, self).__init__(location=location, tags=tags, **kwargs)
self.sku = sku
self.resource_state = None
self.provisioning_state = None
self.frontdoor_id = None
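# Illustrative sketch (not part of the generated code): location and sku are the only
# required constructor arguments. The Sku model is defined elsewhere in this module and is
# assumed here to accept a `name` keyword.
#
#   profile = Profile(
#       location="WestUs",
#       sku=Sku(name="Standard_Microsoft"),
#       tags={"environment": "test"},
#   )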
class ProfileListResult(msrest.serialization.Model):
"""Result of the request to list profiles. It contains a list of profile objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of CDN profiles within a resource group.
:vartype value: list[~azure.mgmt.cdn.models.Profile]
:param next_link: URL to get the next set of profile objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Profile]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(ProfileListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class ProfileUpdateParameters(msrest.serialization.Model):
"""Properties required to update a profile.
:param tags: A set of tags. Profile tags.
:type tags: dict[str, str]
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
super(ProfileUpdateParameters, self).__init__(**kwargs)
self.tags = tags
class ProxyResource(Resource):
"""The resource model definition for a ARM proxy resource. It will have everything other than required location and tags.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
}
def __init__(
self,
**kwargs
):
super(ProxyResource, self).__init__(**kwargs)
class PurgeParameters(msrest.serialization.Model):
"""Parameters required for content purge.
All required parameters must be populated in order to send to Azure.
:param content_paths: Required. The path to the content to be purged. Can describe a file path
or a wildcard directory.
:type content_paths: list[str]
"""
_validation = {
'content_paths': {'required': True},
}
_attribute_map = {
'content_paths': {'key': 'contentPaths', 'type': '[str]'},
}
def __init__(
self,
*,
content_paths: List[str],
**kwargs
):
super(PurgeParameters, self).__init__(**kwargs)
self.content_paths = content_paths
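# Illustrative sketch (not part of the generated code): purge paths may be single files or
# wildcard directories, per the docstring above.
#
#   purge = PurgeParameters(content_paths=["/scripts/app.js", "/pictures/*"])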
class QueryStringMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for QueryString match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleQueryStringConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"Equal", "Contains", "BeginsWith", "EndsWith", "LessThan", "LessThanOrEqual", "GreaterThan",
"GreaterThanOrEqual", "RegEx".
:type operator: str or ~azure.mgmt.cdn.models.QueryStringOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleQueryStringConditionParameters"
def __init__(
self,
*,
operator: Union[str, "QueryStringOperator"],
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(QueryStringMatchConditionParameters, self).__init__(**kwargs)
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
class RankingsResponse(msrest.serialization.Model):
"""Rankings Response.
:param date_time_begin:
:type date_time_begin: ~datetime.datetime
:param date_time_end:
:type date_time_end: ~datetime.datetime
:param tables:
:type tables: list[~azure.mgmt.cdn.models.RankingsResponseTablesItem]
"""
_attribute_map = {
'date_time_begin': {'key': 'dateTimeBegin', 'type': 'iso-8601'},
'date_time_end': {'key': 'dateTimeEnd', 'type': 'iso-8601'},
'tables': {'key': 'tables', 'type': '[RankingsResponseTablesItem]'},
}
def __init__(
self,
*,
date_time_begin: Optional[datetime.datetime] = None,
date_time_end: Optional[datetime.datetime] = None,
tables: Optional[List["RankingsResponseTablesItem"]] = None,
**kwargs
):
super(RankingsResponse, self).__init__(**kwargs)
self.date_time_begin = date_time_begin
self.date_time_end = date_time_end
self.tables = tables
class RankingsResponseTablesItem(msrest.serialization.Model):
"""RankingsResponseTablesItem.
:param ranking:
:type ranking: str
:param data:
:type data: list[~azure.mgmt.cdn.models.RankingsResponseTablesPropertiesItemsItem]
"""
_attribute_map = {
'ranking': {'key': 'ranking', 'type': 'str'},
'data': {'key': 'data', 'type': '[RankingsResponseTablesPropertiesItemsItem]'},
}
def __init__(
self,
*,
ranking: Optional[str] = None,
data: Optional[List["RankingsResponseTablesPropertiesItemsItem"]] = None,
**kwargs
):
super(RankingsResponseTablesItem, self).__init__(**kwargs)
self.ranking = ranking
self.data = data
class RankingsResponseTablesPropertiesItemsItem(msrest.serialization.Model):
"""RankingsResponseTablesPropertiesItemsItem.
:param name:
:type name: str
:param metrics:
:type metrics: list[~azure.mgmt.cdn.models.RankingsResponseTablesPropertiesItemsMetricsItem]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'metrics': {'key': 'metrics', 'type': '[RankingsResponseTablesPropertiesItemsMetricsItem]'},
}
def __init__(
self,
*,
name: Optional[str] = None,
metrics: Optional[List["RankingsResponseTablesPropertiesItemsMetricsItem"]] = None,
**kwargs
):
super(RankingsResponseTablesPropertiesItemsItem, self).__init__(**kwargs)
self.name = name
self.metrics = metrics
class RankingsResponseTablesPropertiesItemsMetricsItem(msrest.serialization.Model):
"""RankingsResponseTablesPropertiesItemsMetricsItem.
:param metric:
:type metric: str
:param value:
:type value: long
:param percentage:
:type percentage: float
"""
_attribute_map = {
'metric': {'key': 'metric', 'type': 'str'},
'value': {'key': 'value', 'type': 'long'},
'percentage': {'key': 'percentage', 'type': 'float'},
}
def __init__(
self,
*,
metric: Optional[str] = None,
value: Optional[int] = None,
percentage: Optional[float] = None,
**kwargs
):
super(RankingsResponseTablesPropertiesItemsMetricsItem, self).__init__(**kwargs)
self.metric = metric
self.value = value
self.percentage = percentage
class RateLimitRule(CustomRule):
"""Defines a rate limiting rule that can be included in a waf policy.
All required parameters must be populated in order to send to Azure.
:param name: Required. Defines the name of the custom rule.
:type name: str
:param enabled_state: Describes if the custom rule is in enabled or disabled state. Defaults to
Enabled if not specified. Possible values include: "Disabled", "Enabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.CustomRuleEnabledState
:param priority: Required. Defines in what order this rule will be evaluated in the overall
list of custom rules.
:type priority: int
:param match_conditions: Required. List of match conditions.
:type match_conditions: list[~azure.mgmt.cdn.models.MatchCondition]
:param action: Required. Describes what action to be applied when rule matches. Possible values
include: "Allow", "Block", "Log", "Redirect".
:type action: str or ~azure.mgmt.cdn.models.ActionType
:param rate_limit_threshold: Required. Defines rate limit threshold.
:type rate_limit_threshold: int
:param rate_limit_duration_in_minutes: Required. Defines rate limit duration. Default is 1
minute.
:type rate_limit_duration_in_minutes: int
"""
_validation = {
'name': {'required': True},
'priority': {'required': True, 'maximum': 1000, 'minimum': 0},
'match_conditions': {'required': True},
'action': {'required': True},
'rate_limit_threshold': {'required': True, 'minimum': 0},
'rate_limit_duration_in_minutes': {'required': True, 'maximum': 60, 'minimum': 0},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'enabled_state': {'key': 'enabledState', 'type': 'str'},
'priority': {'key': 'priority', 'type': 'int'},
'match_conditions': {'key': 'matchConditions', 'type': '[MatchCondition]'},
'action': {'key': 'action', 'type': 'str'},
'rate_limit_threshold': {'key': 'rateLimitThreshold', 'type': 'int'},
'rate_limit_duration_in_minutes': {'key': 'rateLimitDurationInMinutes', 'type': 'int'},
}
def __init__(
self,
*,
name: str,
priority: int,
match_conditions: List["MatchCondition"],
action: Union[str, "ActionType"],
rate_limit_threshold: int,
rate_limit_duration_in_minutes: int,
enabled_state: Optional[Union[str, "CustomRuleEnabledState"]] = None,
**kwargs
):
super(RateLimitRule, self).__init__(name=name, enabled_state=enabled_state, priority=priority, match_conditions=match_conditions, action=action, **kwargs)
self.rate_limit_threshold = rate_limit_threshold
self.rate_limit_duration_in_minutes = rate_limit_duration_in_minutes
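# Illustrative sketch (not part of the generated code): a rate limiting rule that blocks
# requests once the threshold is exceeded within the duration window (validated to 0-60
# minutes). `some_match_condition` stands in for a MatchCondition instance built elsewhere.
#
#   rule = RateLimitRule(
#       name="rateLimitFast",
#       priority=1,
#       match_conditions=[some_match_condition],
#       action="Block",
#       rate_limit_threshold=1000,
#       rate_limit_duration_in_minutes=1,
#   )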
class RateLimitRuleList(msrest.serialization.Model):
"""Defines contents of rate limit rules.
:param rules: List of rules.
:type rules: list[~azure.mgmt.cdn.models.RateLimitRule]
"""
_attribute_map = {
'rules': {'key': 'rules', 'type': '[RateLimitRule]'},
}
def __init__(
self,
*,
rules: Optional[List["RateLimitRule"]] = None,
**kwargs
):
super(RateLimitRuleList, self).__init__(**kwargs)
self.rules = rules
class RemoteAddressMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for RemoteAddress match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleRemoteAddressConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"IPMatch", "GeoMatch".
:type operator: str or ~azure.mgmt.cdn.models.RemoteAddressOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: Match values to match against. The operator will apply to each value in
here with OR semantics. If any of them match the variable with the given operator this match
condition is considered a match.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleRemoteAddressConditionParameters"
def __init__(
self,
*,
operator: Union[str, "RemoteAddressOperator"],
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(RemoteAddressMatchConditionParameters, self).__init__(**kwargs)
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
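# Illustrative sketch (not part of the generated code): matching client addresses against
# CIDR ranges, applied with OR semantics as described in the docstring above.
#
#   condition = RemoteAddressMatchConditionParameters(
#       operator="IPMatch",
#       match_values=["192.168.1.0/24", "10.0.0.0/8"],
#       negate_condition=False,
#   )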
class RequestBodyMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for RequestBody match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleRequestBodyConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"Equal", "Contains", "BeginsWith", "EndsWith", "LessThan", "LessThanOrEqual", "GreaterThan",
"GreaterThanOrEqual", "RegEx".
:type operator: str or ~azure.mgmt.cdn.models.RequestBodyOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleRequestBodyConditionParameters"
def __init__(
self,
*,
operator: Union[str, "RequestBodyOperator"],
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(RequestBodyMatchConditionParameters, self).__init__(**kwargs)
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
class RequestHeaderMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for RequestHeader match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleRequestHeaderConditionParameters".
:vartype odata_type: str
:param selector: Name of Header to be matched.
:type selector: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"Equal", "Contains", "BeginsWith", "EndsWith", "LessThan", "LessThanOrEqual", "GreaterThan",
"GreaterThanOrEqual", "RegEx".
:type operator: str or ~azure.mgmt.cdn.models.RequestHeaderOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'selector': {'key': 'selector', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleRequestHeaderConditionParameters"
def __init__(
self,
*,
operator: Union[str, "RequestHeaderOperator"],
selector: Optional[str] = None,
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(RequestHeaderMatchConditionParameters, self).__init__(**kwargs)
self.selector = selector
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
class RequestMethodMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for RequestMethod match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleRequestMethodConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Equal".
:type operator: str or ~azure.mgmt.cdn.models.RequestMethodOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str or
~azure.mgmt.cdn.models.RequestMethodMatchConditionParametersMatchValuesItem]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleRequestMethodConditionParameters"
def __init__(
self,
*,
operator: Union[str, "RequestMethodOperator"],
negate_condition: Optional[bool] = None,
match_values: Optional[List[Union[str, "RequestMethodMatchConditionParametersMatchValuesItem"]]] = None,
**kwargs
):
super(RequestMethodMatchConditionParameters, self).__init__(**kwargs)
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
class RequestSchemeMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for RequestScheme match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleRequestSchemeConditionParameters".
:vartype odata_type: str
:ivar operator: Required. Describes operator to be matched. Default value: "Equal".
:vartype operator: str
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str or
~azure.mgmt.cdn.models.RequestSchemeMatchConditionParametersMatchValuesItem]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True, 'constant': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleRequestSchemeConditionParameters"
operator = "Equal"
def __init__(
self,
*,
negate_condition: Optional[bool] = None,
match_values: Optional[List[Union[str, "RequestSchemeMatchConditionParametersMatchValuesItem"]]] = None,
**kwargs
):
super(RequestSchemeMatchConditionParameters, self).__init__(**kwargs)
self.negate_condition = negate_condition
self.match_values = match_values
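# Illustrative sketch (not part of the generated code): the operator is a class-level
# constant ("Equal"), so only the negate flag and match values are supplied; "HTTP" is
# assumed to be a valid RequestSchemeMatchConditionParametersMatchValuesItem value.
#
#   condition = RequestSchemeMatchConditionParameters(match_values=["HTTP"])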
class RequestUriMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for RequestUri match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleRequestUriConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"Equal", "Contains", "BeginsWith", "EndsWith", "LessThan", "LessThanOrEqual", "GreaterThan",
"GreaterThanOrEqual", "RegEx".
:type operator: str or ~azure.mgmt.cdn.models.RequestUriOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleRequestUriConditionParameters"
def __init__(
self,
*,
operator: Union[str, "RequestUriOperator"],
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(RequestUriMatchConditionParameters, self).__init__(**kwargs)
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
class ResourceReference(msrest.serialization.Model):
"""Reference to another resource.
:param id: Resource ID.
:type id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
**kwargs
):
super(ResourceReference, self).__init__(**kwargs)
self.id = id
class ResourcesResponse(msrest.serialization.Model):
"""Resources Response.
:param endpoints:
:type endpoints: list[~azure.mgmt.cdn.models.ResourcesResponseEndpointsItem]
:param custom_domains:
:type custom_domains: list[~azure.mgmt.cdn.models.ResourcesResponseCustomDomainsItem]
"""
_attribute_map = {
'endpoints': {'key': 'endpoints', 'type': '[ResourcesResponseEndpointsItem]'},
'custom_domains': {'key': 'customDomains', 'type': '[ResourcesResponseCustomDomainsItem]'},
}
def __init__(
self,
*,
endpoints: Optional[List["ResourcesResponseEndpointsItem"]] = None,
custom_domains: Optional[List["ResourcesResponseCustomDomainsItem"]] = None,
**kwargs
):
super(ResourcesResponse, self).__init__(**kwargs)
self.endpoints = endpoints
self.custom_domains = custom_domains
class ResourcesResponseCustomDomainsItem(msrest.serialization.Model):
"""ResourcesResponseCustomDomainsItem.
:param id:
:type id: str
:param name:
:type name: str
:param endpoint_id:
:type endpoint_id: str
:param history:
:type history: bool
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'endpoint_id': {'key': 'endpointId', 'type': 'str'},
'history': {'key': 'history', 'type': 'bool'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
endpoint_id: Optional[str] = None,
history: Optional[bool] = None,
**kwargs
):
super(ResourcesResponseCustomDomainsItem, self).__init__(**kwargs)
self.id = id
self.name = name
self.endpoint_id = endpoint_id
self.history = history
class ResourcesResponseEndpointsItem(msrest.serialization.Model):
"""ResourcesResponseEndpointsItem.
:param id:
:type id: str
:param name:
:type name: str
:param history:
:type history: bool
:param custom_domains:
:type custom_domains:
list[~azure.mgmt.cdn.models.ResourcesResponseEndpointsPropertiesItemsItem]
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'history': {'key': 'history', 'type': 'bool'},
'custom_domains': {'key': 'customDomains', 'type': '[ResourcesResponseEndpointsPropertiesItemsItem]'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
history: Optional[bool] = None,
custom_domains: Optional[List["ResourcesResponseEndpointsPropertiesItemsItem"]] = None,
**kwargs
):
super(ResourcesResponseEndpointsItem, self).__init__(**kwargs)
self.id = id
self.name = name
self.history = history
self.custom_domains = custom_domains
class ResourcesResponseEndpointsPropertiesItemsItem(msrest.serialization.Model):
"""ResourcesResponseEndpointsPropertiesItemsItem.
:param id:
:type id: str
:param name:
:type name: str
:param endpoint_id:
:type endpoint_id: str
:param history:
:type history: bool
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'endpoint_id': {'key': 'endpointId', 'type': 'str'},
'history': {'key': 'history', 'type': 'bool'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
endpoint_id: Optional[str] = None,
history: Optional[bool] = None,
**kwargs
):
super(ResourcesResponseEndpointsPropertiesItemsItem, self).__init__(**kwargs)
self.id = id
self.name = name
self.endpoint_id = endpoint_id
self.history = history
class ResourceUsage(msrest.serialization.Model):
"""Output of check resource usage API.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar resource_type: Resource type for which the usage is provided.
:vartype resource_type: str
:ivar unit: Unit of the usage. e.g. Count.
:vartype unit: str
:ivar current_value: Actual value of usage on the specified resource type.
:vartype current_value: int
:ivar limit: Quota of the specified resource type.
:vartype limit: int
"""
_validation = {
'resource_type': {'readonly': True},
'unit': {'readonly': True},
'current_value': {'readonly': True},
'limit': {'readonly': True},
}
_attribute_map = {
'resource_type': {'key': 'resourceType', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'current_value': {'key': 'currentValue', 'type': 'int'},
'limit': {'key': 'limit', 'type': 'int'},
}
def __init__(
self,
**kwargs
):
super(ResourceUsage, self).__init__(**kwargs)
self.resource_type = None
self.unit = None
self.current_value = None
self.limit = None
class ResourceUsageListResult(msrest.serialization.Model):
"""Output of check resource usage API.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of resource usages.
:vartype value: list[~azure.mgmt.cdn.models.ResourceUsage]
:param next_link: URL to get the next set of custom domain objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ResourceUsage]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(ResourceUsageListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class ResponseBasedOriginErrorDetectionParameters(msrest.serialization.Model):
"""The JSON object that contains the properties to determine origin health using real requests/responses.
:param response_based_detected_error_types: Type of response errors for real user requests for
which origin will be deemed unhealthy. Possible values include: "None", "TcpErrorsOnly",
"TcpAndHttpErrors".
:type response_based_detected_error_types: str or
~azure.mgmt.cdn.models.ResponseBasedDetectedErrorTypes
:param response_based_failover_threshold_percentage: The percentage of failed requests in the
sample where failover should trigger.
:type response_based_failover_threshold_percentage: int
:param http_error_ranges: The list of HTTP status code ranges that are considered server
errors for the origin, which is then marked as unhealthy.
:type http_error_ranges: list[~azure.mgmt.cdn.models.HttpErrorRangeParameters]
"""
_validation = {
'response_based_failover_threshold_percentage': {'maximum': 100, 'minimum': 0},
}
_attribute_map = {
'response_based_detected_error_types': {'key': 'responseBasedDetectedErrorTypes', 'type': 'str'},
'response_based_failover_threshold_percentage': {'key': 'responseBasedFailoverThresholdPercentage', 'type': 'int'},
'http_error_ranges': {'key': 'httpErrorRanges', 'type': '[HttpErrorRangeParameters]'},
}
def __init__(
self,
*,
response_based_detected_error_types: Optional[Union[str, "ResponseBasedDetectedErrorTypes"]] = None,
response_based_failover_threshold_percentage: Optional[int] = None,
http_error_ranges: Optional[List["HttpErrorRangeParameters"]] = None,
**kwargs
):
super(ResponseBasedOriginErrorDetectionParameters, self).__init__(**kwargs)
self.response_based_detected_error_types = response_based_detected_error_types
self.response_based_failover_threshold_percentage = response_based_failover_threshold_percentage
self.http_error_ranges = http_error_ranges
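# Illustrative sketch (not part of the generated code): fail over when half of the sampled
# requests hit TCP or HTTP errors. HttpErrorRangeParameters is defined elsewhere in this
# module; the `begin`/`end` field names used below are assumptions.
#
#   detection = ResponseBasedOriginErrorDetectionParameters(
#       response_based_detected_error_types="TcpAndHttpErrors",
#       response_based_failover_threshold_percentage=50,
#       http_error_ranges=[HttpErrorRangeParameters(begin=500, end=599)],
#   )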
class Route(Resource):
"""Friendly Routes name mapping to the any Routes or secret related information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param custom_domains: Domains referenced by this endpoint.
:type custom_domains: list[~azure.mgmt.cdn.models.ResourceReference]
:param origin_group: A reference to the origin group.
:type origin_group: ~azure.mgmt.cdn.models.ResourceReference
:param origin_path: A directory path on the origin that AzureFrontDoor can use to retrieve
content from, e.g. contoso.cloudapp.net/originpath.
:type origin_path: str
:param rule_sets: rule sets referenced by this endpoint.
:type rule_sets: list[~azure.mgmt.cdn.models.ResourceReference]
:param supported_protocols: List of supported protocols for this route.
:type supported_protocols: list[str or ~azure.mgmt.cdn.models.AFDEndpointProtocols]
:param patterns_to_match: The route patterns of the rule.
:type patterns_to_match: list[str]
:param compression_settings: compression settings.
:type compression_settings: object
:param query_string_caching_behavior: Defines how CDN caches requests that include query
strings. You can ignore any query strings when caching, bypass caching to prevent requests that
contain query strings from being cached, or cache every request with a unique URL. Possible
values include: "IgnoreQueryString", "UseQueryString", "NotSet".
:type query_string_caching_behavior: str or
~azure.mgmt.cdn.models.AfdQueryStringCachingBehavior
:param forwarding_protocol: Protocol this rule will use when forwarding traffic to backends.
Possible values include: "HttpOnly", "HttpsOnly", "MatchRequest".
:type forwarding_protocol: str or ~azure.mgmt.cdn.models.ForwardingProtocol
:param link_to_default_domain: whether this route will be linked to the default endpoint
domain. Possible values include: "Enabled", "Disabled".
:type link_to_default_domain: str or ~azure.mgmt.cdn.models.LinkToDefaultDomain
:param https_redirect: Whether to automatically redirect HTTP traffic to HTTPS traffic. Note
that this is an easy way to set up this rule and it will be the first rule that gets executed.
Possible values include: "Enabled", "Disabled".
:type https_redirect: str or ~azure.mgmt.cdn.models.HttpsRedirect
:param enabled_state: Whether to enable use of this rule. Permitted values are 'Enabled' or
'Disabled'. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'custom_domains': {'key': 'properties.customDomains', 'type': '[ResourceReference]'},
'origin_group': {'key': 'properties.originGroup', 'type': 'ResourceReference'},
'origin_path': {'key': 'properties.originPath', 'type': 'str'},
'rule_sets': {'key': 'properties.ruleSets', 'type': '[ResourceReference]'},
'supported_protocols': {'key': 'properties.supportedProtocols', 'type': '[str]'},
'patterns_to_match': {'key': 'properties.patternsToMatch', 'type': '[str]'},
'compression_settings': {'key': 'properties.compressionSettings', 'type': 'object'},
'query_string_caching_behavior': {'key': 'properties.queryStringCachingBehavior', 'type': 'str'},
'forwarding_protocol': {'key': 'properties.forwardingProtocol', 'type': 'str'},
'link_to_default_domain': {'key': 'properties.linkToDefaultDomain', 'type': 'str'},
'https_redirect': {'key': 'properties.httpsRedirect', 'type': 'str'},
'enabled_state': {'key': 'properties.enabledState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'deployment_status': {'key': 'properties.deploymentStatus', 'type': 'str'},
}
def __init__(
self,
*,
custom_domains: Optional[List["ResourceReference"]] = None,
origin_group: Optional["ResourceReference"] = None,
origin_path: Optional[str] = None,
rule_sets: Optional[List["ResourceReference"]] = None,
supported_protocols: Optional[List[Union[str, "AFDEndpointProtocols"]]] = None,
patterns_to_match: Optional[List[str]] = None,
compression_settings: Optional[object] = None,
query_string_caching_behavior: Optional[Union[str, "AfdQueryStringCachingBehavior"]] = None,
forwarding_protocol: Optional[Union[str, "ForwardingProtocol"]] = None,
link_to_default_domain: Optional[Union[str, "LinkToDefaultDomain"]] = None,
https_redirect: Optional[Union[str, "HttpsRedirect"]] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(Route, self).__init__(**kwargs)
self.custom_domains = custom_domains
self.origin_group = origin_group
self.origin_path = origin_path
self.rule_sets = rule_sets
self.supported_protocols = supported_protocols
self.patterns_to_match = patterns_to_match
self.compression_settings = compression_settings
self.query_string_caching_behavior = query_string_caching_behavior
self.forwarding_protocol = forwarding_protocol
self.link_to_default_domain = link_to_default_domain
self.https_redirect = https_redirect
self.enabled_state = enabled_state
self.provisioning_state = None
self.deployment_status = None
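# Illustrative sketch (not part of the generated code): a minimal AFD route wiring an
# endpoint to an origin group. Enum-like values are passed as their documented strings, the
# resource ID is a placeholder, and "Https" is assumed to be an AFDEndpointProtocols member.
#
#   route = Route(
#       origin_group=ResourceReference(id="<origin-group-resource-id>"),
#       patterns_to_match=["/*"],
#       supported_protocols=["Https"],
#       forwarding_protocol="MatchRequest",
#       https_redirect="Enabled",
#       link_to_default_domain="Enabled",
#       enabled_state="Enabled",
#   )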
class RouteListResult(msrest.serialization.Model):
"""Result of the request to list routes. It contains a list of route objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of AzureFrontDoor routes within a profile.
:vartype value: list[~azure.mgmt.cdn.models.Route]
:param next_link: URL to get the next set of route objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Route]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(RouteListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class RouteUpdatePropertiesParameters(msrest.serialization.Model):
"""The JSON object that contains the properties of the domain to create.
:param custom_domains: Domains referenced by this endpoint.
:type custom_domains: list[~azure.mgmt.cdn.models.ResourceReference]
:param origin_group: A reference to the origin group.
:type origin_group: ~azure.mgmt.cdn.models.ResourceReference
:param origin_path: A directory path on the origin that AzureFrontDoor can use to retrieve
content from, e.g. contoso.cloudapp.net/originpath.
:type origin_path: str
:param rule_sets: Rule sets referenced by this endpoint.
:type rule_sets: list[~azure.mgmt.cdn.models.ResourceReference]
:param supported_protocols: List of supported protocols for this route.
:type supported_protocols: list[str or ~azure.mgmt.cdn.models.AFDEndpointProtocols]
:param patterns_to_match: The route patterns of the rule.
:type patterns_to_match: list[str]
:param compression_settings: Compression settings.
:type compression_settings: object
:param query_string_caching_behavior: Defines how CDN caches requests that include query
strings. You can ignore any query strings when caching, bypass caching to prevent requests that
contain query strings from being cached, or cache every request with a unique URL. Possible
values include: "IgnoreQueryString", "UseQueryString", "NotSet".
:type query_string_caching_behavior: str or
~azure.mgmt.cdn.models.AfdQueryStringCachingBehavior
:param forwarding_protocol: Protocol this rule will use when forwarding traffic to backends.
Possible values include: "HttpOnly", "HttpsOnly", "MatchRequest".
:type forwarding_protocol: str or ~azure.mgmt.cdn.models.ForwardingProtocol
:param link_to_default_domain: Whether this route will be linked to the default endpoint
domain. Possible values include: "Enabled", "Disabled".
:type link_to_default_domain: str or ~azure.mgmt.cdn.models.LinkToDefaultDomain
:param https_redirect: Whether to automatically redirect HTTP traffic to HTTPS traffic. Note
that this is an easy way to set up this rule, and it will be the first rule that gets executed.
Possible values include: "Enabled", "Disabled".
:type https_redirect: str or ~azure.mgmt.cdn.models.HttpsRedirect
:param enabled_state: Whether to enable use of this rule. Permitted values are 'Enabled' or
'Disabled'. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
"""
_attribute_map = {
'custom_domains': {'key': 'customDomains', 'type': '[ResourceReference]'},
'origin_group': {'key': 'originGroup', 'type': 'ResourceReference'},
'origin_path': {'key': 'originPath', 'type': 'str'},
'rule_sets': {'key': 'ruleSets', 'type': '[ResourceReference]'},
'supported_protocols': {'key': 'supportedProtocols', 'type': '[str]'},
'patterns_to_match': {'key': 'patternsToMatch', 'type': '[str]'},
'compression_settings': {'key': 'compressionSettings', 'type': 'object'},
'query_string_caching_behavior': {'key': 'queryStringCachingBehavior', 'type': 'str'},
'forwarding_protocol': {'key': 'forwardingProtocol', 'type': 'str'},
'link_to_default_domain': {'key': 'linkToDefaultDomain', 'type': 'str'},
'https_redirect': {'key': 'httpsRedirect', 'type': 'str'},
'enabled_state': {'key': 'enabledState', 'type': 'str'},
}
def __init__(
self,
*,
custom_domains: Optional[List["ResourceReference"]] = None,
origin_group: Optional["ResourceReference"] = None,
origin_path: Optional[str] = None,
rule_sets: Optional[List["ResourceReference"]] = None,
supported_protocols: Optional[List[Union[str, "AFDEndpointProtocols"]]] = None,
patterns_to_match: Optional[List[str]] = None,
compression_settings: Optional[object] = None,
query_string_caching_behavior: Optional[Union[str, "AfdQueryStringCachingBehavior"]] = None,
forwarding_protocol: Optional[Union[str, "ForwardingProtocol"]] = None,
link_to_default_domain: Optional[Union[str, "LinkToDefaultDomain"]] = None,
https_redirect: Optional[Union[str, "HttpsRedirect"]] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(RouteUpdatePropertiesParameters, self).__init__(**kwargs)
self.custom_domains = custom_domains
self.origin_group = origin_group
self.origin_path = origin_path
self.rule_sets = rule_sets
self.supported_protocols = supported_protocols
self.patterns_to_match = patterns_to_match
self.compression_settings = compression_settings
self.query_string_caching_behavior = query_string_caching_behavior
self.forwarding_protocol = forwarding_protocol
self.link_to_default_domain = link_to_default_domain
self.https_redirect = https_redirect
self.enabled_state = enabled_state
class RouteProperties(AFDStateProperties, RouteUpdatePropertiesParameters):
"""The JSON object that contains the properties of the Routes to create.
Variables are only populated by the server, and will be ignored when sending a request.
:param custom_domains: Domains referenced by this endpoint.
:type custom_domains: list[~azure.mgmt.cdn.models.ResourceReference]
:param origin_group: A reference to the origin group.
:type origin_group: ~azure.mgmt.cdn.models.ResourceReference
:param origin_path: A directory path on the origin that AzureFrontDoor can use to retrieve
content from, e.g. contoso.cloudapp.net/originpath.
:type origin_path: str
:param rule_sets: Rule sets referenced by this endpoint.
:type rule_sets: list[~azure.mgmt.cdn.models.ResourceReference]
:param supported_protocols: List of supported protocols for this route.
:type supported_protocols: list[str or ~azure.mgmt.cdn.models.AFDEndpointProtocols]
:param patterns_to_match: The route patterns of the rule.
:type patterns_to_match: list[str]
:param compression_settings: Compression settings.
:type compression_settings: object
:param query_string_caching_behavior: Defines how CDN caches requests that include query
strings. You can ignore any query strings when caching, bypass caching to prevent requests that
contain query strings from being cached, or cache every request with a unique URL. Possible
values include: "IgnoreQueryString", "UseQueryString", "NotSet".
:type query_string_caching_behavior: str or
~azure.mgmt.cdn.models.AfdQueryStringCachingBehavior
:param forwarding_protocol: Protocol this rule will use when forwarding traffic to backends.
Possible values include: "HttpOnly", "HttpsOnly", "MatchRequest".
:type forwarding_protocol: str or ~azure.mgmt.cdn.models.ForwardingProtocol
:param link_to_default_domain: Whether this route will be linked to the default endpoint
domain. Possible values include: "Enabled", "Disabled".
:type link_to_default_domain: str or ~azure.mgmt.cdn.models.LinkToDefaultDomain
:param https_redirect: Whether to automatically redirect HTTP traffic to HTTPS traffic. Note
that this is an easy way to set up this rule, and it will be the first rule that gets executed.
Possible values include: "Enabled", "Disabled".
:type https_redirect: str or ~azure.mgmt.cdn.models.HttpsRedirect
:param enabled_state: Whether to enable use of this rule. Permitted values are 'Enabled' or
'Disabled'. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'custom_domains': {'key': 'customDomains', 'type': '[ResourceReference]'},
'origin_group': {'key': 'originGroup', 'type': 'ResourceReference'},
'origin_path': {'key': 'originPath', 'type': 'str'},
'rule_sets': {'key': 'ruleSets', 'type': '[ResourceReference]'},
'supported_protocols': {'key': 'supportedProtocols', 'type': '[str]'},
'patterns_to_match': {'key': 'patternsToMatch', 'type': '[str]'},
'compression_settings': {'key': 'compressionSettings', 'type': 'object'},
'query_string_caching_behavior': {'key': 'queryStringCachingBehavior', 'type': 'str'},
'forwarding_protocol': {'key': 'forwardingProtocol', 'type': 'str'},
'link_to_default_domain': {'key': 'linkToDefaultDomain', 'type': 'str'},
'https_redirect': {'key': 'httpsRedirect', 'type': 'str'},
'enabled_state': {'key': 'enabledState', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'str'},
}
def __init__(
self,
*,
custom_domains: Optional[List["ResourceReference"]] = None,
origin_group: Optional["ResourceReference"] = None,
origin_path: Optional[str] = None,
rule_sets: Optional[List["ResourceReference"]] = None,
supported_protocols: Optional[List[Union[str, "AFDEndpointProtocols"]]] = None,
patterns_to_match: Optional[List[str]] = None,
compression_settings: Optional[object] = None,
query_string_caching_behavior: Optional[Union[str, "AfdQueryStringCachingBehavior"]] = None,
forwarding_protocol: Optional[Union[str, "ForwardingProtocol"]] = None,
link_to_default_domain: Optional[Union[str, "LinkToDefaultDomain"]] = None,
https_redirect: Optional[Union[str, "HttpsRedirect"]] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(RouteProperties, self).__init__(custom_domains=custom_domains, origin_group=origin_group, origin_path=origin_path, rule_sets=rule_sets, supported_protocols=supported_protocols, patterns_to_match=patterns_to_match, compression_settings=compression_settings, query_string_caching_behavior=query_string_caching_behavior, forwarding_protocol=forwarding_protocol, link_to_default_domain=link_to_default_domain, https_redirect=https_redirect, enabled_state=enabled_state, **kwargs)
self.custom_domains = custom_domains
self.origin_group = origin_group
self.origin_path = origin_path
self.rule_sets = rule_sets
self.supported_protocols = supported_protocols
self.patterns_to_match = patterns_to_match
self.compression_settings = compression_settings
self.query_string_caching_behavior = query_string_caching_behavior
self.forwarding_protocol = forwarding_protocol
self.link_to_default_domain = link_to_default_domain
self.https_redirect = https_redirect
self.enabled_state = enabled_state
self.provisioning_state = None
self.deployment_status = None
class RouteUpdateParameters(msrest.serialization.Model):
"""The domain JSON object required for domain creation or update.
:param custom_domains: Domains referenced by this endpoint.
:type custom_domains: list[~azure.mgmt.cdn.models.ResourceReference]
:param origin_group: A reference to the origin group.
:type origin_group: ~azure.mgmt.cdn.models.ResourceReference
:param origin_path: A directory path on the origin that AzureFrontDoor can use to retrieve
content from, e.g. contoso.cloudapp.net/originpath.
:type origin_path: str
:param rule_sets: Rule sets referenced by this endpoint.
:type rule_sets: list[~azure.mgmt.cdn.models.ResourceReference]
:param supported_protocols: List of supported protocols for this route.
:type supported_protocols: list[str or ~azure.mgmt.cdn.models.AFDEndpointProtocols]
:param patterns_to_match: The route patterns of the rule.
:type patterns_to_match: list[str]
:param compression_settings: Compression settings.
:type compression_settings: object
:param query_string_caching_behavior: Defines how CDN caches requests that include query
strings. You can ignore any query strings when caching, bypass caching to prevent requests that
contain query strings from being cached, or cache every request with a unique URL. Possible
values include: "IgnoreQueryString", "UseQueryString", "NotSet".
:type query_string_caching_behavior: str or
~azure.mgmt.cdn.models.AfdQueryStringCachingBehavior
:param forwarding_protocol: Protocol this rule will use when forwarding traffic to backends.
Possible values include: "HttpOnly", "HttpsOnly", "MatchRequest".
:type forwarding_protocol: str or ~azure.mgmt.cdn.models.ForwardingProtocol
:param link_to_default_domain: Whether this route will be linked to the default endpoint
domain. Possible values include: "Enabled", "Disabled".
:type link_to_default_domain: str or ~azure.mgmt.cdn.models.LinkToDefaultDomain
:param https_redirect: Whether to automatically redirect HTTP traffic to HTTPS traffic. Note
that this is an easy way to set up this rule, and it will be the first rule that gets executed.
Possible values include: "Enabled", "Disabled".
:type https_redirect: str or ~azure.mgmt.cdn.models.HttpsRedirect
:param enabled_state: Whether to enable use of this rule. Permitted values are 'Enabled' or
'Disabled'. Possible values include: "Enabled", "Disabled".
:type enabled_state: str or ~azure.mgmt.cdn.models.EnabledState
"""
_attribute_map = {
'custom_domains': {'key': 'properties.customDomains', 'type': '[ResourceReference]'},
'origin_group': {'key': 'properties.originGroup', 'type': 'ResourceReference'},
'origin_path': {'key': 'properties.originPath', 'type': 'str'},
'rule_sets': {'key': 'properties.ruleSets', 'type': '[ResourceReference]'},
'supported_protocols': {'key': 'properties.supportedProtocols', 'type': '[str]'},
'patterns_to_match': {'key': 'properties.patternsToMatch', 'type': '[str]'},
'compression_settings': {'key': 'properties.compressionSettings', 'type': 'object'},
'query_string_caching_behavior': {'key': 'properties.queryStringCachingBehavior', 'type': 'str'},
'forwarding_protocol': {'key': 'properties.forwardingProtocol', 'type': 'str'},
'link_to_default_domain': {'key': 'properties.linkToDefaultDomain', 'type': 'str'},
'https_redirect': {'key': 'properties.httpsRedirect', 'type': 'str'},
'enabled_state': {'key': 'properties.enabledState', 'type': 'str'},
}
def __init__(
self,
*,
custom_domains: Optional[List["ResourceReference"]] = None,
origin_group: Optional["ResourceReference"] = None,
origin_path: Optional[str] = None,
rule_sets: Optional[List["ResourceReference"]] = None,
supported_protocols: Optional[List[Union[str, "AFDEndpointProtocols"]]] = None,
patterns_to_match: Optional[List[str]] = None,
compression_settings: Optional[object] = None,
query_string_caching_behavior: Optional[Union[str, "AfdQueryStringCachingBehavior"]] = None,
forwarding_protocol: Optional[Union[str, "ForwardingProtocol"]] = None,
link_to_default_domain: Optional[Union[str, "LinkToDefaultDomain"]] = None,
https_redirect: Optional[Union[str, "HttpsRedirect"]] = None,
enabled_state: Optional[Union[str, "EnabledState"]] = None,
**kwargs
):
super(RouteUpdateParameters, self).__init__(**kwargs)
self.custom_domains = custom_domains
self.origin_group = origin_group
self.origin_path = origin_path
self.rule_sets = rule_sets
self.supported_protocols = supported_protocols
self.patterns_to_match = patterns_to_match
self.compression_settings = compression_settings
self.query_string_caching_behavior = query_string_caching_behavior
self.forwarding_protocol = forwarding_protocol
self.link_to_default_domain = link_to_default_domain
self.https_redirect = https_redirect
self.enabled_state = enabled_state
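# Usage sketch (illustrative only): RouteUpdateParameters mirrors Route but typically carries only
# the fields being changed, so a hypothetical partial update can supply just a couple of properties.
def _example_route_update():
    return RouteUpdateParameters(
        https_redirect="Enabled",
        enabled_state="Disabled",
    )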
class Rule(Resource):
"""Friendly Rules name mapping to the any Rules or secret related information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:param order: The order in which the rules are applied for the endpoint. Possible values
{0,1,2,3,...}. A rule with a lesser order will be applied before a rule with a greater order.
A rule with order 0 is a special rule: it does not require any condition, and the actions listed
in it will always be applied.
:type order: int
:param conditions: A list of conditions that must be matched for the actions to be executed.
:type conditions: list[~azure.mgmt.cdn.models.DeliveryRuleCondition]
:param actions: A list of actions that are executed when all the conditions of a rule are
satisfied.
:type actions: list[~azure.mgmt.cdn.models.DeliveryRuleAction]
:param match_processing_behavior: If this rule is a match, should the rules engine continue
running the remaining rules or stop. If not present, defaults to Continue. Possible values
include: "Continue", "Stop".
:type match_processing_behavior: str or ~azure.mgmt.cdn.models.MatchProcessingBehavior
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'order': {'key': 'properties.order', 'type': 'int'},
'conditions': {'key': 'properties.conditions', 'type': '[DeliveryRuleCondition]'},
'actions': {'key': 'properties.actions', 'type': '[DeliveryRuleAction]'},
'match_processing_behavior': {'key': 'properties.matchProcessingBehavior', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'deployment_status': {'key': 'properties.deploymentStatus', 'type': 'str'},
}
def __init__(
self,
*,
order: Optional[int] = None,
conditions: Optional[List["DeliveryRuleCondition"]] = None,
actions: Optional[List["DeliveryRuleAction"]] = None,
match_processing_behavior: Optional[Union[str, "MatchProcessingBehavior"]] = None,
**kwargs
):
super(Rule, self).__init__(**kwargs)
self.order = order
self.conditions = conditions
self.actions = actions
self.match_processing_behavior = match_processing_behavior
self.provisioning_state = None
self.deployment_status = None
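# Usage sketch (illustrative only): a hypothetical Rule skeleton. A real rule would populate
# ``conditions`` with DeliveryRuleCondition subclasses and ``actions`` with DeliveryRuleAction
# subclasses (for example UrlRedirectAction, defined later in this module).
def _example_rule_skeleton():
    return Rule(
        order=1,
        conditions=[],  # DeliveryRuleCondition instances would go here
        actions=[],  # DeliveryRuleAction instances would go here
        match_processing_behavior="Continue",
    )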
class RuleListResult(msrest.serialization.Model):
"""Result of the request to list rules. It contains a list of rule objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of AzureFrontDoor rules within a rule set.
:vartype value: list[~azure.mgmt.cdn.models.Rule]
:param next_link: URL to get the next set of rule objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Rule]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(RuleListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class RuleUpdatePropertiesParameters(msrest.serialization.Model):
"""The JSON object that contains the properties of the domain to create.
:param order: The order in which the rules are applied for the endpoint. Possible values
{0,1,2,3,...}. A rule with a lesser order will be applied before a rule with a greater order.
A rule with order 0 is a special rule: it does not require any condition, and the actions listed
in it will always be applied.
:type order: int
:param conditions: A list of conditions that must be matched for the actions to be executed.
:type conditions: list[~azure.mgmt.cdn.models.DeliveryRuleCondition]
:param actions: A list of actions that are executed when all the conditions of a rule are
satisfied.
:type actions: list[~azure.mgmt.cdn.models.DeliveryRuleAction]
:param match_processing_behavior: If this rule is a match, should the rules engine continue
running the remaining rules or stop. If not present, defaults to Continue. Possible values
include: "Continue", "Stop".
:type match_processing_behavior: str or ~azure.mgmt.cdn.models.MatchProcessingBehavior
"""
_attribute_map = {
'order': {'key': 'order', 'type': 'int'},
'conditions': {'key': 'conditions', 'type': '[DeliveryRuleCondition]'},
'actions': {'key': 'actions', 'type': '[DeliveryRuleAction]'},
'match_processing_behavior': {'key': 'matchProcessingBehavior', 'type': 'str'},
}
def __init__(
self,
*,
order: Optional[int] = None,
conditions: Optional[List["DeliveryRuleCondition"]] = None,
actions: Optional[List["DeliveryRuleAction"]] = None,
match_processing_behavior: Optional[Union[str, "MatchProcessingBehavior"]] = None,
**kwargs
):
super(RuleUpdatePropertiesParameters, self).__init__(**kwargs)
self.order = order
self.conditions = conditions
self.actions = actions
self.match_processing_behavior = match_processing_behavior
class RuleProperties(AFDStateProperties, RuleUpdatePropertiesParameters):
"""The JSON object that contains the properties of the Rules to create.
Variables are only populated by the server, and will be ignored when sending a request.
:param order: The order in which the rules are applied for the endpoint. Possible values
{0,1,2,3,...}. A rule with a lesser order will be applied before a rule with a greater order.
A rule with order 0 is a special rule: it does not require any condition, and the actions listed
in it will always be applied.
:type order: int
:param conditions: A list of conditions that must be matched for the actions to be executed.
:type conditions: list[~azure.mgmt.cdn.models.DeliveryRuleCondition]
:param actions: A list of actions that are executed when all the conditions of a rule are
satisfied.
:type actions: list[~azure.mgmt.cdn.models.DeliveryRuleAction]
:param match_processing_behavior: If this rule is a match, should the rules engine continue
running the remaining rules or stop. If not present, defaults to Continue. Possible values
include: "Continue", "Stop".
:type match_processing_behavior: str or ~azure.mgmt.cdn.models.MatchProcessingBehavior
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'order': {'key': 'order', 'type': 'int'},
'conditions': {'key': 'conditions', 'type': '[DeliveryRuleCondition]'},
'actions': {'key': 'actions', 'type': '[DeliveryRuleAction]'},
'match_processing_behavior': {'key': 'matchProcessingBehavior', 'type': 'str'},
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'str'},
}
def __init__(
self,
*,
order: Optional[int] = None,
conditions: Optional[List["DeliveryRuleCondition"]] = None,
actions: Optional[List["DeliveryRuleAction"]] = None,
match_processing_behavior: Optional[Union[str, "MatchProcessingBehavior"]] = None,
**kwargs
):
super(RuleProperties, self).__init__(order=order, conditions=conditions, actions=actions, match_processing_behavior=match_processing_behavior, **kwargs)
self.order = order
self.conditions = conditions
self.actions = actions
self.match_processing_behavior = match_processing_behavior
self.provisioning_state = None
self.deployment_status = None
class RuleSet(Resource):
"""Friendly RuleSet name mapping to the any RuleSet or secret related information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'deployment_status': {'key': 'properties.deploymentStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RuleSet, self).__init__(**kwargs)
self.provisioning_state = None
self.deployment_status = None
class RuleSetListResult(msrest.serialization.Model):
"""Result of the request to list rule sets. It contains a list of rule set objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of AzureFrontDoor rule sets within a profile.
:vartype value: list[~azure.mgmt.cdn.models.RuleSet]
:param next_link: URL to get the next set of rule set objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[RuleSet]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(RuleSetListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class RuleSetProperties(AFDStateProperties):
"""The JSON object that contains the properties of the Rule Set to create.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
"""
_validation = {
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(RuleSetProperties, self).__init__(**kwargs)
class RuleUpdateParameters(msrest.serialization.Model):
"""The domain JSON object required for domain creation or update.
:param order: The order in which the rules are applied for the endpoint. Possible values
{0,1,2,3,...}. A rule with a lesser order will be applied before a rule with a greater order.
A rule with order 0 is a special rule: it does not require any condition, and the actions listed
in it will always be applied.
:type order: int
:param conditions: A list of conditions that must be matched for the actions to be executed.
:type conditions: list[~azure.mgmt.cdn.models.DeliveryRuleCondition]
:param actions: A list of actions that are executed when all the conditions of a rule are
satisfied.
:type actions: list[~azure.mgmt.cdn.models.DeliveryRuleAction]
:param match_processing_behavior: If this rule is a match, should the rules engine continue
running the remaining rules or stop. If not present, defaults to Continue. Possible values
include: "Continue", "Stop".
:type match_processing_behavior: str or ~azure.mgmt.cdn.models.MatchProcessingBehavior
"""
_attribute_map = {
'order': {'key': 'properties.order', 'type': 'int'},
'conditions': {'key': 'properties.conditions', 'type': '[DeliveryRuleCondition]'},
'actions': {'key': 'properties.actions', 'type': '[DeliveryRuleAction]'},
'match_processing_behavior': {'key': 'properties.matchProcessingBehavior', 'type': 'str'},
}
def __init__(
self,
*,
order: Optional[int] = None,
conditions: Optional[List["DeliveryRuleCondition"]] = None,
actions: Optional[List["DeliveryRuleAction"]] = None,
match_processing_behavior: Optional[Union[str, "MatchProcessingBehavior"]] = None,
**kwargs
):
super(RuleUpdateParameters, self).__init__(**kwargs)
self.order = order
self.conditions = conditions
self.actions = actions
self.match_processing_behavior = match_processing_behavior
class Secret(Resource):
"""Friendly Secret name mapping to the any Secret or secret related information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
:param parameters: object which contains secret parameters.
:type parameters: ~azure.mgmt.cdn.models.SecretParameters
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'deployment_status': {'key': 'properties.deploymentStatus', 'type': 'str'},
'parameters': {'key': 'properties.parameters', 'type': 'SecretParameters'},
}
def __init__(
self,
*,
parameters: Optional["SecretParameters"] = None,
**kwargs
):
super(Secret, self).__init__(**kwargs)
self.provisioning_state = None
self.deployment_status = None
self.parameters = parameters
class SecretListResult(msrest.serialization.Model):
"""Result of the request to list secrets. It contains a list of Secret objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of AzureFrontDoor secrets within a profile.
:vartype value: list[~azure.mgmt.cdn.models.Secret]
:param next_link: URL to get the next set of Secret objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Secret]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(SecretListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class SecretProperties(AFDStateProperties):
"""The JSON object that contains the properties of the Secret to create.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
:param parameters: object which contains secret parameters.
:type parameters: ~azure.mgmt.cdn.models.SecretParameters
"""
_validation = {
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'SecretParameters'},
}
def __init__(
self,
*,
parameters: Optional["SecretParameters"] = None,
**kwargs
):
super(SecretProperties, self).__init__(**kwargs)
self.parameters = parameters
class SecurityPolicy(Resource):
"""SecurityPolicy association for AzureFrontDoor profile.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:ivar system_data: Read only system data.
:vartype system_data: ~azure.mgmt.cdn.models.SystemData
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
:param parameters: object which contains security policy parameters.
:type parameters: ~azure.mgmt.cdn.models.SecurityPolicyParameters
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'system_data': {'readonly': True},
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'system_data': {'key': 'systemData', 'type': 'SystemData'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'deployment_status': {'key': 'properties.deploymentStatus', 'type': 'str'},
'parameters': {'key': 'properties.parameters', 'type': 'SecurityPolicyParameters'},
}
def __init__(
self,
*,
parameters: Optional["SecurityPolicyParameters"] = None,
**kwargs
):
super(SecurityPolicy, self).__init__(**kwargs)
self.provisioning_state = None
self.deployment_status = None
self.parameters = parameters
class SecurityPolicyListResult(msrest.serialization.Model):
"""Result of the request to list security policies. It contains a list of security policy objects and a URL link to get the next set of results.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: List of Security policies within a profile.
:vartype value: list[~azure.mgmt.cdn.models.SecurityPolicy]
:param next_link: URL to get the next set of security policy objects if there are any.
:type next_link: str
"""
_validation = {
'value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SecurityPolicy]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
next_link: Optional[str] = None,
**kwargs
):
super(SecurityPolicyListResult, self).__init__(**kwargs)
self.value = None
self.next_link = next_link
class SecurityPolicyParameters(msrest.serialization.Model):
"""The json object containing security policy parameters.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: SecurityPolicyWebApplicationFirewallParameters.
All required parameters must be populated in order to send to Azure.
:param type: Required. The type of the Security policy to create. Constant filled by server.
Possible values include: "WebApplicationFirewall".
:type type: str or ~azure.mgmt.cdn.models.SecurityPolicyType
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
}
_subtype_map = {
'type': {'WebApplicationFirewall': 'SecurityPolicyWebApplicationFirewallParameters'}
}
def __init__(
self,
**kwargs
):
super(SecurityPolicyParameters, self).__init__(**kwargs)
self.type = None # type: Optional[str]
class SecurityPolicyProperties(AFDStateProperties):
"""The json object that contains properties required to create a security policy.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar provisioning_state: Provisioning status. Possible values include: "Succeeded", "Failed",
"Updating", "Deleting", "Creating".
:vartype provisioning_state: str or ~azure.mgmt.cdn.models.AfdProvisioningState
:ivar deployment_status: Possible values include: "NotStarted", "InProgress", "Succeeded",
"Failed".
:vartype deployment_status: str or ~azure.mgmt.cdn.models.DeploymentStatus
:param parameters: object which contains security policy parameters.
:type parameters: ~azure.mgmt.cdn.models.SecurityPolicyParameters
"""
_validation = {
'provisioning_state': {'readonly': True},
'deployment_status': {'readonly': True},
}
_attribute_map = {
'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
'deployment_status': {'key': 'deploymentStatus', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'SecurityPolicyParameters'},
}
def __init__(
self,
*,
parameters: Optional["SecurityPolicyParameters"] = None,
**kwargs
):
super(SecurityPolicyProperties, self).__init__(**kwargs)
self.parameters = parameters
class SecurityPolicyWebApplicationFirewallAssociation(msrest.serialization.Model):
"""settings for security policy patterns to match.
:param domains: List of domains.
:type domains: list[~azure.mgmt.cdn.models.ResourceReference]
:param patterns_to_match: List of paths.
:type patterns_to_match: list[str]
"""
_attribute_map = {
'domains': {'key': 'domains', 'type': '[ResourceReference]'},
'patterns_to_match': {'key': 'patternsToMatch', 'type': '[str]'},
}
def __init__(
self,
*,
domains: Optional[List["ResourceReference"]] = None,
patterns_to_match: Optional[List[str]] = None,
**kwargs
):
super(SecurityPolicyWebApplicationFirewallAssociation, self).__init__(**kwargs)
self.domains = domains
self.patterns_to_match = patterns_to_match
class SecurityPolicyWebApplicationFirewallParameters(SecurityPolicyParameters):
"""The json object containing security policy waf parameters.
All required parameters must be populated in order to send to Azure.
:param type: Required. The type of the Security policy to create. Constant filled by server.
Possible values include: "WebApplicationFirewall".
:type type: str or ~azure.mgmt.cdn.models.SecurityPolicyType
:param waf_policy: Resource ID.
:type waf_policy: ~azure.mgmt.cdn.models.ResourceReference
:param associations: Waf associations.
:type associations:
list[~azure.mgmt.cdn.models.SecurityPolicyWebApplicationFirewallAssociation]
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'waf_policy': {'key': 'wafPolicy', 'type': 'ResourceReference'},
'associations': {'key': 'associations', 'type': '[SecurityPolicyWebApplicationFirewallAssociation]'},
}
def __init__(
self,
*,
waf_policy: Optional["ResourceReference"] = None,
associations: Optional[List["SecurityPolicyWebApplicationFirewallAssociation"]] = None,
**kwargs
):
super(SecurityPolicyWebApplicationFirewallParameters, self).__init__(**kwargs)
self.type = 'WebApplicationFirewall' # type: str
self.waf_policy = waf_policy
self.associations = associations
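# Usage sketch (illustrative only): attaching a hypothetical WAF policy to a custom domain via the
# polymorphic security policy parameters; the result would typically be passed as
# SecurityPolicy(parameters=...). Both resource IDs below are placeholders.
def _example_waf_security_policy_parameters():
    association = SecurityPolicyWebApplicationFirewallAssociation(
        domains=[ResourceReference(id="/subscriptions/<sub>/.../customDomains/<domain>")],
        patterns_to_match=["/*"],
    )
    return SecurityPolicyWebApplicationFirewallParameters(
        waf_policy=ResourceReference(id="/subscriptions/<sub>/.../<waf-policy>"),
        associations=[association],
    )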
class SharedPrivateLinkResourceProperties(msrest.serialization.Model):
"""Describes the properties of an existing Shared Private Link Resource to use when connecting to a private origin.
:param private_link: The resource id of the resource that the shared private link resource is for.
:type private_link: ~azure.mgmt.cdn.models.ResourceReference
:param private_link_location: The location of the shared private link resource.
:type private_link_location: str
:param group_id: The group id from the provider of the resource that the shared private link
resource is for.
:type group_id: str
:param request_message: The request message for requesting approval of the shared private link
resource.
:type request_message: str
:param status: Status of the shared private link resource. Can be Pending, Approved, Rejected,
Disconnected, or Timeout. Possible values include: "Pending", "Approved", "Rejected",
"Disconnected", "Timeout".
:type status: str or ~azure.mgmt.cdn.models.SharedPrivateLinkResourceStatus
"""
_attribute_map = {
'private_link': {'key': 'privateLink', 'type': 'ResourceReference'},
'private_link_location': {'key': 'privateLinkLocation', 'type': 'str'},
'group_id': {'key': 'groupId', 'type': 'str'},
'request_message': {'key': 'requestMessage', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
}
def __init__(
self,
*,
private_link: Optional["ResourceReference"] = None,
private_link_location: Optional[str] = None,
group_id: Optional[str] = None,
request_message: Optional[str] = None,
status: Optional[Union[str, "SharedPrivateLinkResourceStatus"]] = None,
**kwargs
):
super(SharedPrivateLinkResourceProperties, self).__init__(**kwargs)
self.private_link = private_link
self.private_link_location = private_link_location
self.group_id = group_id
self.request_message = request_message
self.status = status
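# Usage sketch (illustrative only): a hypothetical request to connect to a private origin. The
# private link resource ID, location and group id below are placeholders.
def _example_shared_private_link_properties():
    return SharedPrivateLinkResourceProperties(
        private_link=ResourceReference(id="/subscriptions/<sub>/.../privateLinkServices/<pls>"),
        private_link_location="westus2",
        group_id="<group-id>",
        request_message="Please approve this Front Door origin connection.",
    )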
class Sku(msrest.serialization.Model):
"""The pricing tier (defines a CDN provider, feature list and rate) of the CDN profile.
:param name: Name of the pricing tier. Possible values include: "Standard_Verizon",
"Premium_Verizon", "Custom_Verizon", "Standard_Akamai", "Standard_ChinaCdn",
"Standard_Microsoft", "Premium_ChinaCdn", "Standard_AzureFrontDoor", "Premium_AzureFrontDoor",
"Standard_955BandWidth_ChinaCdn", "Standard_AvgBandWidth_ChinaCdn", "StandardPlus_ChinaCdn",
"StandardPlus_955BandWidth_ChinaCdn", "StandardPlus_AvgBandWidth_ChinaCdn".
:type name: str or ~azure.mgmt.cdn.models.SkuName
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[Union[str, "SkuName"]] = None,
**kwargs
):
super(Sku, self).__init__(**kwargs)
self.name = name
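# Usage sketch (illustrative only): the SKU is just a named pricing tier; the AzureFrontDoor
# models in this section pair with the Standard_AzureFrontDoor / Premium_AzureFrontDoor tiers.
def _example_afd_sku():
    return Sku(name="Standard_AzureFrontDoor")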
class SsoUri(msrest.serialization.Model):
"""The URI required to login to the supplemental portal from the Azure portal.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar sso_uri_value: The URI used to login to the supplemental portal.
:vartype sso_uri_value: str
"""
_validation = {
'sso_uri_value': {'readonly': True},
}
_attribute_map = {
'sso_uri_value': {'key': 'ssoUriValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SsoUri, self).__init__(**kwargs)
self.sso_uri_value = None
class SupportedOptimizationTypesListResult(msrest.serialization.Model):
"""The result of the GetSupportedOptimizationTypes API.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar supported_optimization_types: Supported optimization types for a profile.
:vartype supported_optimization_types: list[str or ~azure.mgmt.cdn.models.OptimizationType]
"""
_validation = {
'supported_optimization_types': {'readonly': True},
}
_attribute_map = {
'supported_optimization_types': {'key': 'supportedOptimizationTypes', 'type': '[str]'},
}
def __init__(
self,
**kwargs
):
super(SupportedOptimizationTypesListResult, self).__init__(**kwargs)
self.supported_optimization_types = None
class SystemData(msrest.serialization.Model):
"""Read only system data.
:param created_by: An identifier for the identity that created the resource.
:type created_by: str
:param created_by_type: The type of identity that created the resource. Possible values
include: "user", "application", "managedIdentity", "key".
:type created_by_type: str or ~azure.mgmt.cdn.models.IdentityType
:param created_at: The timestamp of resource creation (UTC).
:type created_at: ~datetime.datetime
:param last_modified_by: An identifier for the identity that last modified the resource.
:type last_modified_by: str
:param last_modified_by_type: The type of identity that last modified the resource. Possible
values include: "user", "application", "managedIdentity", "key".
:type last_modified_by_type: str or ~azure.mgmt.cdn.models.IdentityType
:param last_modified_at: The timestamp of resource last modification (UTC).
:type last_modified_at: ~datetime.datetime
"""
_attribute_map = {
'created_by': {'key': 'createdBy', 'type': 'str'},
'created_by_type': {'key': 'createdByType', 'type': 'str'},
'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
}
def __init__(
self,
*,
created_by: Optional[str] = None,
created_by_type: Optional[Union[str, "IdentityType"]] = None,
created_at: Optional[datetime.datetime] = None,
last_modified_by: Optional[str] = None,
last_modified_by_type: Optional[Union[str, "IdentityType"]] = None,
last_modified_at: Optional[datetime.datetime] = None,
**kwargs
):
super(SystemData, self).__init__(**kwargs)
self.created_by = created_by
self.created_by_type = created_by_type
self.created_at = created_at
self.last_modified_by = last_modified_by
self.last_modified_by_type = last_modified_by_type
self.last_modified_at = last_modified_at
class UrlFileExtensionMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for UrlFileExtension match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlFileExtensionMatchConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"Equal", "Contains", "BeginsWith", "EndsWith", "LessThan", "LessThanOrEqual", "GreaterThan",
"GreaterThanOrEqual", "RegEx".
:type operator: str or ~azure.mgmt.cdn.models.UrlFileExtensionOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlFileExtensionMatchConditionParameters"
def __init__(
self,
*,
operator: Union[str, "UrlFileExtensionOperator"],
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(UrlFileExtensionMatchConditionParameters, self).__init__(**kwargs)
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
class UrlFileNameMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for UrlFilename match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlFilenameConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"Equal", "Contains", "BeginsWith", "EndsWith", "LessThan", "LessThanOrEqual", "GreaterThan",
"GreaterThanOrEqual", "RegEx".
:type operator: str or ~azure.mgmt.cdn.models.UrlFileNameOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlFilenameConditionParameters"
def __init__(
self,
*,
operator: Union[str, "UrlFileNameOperator"],
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(UrlFileNameMatchConditionParameters, self).__init__(**kwargs)
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
class UrlPathMatchConditionParameters(msrest.serialization.Model):
"""Defines the parameters for UrlPath match conditions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlPathMatchConditionParameters".
:vartype odata_type: str
:param operator: Required. Describes operator to be matched. Possible values include: "Any",
"Equal", "Contains", "BeginsWith", "EndsWith", "LessThan", "LessThanOrEqual", "GreaterThan",
"GreaterThanOrEqual", "Wildcard", "RegEx".
:type operator: str or ~azure.mgmt.cdn.models.UrlPathOperator
:param negate_condition: Describes if this is negate condition or not.
:type negate_condition: bool
:param match_values: The match value for the condition of the delivery rule.
:type match_values: list[str]
:param transforms: List of transforms.
:type transforms: list[str or ~azure.mgmt.cdn.models.Transform]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'operator': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'operator': {'key': 'operator', 'type': 'str'},
'negate_condition': {'key': 'negateCondition', 'type': 'bool'},
'match_values': {'key': 'matchValues', 'type': '[str]'},
'transforms': {'key': 'transforms', 'type': '[str]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlPathMatchConditionParameters"
def __init__(
self,
*,
operator: Union[str, "UrlPathOperator"],
negate_condition: Optional[bool] = None,
match_values: Optional[List[str]] = None,
transforms: Optional[List[Union[str, "Transform"]]] = None,
**kwargs
):
super(UrlPathMatchConditionParameters, self).__init__(**kwargs)
self.operator = operator
self.negate_condition = negate_condition
self.match_values = match_values
self.transforms = transforms
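# Usage sketch (illustrative only): a hypothetical UrlPath match condition for requests whose
# path begins with /downloads/; "Lowercase" is assumed here to be a valid Transform value.
def _example_url_path_condition():
    return UrlPathMatchConditionParameters(
        operator="BeginsWith",
        match_values=["/downloads/"],
        transforms=["Lowercase"],
    )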
class UrlRedirectAction(DeliveryRuleAction):
"""Defines the url redirect action for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the action for the delivery rule. Constant filled by server.
Possible values include: "CacheExpiration", "CacheKeyQueryString", "ModifyRequestHeader",
"ModifyResponseHeader", "UrlRedirect", "UrlRewrite", "UrlSigning", "OriginGroupOverride".
:type name: str or ~azure.mgmt.cdn.models.DeliveryRuleActionEnum
:param parameters: Required. Defines the parameters for the action.
:type parameters: ~azure.mgmt.cdn.models.UrlRedirectActionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'UrlRedirectActionParameters'},
}
def __init__(
self,
*,
parameters: "UrlRedirectActionParameters",
**kwargs
):
super(UrlRedirectAction, self).__init__(**kwargs)
self.name = 'UrlRedirect' # type: str
self.parameters = parameters
class UrlRedirectActionParameters(msrest.serialization.Model):
"""Defines the parameters for the url redirect action.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlRedirectActionParameters".
:vartype odata_type: str
:param redirect_type: Required. The redirect type the rule will use when redirecting traffic.
Possible values include: "Moved", "Found", "TemporaryRedirect", "PermanentRedirect".
:type redirect_type: str or ~azure.mgmt.cdn.models.RedirectType
:param destination_protocol: Protocol to use for the redirect. The default value is
MatchRequest. Possible values include: "MatchRequest", "Http", "Https".
:type destination_protocol: str or ~azure.mgmt.cdn.models.DestinationProtocol
:param custom_path: The full path to redirect. Path cannot be empty and must start with /.
Leave empty to use the incoming path as destination path.
:type custom_path: str
:param custom_hostname: Host to redirect. Leave empty to use the incoming host as the
destination host.
:type custom_hostname: str
:param custom_query_string: The set of query strings to be placed in the redirect URL. Setting
this value would replace any existing query string; leave empty to preserve the incoming query
string. Query string must be in :code:`<key>`=:code:`<value>` format. ? and & will be added
automatically so do not include them.
:type custom_query_string: str
:param custom_fragment: Fragment to add to the redirect URL. Fragment is the part of the URL
that comes after #. Do not include the #.
:type custom_fragment: str
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'redirect_type': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'redirect_type': {'key': 'redirectType', 'type': 'str'},
'destination_protocol': {'key': 'destinationProtocol', 'type': 'str'},
'custom_path': {'key': 'customPath', 'type': 'str'},
'custom_hostname': {'key': 'customHostname', 'type': 'str'},
'custom_query_string': {'key': 'customQueryString', 'type': 'str'},
'custom_fragment': {'key': 'customFragment', 'type': 'str'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlRedirectActionParameters"
def __init__(
self,
*,
redirect_type: Union[str, "RedirectType"],
destination_protocol: Optional[Union[str, "DestinationProtocol"]] = None,
custom_path: Optional[str] = None,
custom_hostname: Optional[str] = None,
custom_query_string: Optional[str] = None,
custom_fragment: Optional[str] = None,
**kwargs
):
super(UrlRedirectActionParameters, self).__init__(**kwargs)
self.redirect_type = redirect_type
self.destination_protocol = destination_protocol
self.custom_path = custom_path
self.custom_hostname = custom_hostname
self.custom_query_string = custom_query_string
self.custom_fragment = custom_fragment
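# Usage sketch (illustrative only): a hypothetical permanent HTTPS redirect built from the two
# classes above; the hostname is a placeholder.
def _example_url_redirect_action():
    return UrlRedirectAction(
        parameters=UrlRedirectActionParameters(
            redirect_type="PermanentRedirect",
            destination_protocol="Https",
            custom_hostname="www.contoso.com",
        )
    )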
class UrlRewriteAction(DeliveryRuleAction):
"""Defines the url rewrite action for the delivery rule.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the action for the delivery rule. Constant filled by server.
Possible values include: "CacheExpiration", "CacheKeyQueryString", "ModifyRequestHeader",
"ModifyResponseHeader", "UrlRedirect", "UrlRewrite", "UrlSigning", "OriginGroupOverride".
:type name: str or ~azure.mgmt.cdn.models.DeliveryRuleActionEnum
:param parameters: Required. Defines the parameters for the action.
:type parameters: ~azure.mgmt.cdn.models.UrlRewriteActionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'UrlRewriteActionParameters'},
}
def __init__(
self,
*,
parameters: "UrlRewriteActionParameters",
**kwargs
):
super(UrlRewriteAction, self).__init__(**kwargs)
self.name = 'UrlRewrite' # type: str
self.parameters = parameters
class UrlRewriteActionParameters(msrest.serialization.Model):
"""Defines the parameters for the url rewrite action.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlRewriteActionParameters".
:vartype odata_type: str
    :param source_pattern: Required. Defines a request URI pattern that identifies the type of
     requests that may be rewritten. If the value is blank, all strings are matched.
:type source_pattern: str
    :param destination: Required. Defines the relative URL to which the above requests will be
     rewritten.
:type destination: str
:param preserve_unmatched_path: Whether to preserve unmatched path. Default value is true.
:type preserve_unmatched_path: bool
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
'source_pattern': {'required': True},
'destination': {'required': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'source_pattern': {'key': 'sourcePattern', 'type': 'str'},
'destination': {'key': 'destination', 'type': 'str'},
'preserve_unmatched_path': {'key': 'preserveUnmatchedPath', 'type': 'bool'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlRewriteActionParameters"
def __init__(
self,
*,
source_pattern: str,
destination: str,
preserve_unmatched_path: Optional[bool] = None,
**kwargs
):
super(UrlRewriteActionParameters, self).__init__(**kwargs)
self.source_pattern = source_pattern
self.destination = destination
self.preserve_unmatched_path = preserve_unmatched_path
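# Illustrative sketch (hypothetical values, not part of the generated SDK module):
# wiring UrlRewriteActionParameters into a UrlRewriteAction. Requests whose URI
# matches the source pattern are rewritten onto the destination, and
# preserve_unmatched_path keeps the remainder of the path that the pattern did
# not match.
def _example_url_rewrite_action():
    params = UrlRewriteActionParameters(
        source_pattern="/old-files/",
        destination="/files/",
        preserve_unmatched_path=True,
    )
    return UrlRewriteAction(parameters=params)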
class UrlSigningAction(DeliveryRuleAction):
"""Defines the url signing action for the delivery rule.
All required parameters must be populated in order to send to Azure.
    :param name: Required. The name of the action for the delivery rule. Constant filled by server.
Possible values include: "CacheExpiration", "CacheKeyQueryString", "ModifyRequestHeader",
"ModifyResponseHeader", "UrlRedirect", "UrlRewrite", "UrlSigning", "OriginGroupOverride".
:type name: str or ~azure.mgmt.cdn.models.DeliveryRuleActionEnum
:param parameters: Required. Defines the parameters for the action.
:type parameters: ~azure.mgmt.cdn.models.UrlSigningActionParameters
"""
_validation = {
'name': {'required': True},
'parameters': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'UrlSigningActionParameters'},
}
def __init__(
self,
*,
parameters: "UrlSigningActionParameters",
**kwargs
):
super(UrlSigningAction, self).__init__(**kwargs)
self.name = 'UrlSigning' # type: str
self.parameters = parameters
class UrlSigningActionParameters(msrest.serialization.Model):
"""Defines the parameters for the Url Signing action.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar odata_type: Required. Default value:
"#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlSigningActionParameters".
:vartype odata_type: str
:param algorithm: Algorithm to use for URL signing. Possible values include: "SHA256".
:type algorithm: str or ~azure.mgmt.cdn.models.Algorithm
    :param parameter_name_override: Defines which query string parameters in the URL are to be
     considered for expires, key id, etc.
:type parameter_name_override: list[~azure.mgmt.cdn.models.UrlSigningParamIdentifier]
"""
_validation = {
'odata_type': {'required': True, 'constant': True},
}
_attribute_map = {
'odata_type': {'key': '@odata\\.type', 'type': 'str'},
'algorithm': {'key': 'algorithm', 'type': 'str'},
'parameter_name_override': {'key': 'parameterNameOverride', 'type': '[UrlSigningParamIdentifier]'},
}
odata_type = "#Microsoft.Azure.Cdn.Models.DeliveryRuleUrlSigningActionParameters"
def __init__(
self,
*,
algorithm: Optional[Union[str, "Algorithm"]] = None,
parameter_name_override: Optional[List["UrlSigningParamIdentifier"]] = None,
**kwargs
):
super(UrlSigningActionParameters, self).__init__(**kwargs)
self.algorithm = algorithm
self.parameter_name_override = parameter_name_override
class UrlSigningKey(msrest.serialization.Model):
"""Url signing key.
All required parameters must be populated in order to send to Azure.
:param key_id: Required. Defines the customer defined key Id. This id will exist in the
incoming request to indicate the key used to form the hash.
:type key_id: str
:param key_source_parameters: Required. Defines the parameters for using customer key vault for
Url Signing Key.
:type key_source_parameters: ~azure.mgmt.cdn.models.KeyVaultSigningKeyParameters
"""
_validation = {
'key_id': {'required': True},
'key_source_parameters': {'required': True},
}
_attribute_map = {
'key_id': {'key': 'keyId', 'type': 'str'},
'key_source_parameters': {'key': 'keySourceParameters', 'type': 'KeyVaultSigningKeyParameters'},
}
def __init__(
self,
*,
key_id: str,
key_source_parameters: "KeyVaultSigningKeyParameters",
**kwargs
):
super(UrlSigningKey, self).__init__(**kwargs)
self.key_id = key_id
self.key_source_parameters = key_source_parameters
class UrlSigningKeyParameters(SecretParameters):
"""Url signing key parameters.
All required parameters must be populated in order to send to Azure.
    :param type: Required. The type of the Secret to create. Constant filled by server. Possible
values include: "UrlSigningKey", "CustomerCertificate", "ManagedCertificate".
:type type: str or ~azure.mgmt.cdn.models.SecretType
:param key_id: Required. Defines the customer defined key Id. This id will exist in the
incoming request to indicate the key used to form the hash.
:type key_id: str
:param secret_source: Required. Resource reference to the KV secret.
:type secret_source: ~azure.mgmt.cdn.models.ResourceReference
:param secret_version: Version of the secret to be used.
:type secret_version: str
"""
_validation = {
'type': {'required': True},
'key_id': {'required': True},
'secret_source': {'required': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'key_id': {'key': 'keyId', 'type': 'str'},
'secret_source': {'key': 'secretSource', 'type': 'ResourceReference'},
'secret_version': {'key': 'secretVersion', 'type': 'str'},
}
def __init__(
self,
*,
key_id: str,
secret_source: "ResourceReference",
secret_version: Optional[str] = None,
**kwargs
):
super(UrlSigningKeyParameters, self).__init__(**kwargs)
self.type = 'UrlSigningKey' # type: str
self.key_id = key_id
self.secret_source = secret_source
self.secret_version = secret_version
class UrlSigningParamIdentifier(msrest.serialization.Model):
"""Defines how to identify a parameter for a specific purpose e.g. expires.
All required parameters must be populated in order to send to Azure.
:param param_indicator: Required. Indicates the purpose of the parameter. Possible values
include: "Expires", "KeyId", "Signature".
:type param_indicator: str or ~azure.mgmt.cdn.models.ParamIndicator
:param param_name: Required. Parameter name.
:type param_name: str
"""
_validation = {
'param_indicator': {'required': True},
'param_name': {'required': True},
}
_attribute_map = {
'param_indicator': {'key': 'paramIndicator', 'type': 'str'},
'param_name': {'key': 'paramName', 'type': 'str'},
}
def __init__(
self,
*,
param_indicator: Union[str, "ParamIndicator"],
param_name: str,
**kwargs
):
super(UrlSigningParamIdentifier, self).__init__(**kwargs)
self.param_indicator = param_indicator
self.param_name = param_name
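# Illustrative sketch (hypothetical query parameter names, not part of the
# generated SDK module): declaring which query string parameters carry the
# expiry, key id and signature for URL signing. "Expires", "KeyId" and
# "Signature" are the indicator values listed in the docstring above, and
# "SHA256" is the only algorithm value documented for UrlSigningActionParameters.
def _example_url_signing_parameters():
    overrides = [
        UrlSigningParamIdentifier(param_indicator="Expires", param_name="exp"),
        UrlSigningParamIdentifier(param_indicator="KeyId", param_name="kid"),
        UrlSigningParamIdentifier(param_indicator="Signature", param_name="sig"),
    ]
    return UrlSigningActionParameters(
        algorithm="SHA256",
        parameter_name_override=overrides,
    )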
class Usage(msrest.serialization.Model):
"""Describes resource usage.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource identifier.
:vartype id: str
:param unit: Required. An enum describing the unit of measurement. Possible values include:
"Count".
:type unit: str or ~azure.mgmt.cdn.models.UsageUnit
:param current_value: Required. The current value of the usage.
:type current_value: long
:param limit: Required. The limit of usage.
:type limit: long
:param name: Required. The name of the type of usage.
:type name: ~azure.mgmt.cdn.models.UsageName
"""
_validation = {
'id': {'readonly': True},
'unit': {'required': True},
'current_value': {'required': True},
'limit': {'required': True},
'name': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'current_value': {'key': 'currentValue', 'type': 'long'},
'limit': {'key': 'limit', 'type': 'long'},
'name': {'key': 'name', 'type': 'UsageName'},
}
def __init__(
self,
*,
unit: Union[str, "UsageUnit"],
current_value: int,
limit: int,
name: "UsageName",
**kwargs
):
super(Usage, self).__init__(**kwargs)
self.id = None
self.unit = unit
self.current_value = current_value
self.limit = limit
self.name = name
class UsageName(msrest.serialization.Model):
"""The usage names.
:param value: A string describing the resource name.
:type value: str
:param localized_value: A localized string describing the resource name.
:type localized_value: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'localized_value': {'key': 'localizedValue', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[str] = None,
localized_value: Optional[str] = None,
**kwargs
):
super(UsageName, self).__init__(**kwargs)
self.value = value
self.localized_value = localized_value
class UsagesListResult(msrest.serialization.Model):
"""The list usages operation response.
:param value: The list of resource usages.
:type value: list[~azure.mgmt.cdn.models.Usage]
:param next_link: URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[Usage]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["Usage"]] = None,
next_link: Optional[str] = None,
**kwargs
):
super(UsagesListResult, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class UserManagedHttpsParameters(CustomDomainHttpsParameters):
"""Defines the certificate source parameters using user's keyvault certificate for enabling SSL.
All required parameters must be populated in order to send to Azure.
    :param certificate_source: Required. Defines the source of the SSL certificate. Constant filled
by server. Possible values include: "AzureKeyVault", "Cdn".
:type certificate_source: str or ~azure.mgmt.cdn.models.CertificateSource
:param protocol_type: Required. Defines the TLS extension protocol that is used for secure
delivery. Possible values include: "ServerNameIndication", "IPBased".
:type protocol_type: str or ~azure.mgmt.cdn.models.ProtocolType
:param minimum_tls_version: TLS protocol version that will be used for Https. Possible values
include: "None", "TLS10", "TLS12".
:type minimum_tls_version: str or ~azure.mgmt.cdn.models.MinimumTlsVersion
:param certificate_source_parameters: Required. Defines the certificate source parameters using
user's keyvault certificate for enabling SSL.
:type certificate_source_parameters: ~azure.mgmt.cdn.models.KeyVaultCertificateSourceParameters
"""
_validation = {
'certificate_source': {'required': True},
'protocol_type': {'required': True},
'certificate_source_parameters': {'required': True},
}
_attribute_map = {
'certificate_source': {'key': 'certificateSource', 'type': 'str'},
'protocol_type': {'key': 'protocolType', 'type': 'str'},
'minimum_tls_version': {'key': 'minimumTlsVersion', 'type': 'str'},
'certificate_source_parameters': {'key': 'certificateSourceParameters', 'type': 'KeyVaultCertificateSourceParameters'},
}
def __init__(
self,
*,
protocol_type: Union[str, "ProtocolType"],
certificate_source_parameters: "KeyVaultCertificateSourceParameters",
minimum_tls_version: Optional[Union[str, "MinimumTlsVersion"]] = None,
**kwargs
):
super(UserManagedHttpsParameters, self).__init__(protocol_type=protocol_type, minimum_tls_version=minimum_tls_version, **kwargs)
self.certificate_source = 'AzureKeyVault' # type: str
self.certificate_source_parameters = certificate_source_parameters
class ValidateCustomDomainInput(msrest.serialization.Model):
"""Input of the custom domain to be validated for DNS mapping.
All required parameters must be populated in order to send to Azure.
:param host_name: Required. The host name of the custom domain. Must be a domain name.
:type host_name: str
"""
_validation = {
'host_name': {'required': True},
}
_attribute_map = {
'host_name': {'key': 'hostName', 'type': 'str'},
}
def __init__(
self,
*,
host_name: str,
**kwargs
):
super(ValidateCustomDomainInput, self).__init__(**kwargs)
self.host_name = host_name
class ValidateCustomDomainOutput(msrest.serialization.Model):
"""Output of custom domain validation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar custom_domain_validated: Indicates whether the custom domain is valid or not.
:vartype custom_domain_validated: bool
:ivar reason: The reason why the custom domain is not valid.
:vartype reason: str
:ivar message: Error message describing why the custom domain is not valid.
:vartype message: str
"""
_validation = {
'custom_domain_validated': {'readonly': True},
'reason': {'readonly': True},
'message': {'readonly': True},
}
_attribute_map = {
'custom_domain_validated': {'key': 'customDomainValidated', 'type': 'bool'},
'reason': {'key': 'reason', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ValidateCustomDomainOutput, self).__init__(**kwargs)
self.custom_domain_validated = None
self.reason = None
self.message = None
class ValidateProbeInput(msrest.serialization.Model):
"""Input of the validate probe API.
All required parameters must be populated in order to send to Azure.
:param probe_url: Required. The probe URL to validate.
:type probe_url: str
"""
_validation = {
'probe_url': {'required': True},
}
_attribute_map = {
'probe_url': {'key': 'probeURL', 'type': 'str'},
}
def __init__(
self,
*,
probe_url: str,
**kwargs
):
super(ValidateProbeInput, self).__init__(**kwargs)
self.probe_url = probe_url
class ValidateProbeOutput(msrest.serialization.Model):
"""Output of the validate probe API.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar is_valid: Indicates whether the probe URL is accepted or not.
:vartype is_valid: bool
:ivar error_code: Specifies the error code when the probe url is not accepted.
:vartype error_code: str
:ivar message: The detailed error message describing why the probe URL is not accepted.
:vartype message: str
"""
_validation = {
'is_valid': {'readonly': True},
'error_code': {'readonly': True},
'message': {'readonly': True},
}
_attribute_map = {
'is_valid': {'key': 'isValid', 'type': 'bool'},
'error_code': {'key': 'errorCode', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ValidateProbeOutput, self).__init__(**kwargs)
self.is_valid = None
self.error_code = None
self.message = None
class ValidateSecretInput(msrest.serialization.Model):
"""Input of the secret to be validated.
All required parameters must be populated in order to send to Azure.
:param secret_source: Required. The secret source.
:type secret_source: ~azure.mgmt.cdn.models.ResourceReference
:param secret_type: Required. The secret type. Possible values include: "UrlSigningKey",
"ManagedCertificate", "CustomerCertificate".
:type secret_type: str or ~azure.mgmt.cdn.models.ValidateSecretType
"""
_validation = {
'secret_source': {'required': True},
'secret_type': {'required': True},
}
_attribute_map = {
'secret_source': {'key': 'secretSource', 'type': 'ResourceReference'},
'secret_type': {'key': 'secretType', 'type': 'str'},
}
def __init__(
self,
*,
secret_source: "ResourceReference",
secret_type: Union[str, "ValidateSecretType"],
**kwargs
):
super(ValidateSecretInput, self).__init__(**kwargs)
self.secret_source = secret_source
self.secret_type = secret_type
class ValidateSecretOutput(msrest.serialization.Model):
"""Output of the validated secret.
:param status: The validation status. Possible values include: "Valid", "Invalid",
"AccessDenied", "CertificateExpired".
:type status: str or ~azure.mgmt.cdn.models.Status
:param message: Detailed error message.
:type message: str
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
*,
status: Optional[Union[str, "Status"]] = None,
message: Optional[str] = None,
**kwargs
):
super(ValidateSecretOutput, self).__init__(**kwargs)
self.status = status
self.message = message
class ValidationToken(msrest.serialization.Model):
"""The validation token.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar token:
:vartype token: str
"""
_validation = {
'token': {'readonly': True},
}
_attribute_map = {
'token': {'key': 'token', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ValidationToken, self).__init__(**kwargs)
self.token = None
class WafMetricsResponse(msrest.serialization.Model):
"""Waf Metrics Response.
:param date_time_begin:
:type date_time_begin: ~datetime.datetime
:param date_time_end:
:type date_time_end: ~datetime.datetime
:param granularity: Possible values include: "PT5M", "PT1H", "P1D".
:type granularity: str or ~azure.mgmt.cdn.models.WafMetricsResponseGranularity
:param series:
:type series: list[~azure.mgmt.cdn.models.WafMetricsResponseSeriesItem]
"""
_attribute_map = {
'date_time_begin': {'key': 'dateTimeBegin', 'type': 'iso-8601'},
'date_time_end': {'key': 'dateTimeEnd', 'type': 'iso-8601'},
'granularity': {'key': 'granularity', 'type': 'str'},
'series': {'key': 'series', 'type': '[WafMetricsResponseSeriesItem]'},
}
def __init__(
self,
*,
date_time_begin: Optional[datetime.datetime] = None,
date_time_end: Optional[datetime.datetime] = None,
granularity: Optional[Union[str, "WafMetricsResponseGranularity"]] = None,
series: Optional[List["WafMetricsResponseSeriesItem"]] = None,
**kwargs
):
super(WafMetricsResponse, self).__init__(**kwargs)
self.date_time_begin = date_time_begin
self.date_time_end = date_time_end
self.granularity = granularity
self.series = series
class WafMetricsResponseSeriesItem(msrest.serialization.Model):
"""WafMetricsResponseSeriesItem.
Variables are only populated by the server, and will be ignored when sending a request.
:param metric:
:type metric: str
:ivar unit: Default value: "count".
:vartype unit: str
:param groups:
:type groups: list[~azure.mgmt.cdn.models.WafMetricsResponseSeriesPropertiesItemsItem]
:param data:
:type data:
list[~azure.mgmt.cdn.models.Components18OrqelSchemasWafmetricsresponsePropertiesSeriesItemsPropertiesDataItems]
"""
_validation = {
'unit': {'constant': True},
}
_attribute_map = {
'metric': {'key': 'metric', 'type': 'str'},
'unit': {'key': 'unit', 'type': 'str'},
'groups': {'key': 'groups', 'type': '[WafMetricsResponseSeriesPropertiesItemsItem]'},
'data': {'key': 'data', 'type': '[Components18OrqelSchemasWafmetricsresponsePropertiesSeriesItemsPropertiesDataItems]'},
}
unit = "count"
def __init__(
self,
*,
metric: Optional[str] = None,
groups: Optional[List["WafMetricsResponseSeriesPropertiesItemsItem"]] = None,
data: Optional[List["Components18OrqelSchemasWafmetricsresponsePropertiesSeriesItemsPropertiesDataItems"]] = None,
**kwargs
):
super(WafMetricsResponseSeriesItem, self).__init__(**kwargs)
self.metric = metric
self.groups = groups
self.data = data
class WafMetricsResponseSeriesPropertiesItemsItem(msrest.serialization.Model):
"""WafMetricsResponseSeriesPropertiesItemsItem.
:param name:
:type name: str
:param value:
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
super(WafMetricsResponseSeriesPropertiesItemsItem, self).__init__(**kwargs)
self.name = name
self.value = value
class WafRankingsResponse(msrest.serialization.Model):
"""Waf Rankings Response.
:param date_time_begin:
:type date_time_begin: ~datetime.datetime
:param date_time_end:
:type date_time_end: ~datetime.datetime
:param groups:
:type groups: list[str]
:param data:
:type data: list[~azure.mgmt.cdn.models.WafRankingsResponseDataItem]
"""
_attribute_map = {
'date_time_begin': {'key': 'dateTimeBegin', 'type': 'iso-8601'},
'date_time_end': {'key': 'dateTimeEnd', 'type': 'iso-8601'},
'groups': {'key': 'groups', 'type': '[str]'},
'data': {'key': 'data', 'type': '[WafRankingsResponseDataItem]'},
}
def __init__(
self,
*,
date_time_begin: Optional[datetime.datetime] = None,
date_time_end: Optional[datetime.datetime] = None,
groups: Optional[List[str]] = None,
data: Optional[List["WafRankingsResponseDataItem"]] = None,
**kwargs
):
super(WafRankingsResponse, self).__init__(**kwargs)
self.date_time_begin = date_time_begin
self.date_time_end = date_time_end
self.groups = groups
self.data = data
class WafRankingsResponseDataItem(msrest.serialization.Model):
"""WafRankingsResponseDataItem.
:param group_values:
:type group_values: list[str]
:param metrics:
:type metrics:
list[~azure.mgmt.cdn.models.ComponentsKpo1PjSchemasWafrankingsresponsePropertiesDataItemsPropertiesMetricsItems]
"""
_attribute_map = {
'group_values': {'key': 'groupValues', 'type': '[str]'},
'metrics': {'key': 'metrics', 'type': '[ComponentsKpo1PjSchemasWafrankingsresponsePropertiesDataItemsPropertiesMetricsItems]'},
}
def __init__(
self,
*,
group_values: Optional[List[str]] = None,
metrics: Optional[List["ComponentsKpo1PjSchemasWafrankingsresponsePropertiesDataItemsPropertiesMetricsItems"]] = None,
**kwargs
):
super(WafRankingsResponseDataItem, self).__init__(**kwargs)
self.group_values = group_values
self.metrics = metrics
| 42.132543 | 726 | 0.678309 | 43,617 | 397,984 | 6.009698 | 0.030424 | 0.017331 | 0.018999 | 0.028498 | 0.798642 | 0.770464 | 0.747414 | 0.726459 | 0.713717 | 0.701936 | 0 | 0.002058 | 0.208817 | 397,984 | 9,445 | 727 | 42.137004 | 0.830264 | 0.434533 | 0 | 0.712155 | 0 | 0.000202 | 0.265668 | 0.095986 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042733 | false | 0 | 0.001008 | 0 | 0.16791 | 0.001814 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b85f5bf21d951133073c62a074e4264bdc6cbcdc | 108 | py | Python | hello_world.py | Hrushabhs/My-Pycode | c27549b89a7848827117e42d324d383732adfac5 | [
"Apache-2.0"
] | null | null | null | hello_world.py | Hrushabhs/My-Pycode | c27549b89a7848827117e42d324d383732adfac5 | [
"Apache-2.0"
] | null | null | null | hello_world.py | Hrushabhs/My-Pycode | c27549b89a7848827117e42d324d383732adfac5 | [
"Apache-2.0"
] | null | null | null | print "Hello world by Hrushabhs"
def hel():
(print"Hello Hrushabhs")
hel()
print"hello"+" "+ "Sirsulwar"
| 18 | 33 | 0.675926 | 14 | 108 | 5.214286 | 0.571429 | 0.410959 | 0.356164 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.148148 | 108 | 6 | 34 | 18 | 0.793478 | 0 | 0 | 0 | 0 | 0 | 0.495413 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.6 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
b8c0e600db223955d23ddc35bb21138afa069cd7 | 61 | py | Python | src/mailer/__main__.py | XtremeGood/Mailer | b530dfe0400150859e9dc6d8b1cae857f8ebbcd9 | [
"MIT"
] | 1 | 2020-10-14T08:54:58.000Z | 2020-10-14T08:54:58.000Z | src/mailer/__main__.py | XtremeGood/Mailer | b530dfe0400150859e9dc6d8b1cae857f8ebbcd9 | [
"MIT"
] | null | null | null | src/mailer/__main__.py | XtremeGood/Mailer | b530dfe0400150859e9dc6d8b1cae857f8ebbcd9 | [
"MIT"
] | 1 | 2020-04-25T14:45:46.000Z | 2020-04-25T14:45:46.000Z | import sys
import mailer.main
sys.exit(mailer.main.main())
| 10.166667 | 28 | 0.754098 | 10 | 61 | 4.6 | 0.5 | 0.434783 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.114754 | 61 | 5 | 29 | 12.2 | 0.851852 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
b2318ca11b86230f9b608df18445283f7c76d7b4 | 4,464 | py | Python | tests/app/guild_helper_bot/test_report_command.py | ricardochaves/chat-wars-database | 597f192fb6ddf290c6c7477cf8c7d0ca654925f6 | [
"MIT"
] | 1 | 2019-12-30T19:16:52.000Z | 2019-12-30T19:16:52.000Z | tests/app/guild_helper_bot/test_report_command.py | ricardochaves/chat-wars-database | 597f192fb6ddf290c6c7477cf8c7d0ca654925f6 | [
"MIT"
] | null | null | null | tests/app/guild_helper_bot/test_report_command.py | ricardochaves/chat-wars-database | 597f192fb6ddf290c6c7477cf8c7d0ca654925f6 | [
"MIT"
] | null | null | null | from django.test import TestCase
from django.utils import timezone
from chat_wars_database.app.business_core.models import Item
from chat_wars_database.app.guild_helper_bot.commands import _execute_deposit
from chat_wars_database.app.guild_helper_bot.commands import _execute_report
from chat_wars_database.app.guild_helper_bot.models import Guild
from chat_wars_database.app.guild_helper_bot.models import Message
from chat_wars_database.app.guild_helper_bot.models import TelegramUser
from chat_wars_database.app.guild_helper_bot.models import UserDeposits
class TestReportCommand(TestCase):
def setUp(self) -> None:
item = Item.objects.create(name="Magic Stone", command="13")
telegram_user_data = {
"user_name": "ricardo",
"name": "@ricardochaves",
"telegram_id": 1234,
}
user = TelegramUser.objects.create(**telegram_user_data)
self.guild = Guild.objects.create(name="test", captain=user)
message_data = {
"chat_id": 65432,
"forward_date": timezone.now(),
"message_text": "Deposited successfully: Sanguine Parsley (2)",
"message_id": 123,
"telegram_user_id": user.id,
}
message = Message.objects.create(**message_data)
user_deposits_data = {
"telegram_user": user,
"message": message,
"item": item,
"total": 10,
}
UserDeposits.objects.create(**user_deposits_data)
def test_should_create_a_report(self):
self.assertIn("What was deposited in the last 7 days.", _execute_report(["/rw"], self.guild))
self.assertIn("What was deposited in the last 30 days.", _execute_report(["/rm"], self.guild))
self.assertIn("What was deposited in the last 365 days.", _execute_report(["/ry"], self.guild))
self.assertIn("What was deposited in the last 7 days.", _execute_report(["/rw", "13"], self.guild))
self.assertIn("What was deposited in the last 30 days.", _execute_report(["/rm", "13"], self.guild))
self.assertIn("What was deposited in the last 365 days.", _execute_report(["/ry", "13"], self.guild))
self.assertIn("The report is for the item 13", _execute_report(["/rw", "13"], self.guild))
self.assertIn("The report is for the item 13", _execute_report(["/rm", "13"], self.guild))
self.assertIn("The report is for the item 13", _execute_report(["/ry", "13"], self.guild))
self.assertIn("What was deposited in the last 7 days.", _execute_report(["/rw", "@ricardochaves"], self.guild))
self.assertIn("What was deposited in the last 30 days.", _execute_report(["/rm", "@ricardochaves"], self.guild))
self.assertIn(
"What was deposited in the last 365 days.", _execute_report(["/ry", "@ricardochaves"], self.guild)
)
self.assertIn("Deposits were made by @ricardochaves", _execute_report(["/rw", "@ricardochaves"], self.guild))
self.assertIn("Deposits were made by @ricardochaves", _execute_report(["/rm", "@ricardochaves"], self.guild))
self.assertIn("Deposits were made by @ricardochaves", _execute_report(["/ry", "@ricardochaves"], self.guild))
self.assertIn(
"What was deposited in the last 7 days.", _execute_report(["/rw", "13", "@ricardochaves"], self.guild)
)
self.assertIn(
"What was deposited in the last 30 days.", _execute_report(["/rm", "13", "@ricardochaves"], self.guild)
)
self.assertIn(
"What was deposited in the last 365 days.", _execute_report(["/ry", "13", "@ricardochaves"], self.guild)
)
self.assertIn(
"Deposits were made by @ricardochaves", _execute_report(["/rw", "@ricardochaves", "13"], self.guild)
)
self.assertIn(
"Deposits were made by @ricardochaves", _execute_report(["/rm", "@ricardochaves", "13"], self.guild)
)
self.assertIn(
"Deposits were made by @ricardochaves", _execute_report(["/ry", "@ricardochaves", "13"], self.guild)
)
self.assertIn("The report is for the item 13", _execute_report(["/rw", "@ricardochaves", "13"], self.guild))
self.assertIn("The report is for the item 13", _execute_report(["/rm", "@ricardochaves", "13"], self.guild))
self.assertIn("The report is for the item 13", _execute_report(["/ry", "@ricardochaves", "13"], self.guild))
| 54.439024 | 120 | 0.648073 | 552 | 4,464 | 5.056159 | 0.144928 | 0.116446 | 0.10713 | 0.173056 | 0.759584 | 0.751344 | 0.751344 | 0.742745 | 0.742745 | 0.742745 | 0 | 0.023553 | 0.210573 | 4,464 | 81 | 121 | 55.111111 | 0.768445 | 0 | 0 | 0.09589 | 0 | 0 | 0.305108 | 0 | 0 | 0 | 0 | 0 | 0.328767 | 1 | 0.027397 | false | 0 | 0.123288 | 0 | 0.164384 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b24c7a219b3303ac95e680b700d787cc6c080d78 | 2,081 | py | Python | test/programytest/utils/language/test_default.py | motazsaad/fit-bot-fb-clt | 580477aa1ec91855b621d9ae276f2705962f6a87 | [
"MIT"
] | 345 | 2016-11-23T22:37:04.000Z | 2022-03-30T20:44:44.000Z | test/programytest/utils/language/test_default.py | MikeyBeez/program-y | 00d7a0c7d50062f18f0ab6f4a041068e119ef7f0 | [
"MIT"
] | 275 | 2016-12-07T10:30:28.000Z | 2022-02-08T21:28:33.000Z | test/programytest/utils/language/test_default.py | VProgramMist/modified-program-y | f32efcafafd773683b3fe30054d5485fe9002b7d | [
"MIT"
] | 159 | 2016-11-28T18:59:30.000Z | 2022-03-20T18:02:44.000Z | import unittest
from programy.utils.language.default import DefaultLangauge
#############################################################################
#
class DefaultTests(unittest.TestCase):
def test_split_into_sentences(self):
sentences = DefaultLangauge.split_into_sentences("")
self.assertEqual([], sentences)
sentences = DefaultLangauge.split_into_sentences("Hello")
self.assertEqual(["Hello"], sentences)
sentences = DefaultLangauge.split_into_sentences("Hello World")
self.assertEqual(["Hello World"], sentences)
sentences = DefaultLangauge.split_into_sentences("Hello, World")
self.assertEqual(["Hello, World"], sentences)
sentences = DefaultLangauge.split_into_sentences("Hello, World!")
self.assertEqual(["Hello, World"], sentences)
sentences = DefaultLangauge.split_into_sentences("Hello. World")
self.assertEqual(["Hello", "World"], sentences)
sentences = DefaultLangauge.split_into_sentences("Hello? World")
self.assertEqual(["Hello", "World"], sentences)
sentences = DefaultLangauge.split_into_sentences("Hello. World.?!")
self.assertEqual(["Hello", "World"], sentences)
sentences = DefaultLangauge.split_into_sentences("!Hello. World")
self.assertEqual(["Hello", "World"], sentences)
sentences = DefaultLangauge.split_into_sentences("半宽韩文字母")
self.assertEqual(["半宽韩文字母"], sentences)
sentences = DefaultLangauge.split_into_sentences("半宽韩文字母. 半宽平假名")
self.assertEqual(["半宽韩文字母", "半宽平假名"], sentences)
def test_split_into_words(self):
words = DefaultLangauge.split_into_words("")
self.assertEqual([], words)
words = DefaultLangauge.split_into_words("Hello")
self.assertEqual(["Hello"], words)
words = DefaultLangauge.split_into_words("Hello World")
self.assertEqual(["Hello", "World"], words)
words = DefaultLangauge.split_into_words(" Hello World ")
self.assertEqual(["Hello", "World"], words) | 37.160714 | 77 | 0.656415 | 191 | 2,081 | 6.963351 | 0.120419 | 0.135338 | 0.270677 | 0.272932 | 0.785714 | 0.728571 | 0.728571 | 0.606015 | 0.606015 | 0.606015 | 0 | 0 | 0.184046 | 2,081 | 56 | 78 | 37.160714 | 0.783274 | 0 | 0 | 0.285714 | 0 | 0 | 0.134232 | 0 | 0 | 0 | 0 | 0 | 0.428571 | 1 | 0.057143 | false | 0 | 0.057143 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b24f71c2eb60cb5c14dc72d11278ca4aaf078087 | 4,059 | py | Python | sidebars/clustering_sidebars.py | udaykiran1809/NoCodeAIML | ad40e4176d112f3ac1c2e9639e045b59d9aa9f50 | [
"Apache-2.0"
] | 7 | 2021-10-02T02:30:42.000Z | 2022-01-28T19:50:22.000Z | sidebars/clustering_sidebars.py | udaykiran1809/NoCodeAIML | ad40e4176d112f3ac1c2e9639e045b59d9aa9f50 | [
"Apache-2.0"
] | null | null | null | sidebars/clustering_sidebars.py | udaykiran1809/NoCodeAIML | ad40e4176d112f3ac1c2e9639e045b59d9aa9f50 | [
"Apache-2.0"
] | 3 | 2021-10-02T08:06:09.000Z | 2021-11-11T17:46:22.000Z | import streamlit as st
DISTANCE_METRICS = {
"Euclidean": "euclidean",
"Manhattan": "manhattan"
}
SEARCH_ALGORITHM = {
"Auto": "auto",
"BallTree": "ball_tree",
"KDTree": "kd_tree",
"Brute-Force search": "brute"
}
def DBSCAN_sidebar():
inputs = {}
with st.sidebar:
st.write("## Input data")
inputs["data"] = st.selectbox(
"Which data set do you want to use?",
("Synthetic data",),
)
if inputs["data"] == "Synthetic data":
inputs['n_samples'] = st.number_input(
"number of data points", 100, None, 1000,
)
inputs['n_features'] = st.number_input(
"number of features in data set", 2, None, 2,
)
inputs['n_centers'] = st.number_input(
"number of cluster centers", 3, None, 3,
)
inputs['cluster_std'] = st.number_input(
"Cluster standard deviation", 0.1, None, 1.0,
)
st.write("## Model Hyperparameter")
inputs['eps'] = st.number_input(
"epsilon?", 0.1, None, 0.5, step=0.1)
inputs['min_samples'] = st.number_input(
"Min Samples?", 1, None, 5,
)
dist = st.selectbox(
"Which distance metric do you want to use?",
list(DISTANCE_METRICS.keys()))
inputs["metric"] = DISTANCE_METRICS[dist]
algo = st.selectbox(
"Which nearest neighbor search algorithm do you want to use?",
list(SEARCH_ALGORITHM.keys()))
inputs["search_algo"] = SEARCH_ALGORITHM[algo]
return inputs
def KMEANS_sidebar():
inputs = {}
with st.sidebar:
st.write("## Input data")
inputs["data"] = st.selectbox(
"Which data set do you want to use?",
("Synthetic data",),
)
if inputs["data"] == "Synthetic data":
inputs['n_samples'] = st.number_input(
"number of data points", 100, None, 1000,
)
inputs['n_features'] = st.number_input(
"number of features in data set", 2, None, 2,
)
inputs['n_centers'] = st.number_input(
"number of cluster centers", 3, None, 3,
)
inputs['cluster_std'] = st.number_input(
"Cluster standard deviation", 0.1, None, 1.0,
)
st.write("## Model Hyperparameter")
inputs['n_centroids'] = st.number_input(
"number of centroids", 2, None, 3,
)
inputs['max_iter'] = st.number_input(
"Maximum number of interation to perform in single run?", 100, None, 300,
)
return inputs
def OPTICS_sidebar():
inputs = {}
with st.sidebar:
st.write("## Input data")
inputs["data"] = st.selectbox(
"Which data set do you want to use?",
("Synthetic data",),
)
if inputs["data"] == "Synthetic data":
inputs['n_samples'] = st.number_input(
"number of data points", 100, None, 1000,
)
inputs['n_features'] = st.number_input(
"number of features in data set", 2, None, 2,
)
inputs['n_centers'] = st.number_input(
"number of cluster centers", 3, None, 3,
)
inputs['cluster_std'] = st.number_input(
"Cluster standard deviation", 0.1, None, 1.0,
)
st.write("## Model Hyperparameter")
inputs['min_samples'] = st.number_input(
"number of samples in neighborhood", 1, None, 5)
dist = st.selectbox(
"Which distance metric do you want to use?",
list(DISTANCE_METRICS.keys()))
inputs["metric"] = DISTANCE_METRICS[dist]
algo = st.selectbox(
"Which nearest neighbor search algorithm do you want to use?",
list(SEARCH_ALGORITHM.keys()))
inputs["search_algo"] = SEARCH_ALGORITHM[algo]
return inputs
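# Illustrative sketch (not part of the original module): one way the dictionary
# returned by DBSCAN_sidebar() could be consumed downstream. make_blobs and
# DBSCAN come from scikit-learn; the dictionary keys mirror the ones filled in
# above, and the metric/algorithm strings map directly onto sklearn arguments.
def run_dbscan_from_inputs(inputs):
    from sklearn.cluster import DBSCAN
    from sklearn.datasets import make_blobs
    X, _ = make_blobs(
        n_samples=inputs["n_samples"],
        n_features=inputs["n_features"],
        centers=inputs["n_centers"],
        cluster_std=inputs["cluster_std"],
    )
    model = DBSCAN(
        eps=inputs["eps"],
        min_samples=inputs["min_samples"],
        metric=inputs["metric"],
        algorithm=inputs["search_algo"],
    )
    return model.fit_predict(X)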
| 35.295652 | 85 | 0.527223 | 452 | 4,059 | 4.623894 | 0.181416 | 0.065072 | 0.105742 | 0.1 | 0.830144 | 0.820096 | 0.791388 | 0.791388 | 0.791388 | 0.791388 | 0 | 0.023702 | 0.345159 | 4,059 | 114 | 86 | 35.605263 | 0.762603 | 0 | 0 | 0.633028 | 0 | 0 | 0.305987 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027523 | false | 0 | 0.009174 | 0 | 0.06422 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b25c0df5bd70fe79122328b416562574cf4139f4 | 41,140 | py | Python | basty/project/behavior_mapping.py | bo1929/basty | 3ef84578e0154509346fdc2c0c56261448d78276 | [
"MIT"
] | 5 | 2021-12-10T17:43:52.000Z | 2022-03-01T22:19:36.000Z | basty/project/behavior_mapping.py | bo1929/basty | 3ef84578e0154509346fdc2c0c56261448d78276 | [
"MIT"
] | null | null | null | basty/project/behavior_mapping.py | bo1929/basty | 3ef84578e0154509346fdc2c0c56261448d78276 | [
"MIT"
] | null | null | null | from collections import defaultdict
import numpy as np
import umap
from hdbscan import HDBSCAN # approximate_predict,
from hdbscan import all_points_membership_vectors, membership_vector
from sklearn.preprocessing import normalize, scale
from tqdm import tqdm
import basty.utils.misc as misc
from basty.project.experiment_processing import Project
EPS = 10 ** (-5)
class BehaviorMixin(Project):
def __init__(
self,
main_cfg_path,
**kwargs,
):
Project.__init__(self, main_cfg_path, **kwargs)
self.init_behavior_mapping_postprocessing_kwargs(**kwargs)
def is_compatible_approach(self, expt_name1, name1, expt_name2, name2):
if expt_name1 in name2 and expt_name2 in name1:
approach1 = name2.replace(expt_name1, "").replace(expt_name2, "")
approach2 = name1.replace(expt_name2, "").replace(expt_name1, "")
else:
approach1 = name1
approach2 = name2
compatible = approach1 == approach2
if not compatible:
self.logger.direct_error(
f"Given approaches {approach1} and {approach2}) are not same."
"Hence they are not compatible."
)
return compatible
class BehaviorEmbedding(BehaviorMixin):
def __init__(
self,
main_cfg_path,
**kwargs,
):
BehaviorMixin.__init__(self, main_cfg_path, **kwargs)
self.init_behavior_embeddings_kwargs(**kwargs)
def _update_expt_record(self, expt_path, embedding_name):
expt_record = self._load_joblib_object(expt_path, "expt_record.z")
if self.use_annotations_to_mask and expt_record.has_annotation:
expt_record.use_annotations_to_mask[embedding_name] = True
else:
expt_record.use_annotations_to_mask[embedding_name] = False
self._save_joblib_object(expt_record, expt_path, "expt_record.z")
@misc.timeit
def compute_behavior_embedding(self, unannotated_expt_names, annotated_expt_names):
all_valid_expt_names = list(self.expt_path_dict.keys())
is_unannotated_valid = all(
[expt_name in all_valid_expt_names for expt_name in unannotated_expt_names]
)
is_annotated_valid = all(
[expt_name in all_valid_expt_names for expt_name in annotated_expt_names]
)
assert is_unannotated_valid and is_annotated_valid
assert unannotated_expt_names or annotated_expt_names
assert not (bool(set(unannotated_expt_names) & set(annotated_expt_names)))
X_expt_dict = defaultdict()
y_expt_dict = defaultdict()
expt_indices_dict = defaultdict(tuple)
def iterate_expt_for_embedding(expt_name):
expt_path = self.expt_path_dict[expt_name]
expt_record = self._load_joblib_object(expt_path, "expt_record.z")
X_expt = self._load_numpy_array(expt_path, "behavioral_reprs.npy")
return X_expt, expt_record, expt_path
prev = 0
for expt_name in unannotated_expt_names:
X_expt, expt_record, expt_path = iterate_expt_for_embedding(expt_name)
y_expt = np.zeros(X_expt.shape[0], dtype=int) - 1
if self.use_annotations_to_mask and expt_record.has_annotation:
mask_active = expt_record.mask_annotated
else:
mask_active = expt_record.mask_active
mask_dormant = expt_record.mask_dormant
X_expt_dict[expt_name] = X_expt[mask_dormant & mask_active]
y_expt_dict[expt_name] = y_expt[mask_dormant & mask_active]
expt_indices_dict[expt_name] = prev, prev + y_expt_dict[expt_name].shape[0]
prev = expt_indices_dict[expt_name][-1]
for expt_name in annotated_expt_names:
X_expt, expt_record, expt_path = iterate_expt_for_embedding(expt_name)
assert expt_record.has_annotation
y_expt = self._load_numpy_array(expt_path, "annotations.npy")
if self.use_annotations_to_mask and expt_record.has_annotation:
mask_active = expt_record.mask_annotated
else:
mask_active = expt_record.mask_active
mask_dormant = expt_record.mask_dormant
X_expt_dict[expt_name] = X_expt[mask_dormant & mask_active]
y_expt_dict[expt_name] = y_expt[mask_dormant & mask_active]
expt_indices_dict[expt_name] = (
prev,
prev + y_expt_dict[expt_name].shape[0],
)
prev = expt_indices_dict[expt_name][-1]
X = np.concatenate(list(X_expt_dict.values()), axis=0)
y = np.concatenate(list(y_expt_dict.values()), axis=0)
umap_transformer = umap.UMAP(**self.UMAP_kwargs)
if annotated_expt_names:
embedding = umap_transformer.fit_transform(X, y=y)
else:
embedding = umap_transformer.fit_transform(X)
return embedding, expt_indices_dict
@misc.timeit
def compute_semisupervised_pair_embeddings(self):
all_expt_names = list(self.expt_path_dict.keys())
annotated_expt_names = list(self.annotation_path_dict.keys())
unannotated_expt_names = list(set(all_expt_names) - set(annotated_expt_names))
assert all_expt_names
assert annotated_expt_names
assert unannotated_expt_names
pbar = tqdm(
misc.list_cartesian_product(annotated_expt_names, unannotated_expt_names)
)
for ann_expt_name, unann_expt_name in pbar:
pair_name_msg = (
f"(annotated) {ann_expt_name} and (unannotated) {unann_expt_name}"
)
pbar.set_description(
f"Computing semisupervised embeddding for {pair_name_msg}"
)
unann_expt_path = self.expt_path_dict[unann_expt_name]
unann_embedding_name = f"semisupervised_pair_embedding_{ann_expt_name}"
self._update_expt_record(unann_expt_path, unann_embedding_name)
ann_expt_path = self.expt_path_dict[ann_expt_name]
ann_embedding_name = f"semisupervised_pair_embedding_{unann_expt_name}"
self._update_expt_record(ann_expt_path, ann_embedding_name)
embedding, expt_indices_dict = self.compute_behavior_embedding(
[unann_expt_name], [ann_expt_name]
)
start, end = expt_indices_dict[unann_expt_name]
embedding_expt = embedding[start:end]
self._save_numpy_array(
embedding_expt,
unann_expt_path / "embeddings",
f"{unann_embedding_name}.npy",
depth=3,
)
start, end = expt_indices_dict[ann_expt_name]
embedding_expt = embedding[start:end]
self._save_numpy_array(
embedding_expt,
ann_expt_path / "embeddings",
f"{ann_embedding_name}.npy",
depth=3,
)
@misc.timeit
def compute_unsupervised_disparate_embeddings(self):
all_expt_names = list(self.expt_path_dict.keys())
assert all_expt_names
pbar = tqdm(all_expt_names)
for expt_name in pbar:
pbar.set_description(
f"Computing unsupervised disparate embeddding for {expt_name}"
)
expt_path = self.expt_path_dict[expt_name]
embedding_name = "unsupervised_disparate_embedding"
self._update_expt_record(expt_path, embedding_name)
embedding, expt_indices_dict = self.compute_behavior_embedding(
[expt_name], []
)
start, end = expt_indices_dict[expt_name]
embedding_expt = embedding[start:end]
self._save_numpy_array(
embedding_expt,
expt_path / "embeddings",
f"{embedding_name}.npy",
depth=3,
)
@misc.timeit
def compute_supervised_disparate_embeddings(self):
annotated_expt_names = list(self.annotation_path_dict.keys())
assert annotated_expt_names
pbar = tqdm(annotated_expt_names)
for ann_expt_name in pbar:
pbar.set_description(
f"Computing supervised disparate embeddding for {ann_expt_name}"
)
ann_expt_path = self.expt_path_dict[ann_expt_name]
ann_embedding_name = "supervised_disparate_embedding"
self._update_expt_record(ann_expt_path, ann_embedding_name)
embedding, expt_indices_dict = self.compute_behavior_embedding(
[], [ann_expt_name]
)
start, end = expt_indices_dict[ann_expt_name]
embedding_expt = embedding[start:end]
self._save_numpy_array(
embedding_expt,
ann_expt_path / "embeddings",
f"{ann_embedding_name}.npy",
depth=3,
)
@misc.timeit
def compute_unsupervised_joint_embeddings(self):
all_expt_names = list(self.expt_path_dict.keys())
assert all_expt_names
embedding, expt_indices_dict = self.compute_behavior_embedding(
all_expt_names, []
)
pbar = tqdm(all_expt_names)
        for expt_name in pbar:
            pbar.set_description(
                "Computing joint unsupervised embedding for all experiments"
)
expt_path = self.expt_path_dict[expt_name]
embedding_name = "unsupervised_joint_embedding"
self._update_expt_record(expt_path, embedding_name)
start, end = expt_indices_dict[expt_name]
embedding_expt = embedding[start:end]
self._save_numpy_array(
embedding_expt,
expt_path / "embeddings",
f"{embedding_name}.npy",
depth=3,
)
@misc.timeit
def compute_supervised_joint_embeddings(self):
annotated_expt_names = list(self.annotation_path_dict.keys())
assert annotated_expt_names
embedding, expt_indices_dict = self.compute_behavior_embedding(
[], annotated_expt_names
)
pbar = tqdm(annotated_expt_names)
for ann_expt_name in pbar:
pbar.set_description(
"Computing joint unsupervised embeddding for annotated experiments"
)
ann_expt_path = self.expt_path_dict[ann_expt_name]
ann_embedding_name = "supervised_joint_embedding"
self._update_expt_record(ann_expt_path, ann_embedding_name)
start, end = expt_indices_dict[ann_expt_name]
embedding_expt = embedding[start:end]
self._save_numpy_array(
embedding_expt,
ann_expt_path / "embeddings",
f"{ann_embedding_name}.npy",
depth=3,
)
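# Illustrative sketch (synthetic arrays, not part of the basty pipeline): the
# semi-supervised convention used by compute_behavior_embedding above. Frames
# from annotated experiments keep their integer behavior labels, frames from
# unannotated experiments are marked with -1, and umap.UMAP.fit_transform(X, y=y)
# treats -1 as "no label" while still letting the labelled points shape the
# embedding. The UMAP keyword arguments here are placeholders; the project reads
# its own values from the configured UMAP_kwargs.
def _example_semisupervised_umap(n_annotated=200, n_unannotated=300, n_feats=16):
    rng = np.random.default_rng(0)
    X = rng.normal(size=(n_annotated + n_unannotated, n_feats))
    y = np.concatenate(
        [rng.integers(0, 3, size=n_annotated), np.full(n_unannotated, -1)]
    )
    reducer = umap.UMAP(n_neighbors=15, min_dist=0.1, n_components=2)
    return reducer.fit_transform(X, y=y)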
class BehaviorClustering(BehaviorMixin):
def __init__(
self,
main_cfg_path,
**kwargs,
):
BehaviorMixin.__init__(self, main_cfg_path, **kwargs)
self.init_behavior_clustering_kwargs(**kwargs)
@misc.timeit
def jointly_cluster(self, expt_names, embedding_names):
embedding_expt_dict = defaultdict()
expt_indices_dict = defaultdict(tuple)
prev = 0
pbar = tqdm(expt_names)
for i, expt_name in enumerate(pbar):
embedding_name = embedding_names[i]
embedding_name_msg = " ".join(embedding_name.split("_"))
self.logger.direct_info(
f"Loading {embedding_name_msg} of {expt_name} for joint clustering."
)
expt_path = self.expt_path_dict[expt_name]
embedding_expt = self._load_numpy_array(
expt_path / "embeddings", f"{embedding_name}.npy"
)
embedding_expt_dict[expt_name] = embedding_expt
expt_indices_dict[expt_name] = prev, prev + embedding_expt.shape[0]
prev = expt_indices_dict[expt_name][-1]
embedding = np.concatenate(list(embedding_expt_dict.values()), axis=0)
clusterer = HDBSCAN(**self.HDBSCAN_kwargs)
cluster_labels = (clusterer.fit_predict(embedding) + 1).astype(int)
pbar = tqdm(expt_names)
for i, expt_name in enumerate(pbar):
embedding_name = embedding_names[i]
expt_path = self.expt_path_dict[expt_name]
start, end = expt_indices_dict[expt_name]
expt_indices_dict[expt_name] = prev, prev + embedding_expt.shape[0]
cluster_labels_expt = cluster_labels[start:end]
self._save_numpy_array(
cluster_labels_expt,
expt_path / "clusterings",
f"labels_joint_cluster_{embedding_name}.npy",
depth=3,
)
cluster_membership = all_points_membership_vectors(clusterer)[start:end]
cluster_membership = np.hstack(
(
1 - np.sum(cluster_membership[:, :], axis=1, keepdims=True),
cluster_membership,
)
)
self._save_numpy_array(
cluster_membership,
expt_path / "clusterings",
f"membership_joint_cluster_{embedding_name}.npy",
depth=3,
)
@misc.timeit
def jointly_cluster_supervised_joint(self):
ann_expt_names = list(self.annotation_path_dict.keys())
embedding_names = ["supervised_joint_embedding" for _ in ann_expt_names]
self.jointly_cluster(ann_expt_names, embedding_names)
@misc.timeit
def jointly_cluster_unsupervised_joint(self):
all_expt_names = list(self.expt_path_dict.keys())
embedding_names = ["unsupervised_joint_embedding" for _ in all_expt_names]
self.jointly_cluster(all_expt_names, embedding_names)
@misc.timeit
def jointly_cluster_semisupervised_pair(self):
all_expt_names = list(self.expt_path_dict.keys())
annotated_expt_names = list(self.annotation_path_dict.keys())
unannotated_expt_names = list(set(all_expt_names) - set(annotated_expt_names))
for ann_expt_name, unann_expt_name in misc.list_cartesian_product(
annotated_expt_names, unannotated_expt_names
):
embedding_names = [
f"semisupervised_pair_embedding_{ann_expt_name}",
f"semisupervised_pair_embedding_{unann_expt_name}",
]
self.jointly_cluster([unann_expt_name, ann_expt_name], embedding_names)
@misc.timeit
def disparately_cluster(self, expt_names, embedding_names):
pbar = tqdm(expt_names)
for i, expt_name in enumerate(pbar):
embedding_name = embedding_names[i]
embedding_name_msg = " ".join(embedding_name.split("_"))
pbar.set_description(
f"Disparately clustering {embedding_name_msg} of {expt_name}"
)
expt_path = self.expt_path_dict[expt_name]
embedding_expt = self._load_numpy_array(
expt_path / "embeddings", f"{embedding_name}.npy"
)
clusterer = HDBSCAN(**self.HDBSCAN_kwargs)
cluster_labels = (clusterer.fit_predict(embedding_expt) + 1).astype(int)
self._save_numpy_array(
cluster_labels,
expt_path / "clusterings",
f"labels_disparate_cluster_{embedding_name}.npy",
depth=3,
)
cluster_membership = all_points_membership_vectors(clusterer)
cluster_membership = np.hstack(
(
1 - np.sum(cluster_membership[:, :], axis=1, keepdims=True),
cluster_membership,
)
)
self._save_numpy_array(
cluster_membership,
expt_path / "clusterings",
f"membership_disparate_cluster_{embedding_name}.npy",
depth=3,
)
@misc.timeit
def disparately_cluster_supervised_joint(self):
annotated_expt_names = list(self.annotation_path_dict.keys())
embedding_name = ["supervised_joint_embedding" for _ in annotated_expt_names]
self.disparately_cluster(annotated_expt_names, embedding_name)
@misc.timeit
def disparately_cluster_unsupervised_joint(self):
all_expt_names = list(self.expt_path_dict.keys())
embedding_name = ["unsupervised_joint_embedding" for _ in all_expt_names]
self.disparately_cluster(all_expt_names, embedding_name)
@misc.timeit
def disparately_cluster_supervised_disparate(self):
annotated_expt_names = list(self.annotation_path_dict.keys())
embedding_name = [
"supervised_disparate_embedding" for _ in annotated_expt_names
]
self.disparately_cluster(annotated_expt_names, embedding_name)
@misc.timeit
def disparately_cluster_unsupervised_disparate(self):
all_expt_names = list(self.expt_path_dict.keys())
embedding_name = ["unsupervised_disparate_embedding" for _ in all_expt_names]
self.disparately_cluster(all_expt_names, embedding_name)
@misc.timeit
def disparately_cluster_semisupervised_pair(self):
all_expt_names = list(self.expt_path_dict.keys())
annotated_expt_names = list(self.annotation_path_dict.keys())
unannotated_expt_names = list(set(all_expt_names) - set(annotated_expt_names))
for ann_expt_name, unann_expt_name in misc.list_cartesian_product(
annotated_expt_names, unannotated_expt_names
):
embedding_names = [f"semisupervised_pair_embedding_{unann_expt_name}"]
embedding_names = [f"semisupervised_pair_embedding_{ann_expt_name}"]
self.disparately_cluster([ann_expt_name], embedding_names)
self.disparately_cluster([unann_expt_name], embedding_names)
@misc.timeit
def crosswisely_cluster(
self, expt_names1, expt_names2, embedding_names1, embedding_names2
):
embedding_expt_dict = defaultdict()
expt_indices_dict = defaultdict(tuple)
for idx1, expt_name1 in enumerate(expt_names1):
embedding_name1 = embedding_names1[idx1]
for idx2, expt_name2 in enumerate(expt_names2):
embedding_name2 = embedding_names2[idx2]
assert self.is_compatible_approach(
expt_name1, embedding_name1, expt_name2, embedding_name2
)
for idx11, expt_name11 in enumerate(expt_names1[idx1 + 1 :]):
embedding_name11 = embedding_names1[idx11]
assert self.is_compatible_approach(
expt_name1, embedding_name1, expt_name11, embedding_name11
)
prev = 0
pbar = tqdm(expt_names1)
for i, expt_name in enumerate(pbar):
embedding_name = embedding_names1[i]
embedding_name_msg = " ".join(embedding_name.split("_"))
self.logger.direct_info(
f"Loading {embedding_name_msg} of {expt_name} for crosswise clustering."
)
expt_path = self.expt_path_dict[expt_name]
embedding_expt = self._load_numpy_array(
expt_path / "embeddings", f"{embedding_name}.npy"
)
embedding_expt_dict[expt_name] = embedding_expt
expt_indices_dict[expt_name] = prev, prev + embedding_expt.shape[0]
prev = expt_indices_dict[expt_name][-1]
embedding = np.concatenate(list(embedding_expt_dict.values()), axis=0)
clusterer = HDBSCAN(**self.HDBSCAN_kwargs)
cluster_labels = (clusterer.fit_predict(embedding) + 1).astype(int)
cluster_membership = all_points_membership_vectors(clusterer)
clustered_expt_names = "_".join(expt_names1)
pbar = tqdm(expt_names1)
for i, expt_name in enumerate(pbar):
embedding_name = embedding_names1[i]
expt_path = self.expt_path_dict[expt_name]
start, end = expt_indices_dict[expt_name]
expt_indices_dict[expt_name] = prev, prev + embedding_expt.shape[0]
cluster_membership_expt = cluster_membership[start:end]
cluster_membership_expt = np.hstack(
(
1 - np.sum(cluster_membership_expt[:, :], axis=1, keepdims=True),
cluster_membership_expt,
)
)
self._save_numpy_array(
cluster_membership_expt,
expt_path / "clusterings",
f"membership_crosswise_cluster_{embedding_name}_{clustered_expt_names}.npy",
depth=3,
)
cluster_labels_expt = cluster_labels[start:end]
self._save_numpy_array(
cluster_labels_expt,
expt_path / "clusterings",
f"labels_crosswise_cluster_{embedding_name}_{clustered_expt_names}.npy",
depth=3,
)
pbar = tqdm(expt_names2)
for i, expt_name in enumerate(pbar):
embedding_name = embedding_names2[i]
embedding_name_msg = " ".join(embedding_name.split("_"))
self.logger.direct_info(
f"Crosswisely clustering {embedding_name_msg} of {expt_name}"
)
expt_path = self.expt_path_dict[expt_name]
embedding_expt = self._load_numpy_array(
expt_path / "embeddings", f"{embedding_name}.npy"
)
cluster_membership_expt = membership_vector(clusterer, embedding_expt)
cluster_membership_expt = np.hstack(
(
1 - np.sum(cluster_membership_expt[:, 1:], axis=1, keepdims=True),
cluster_membership_expt,
)
)
self._save_numpy_array(
cluster_membership_expt,
expt_path / "clusterings",
f"membership_crosswise_cluster_{embedding_name}_{clustered_expt_names}.npy",
depth=3,
)
# cluster_labels_expt = (
# approximate_predict(clusterer, embedding_expt) + 1
# ).astype(int)
cluster_labels_expt = np.argmax(cluster_membership_expt, axis=1)
self._save_numpy_array(
cluster_labels_expt,
expt_path / "clusterings",
f"labels_crosswise_cluster_{embedding_name}_{clustered_expt_names}.npy",
depth=3,
)
@misc.timeit
def crosswisely_cluster_semisupervised_pair(self):
all_expt_names = list(self.expt_path_dict.keys())
annotated_expt_names = list(self.annotation_path_dict.keys())
unannotated_expt_names = list(set(all_expt_names) - set(annotated_expt_names))
for ann_expt_name, unann_expt_name in misc.list_cartesian_product(
annotated_expt_names, unannotated_expt_names
):
ann_embedding_name = f"semisupervised_pair_embedding_{unann_expt_name}"
unann_embedding_name = f"semisupervised_pair_embedding_{ann_expt_name}"
self.crosswisely_cluster(
[ann_expt_name],
[unann_expt_name],
[ann_embedding_name],
[unann_embedding_name],
)
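# Illustrative sketch (synthetic blobs, not part of the basty pipeline): the soft
# clustering pattern shared by the *_cluster methods above. HDBSCAN must be fit
# with prediction_data=True for all_points_membership_vectors to work; the extra
# first column appended below holds the leftover probability mass, matching the
# "noise / no cluster" label 0 produced by the +1 shift applied to fit_predict.
def _example_soft_membership(points_per_blob=500):
    rng = np.random.default_rng(0)
    centers = np.array([[0.0, 0.0], [5.0, 5.0], [0.0, 5.0]])
    embedding = np.vstack(
        [rng.normal(loc=c, scale=0.5, size=(points_per_blob, 2)) for c in centers]
    )
    clusterer = HDBSCAN(min_cluster_size=15, prediction_data=True)
    cluster_labels = (clusterer.fit_predict(embedding) + 1).astype(int)
    membership = all_points_membership_vectors(clusterer)
    membership = np.hstack(
        (1 - np.sum(membership, axis=1, keepdims=True), membership)
    )
    return cluster_labels, membership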
class BehaviorCorrespondence(BehaviorMixin):
def __init__(
self,
main_cfg_path,
**kwargs,
):
BehaviorMixin.__init__(self, main_cfg_path, **kwargs)
self.init_behavior_correspondence_kwargs(**kwargs)
@misc.timeit
def map_cluster_labels_to_behavior_labels(self, expt_name, clustering_name):
expt_path = self.expt_path_dict[expt_name]
expt_record = self._load_joblib_object(expt_path, "expt_record.z")
assert expt_record.has_annotation
y_ann = self._load_numpy_array(expt_path, "annotations.npy")
using_annotations_to_mask = [
key for key, val in expt_record.use_annotations_to_mask.items() if val
]
if any([name in clustering_name for name in using_annotations_to_mask]):
y_ann = y_ann[expt_record.mask_dormant & expt_record.mask_annotated]
else:
y_ann = y_ann[expt_record.mask_dormant & expt_record.mask_active]
y_cluster = self._load_numpy_array(
expt_path / "clusterings", f"labels_{clustering_name}.npy"
)
mapping_dictionary = defaultdict(dict)
y_cluster_uniq, cluster_uniq_counts = np.unique(y_cluster, return_counts=True)
y_ann_uniq, ann_uniq_counts = np.unique(y_ann, return_counts=True)
ann_counts_ref = {
y_ann_uniq[i]: ann_uniq_counts[i] for i in range(y_ann_uniq.shape[0])
}
for idx1, cluster_lbl in enumerate(y_cluster_uniq):
y_ann_masked = y_ann[y_cluster == cluster_lbl]
y_ann_uniq_cluster, ann_uniq_cluster_counts = np.unique(
y_ann_masked, return_counts=True
)
mapping_dictionary[int(cluster_lbl)] = {
key: 0 for key in expt_record.label_to_behavior.keys()
}
for idx2, ann_lbl in enumerate(y_ann_uniq_cluster):
ann_cluster_count = ann_uniq_cluster_counts[idx2]
tf = ann_cluster_count / (cluster_uniq_counts[idx1] + 1)
# tf = 0.5 + 0.5 * (ann_cluster_count / max(ann_uniq_cluster_counts))
# tf = np.log2(ann_cluster_count / (cluster_uniq_counts[idx1] + 1))
# tf = np.log2(1 + ann_cluster_count / (cluster_uniq_counts[idx1] + 1))
# tf = np.log2(1 + ann_cluster_count)
denom = cluster_uniq_counts[idx1] / ann_counts_ref[ann_lbl]
# y_cluster_masked = y_cluster[y_ann == ann_lbl]
# na_max = max(np.unique(y_cluster_masked, return_counts=True)[1])
# denom = na_max / ann_counts_ref[ann_lbl]
# nc = len(np.unique(y_cluster[y_ann == ann_lbl]))
# max_count_ann_lbl = y_ann_uniq_cluster[np.argmax(ann_uniq_cluster_counts)]
# nc_max = len(np.unique(y_cluster[y_ann == max_count_ann_lbl]))
# idf = np.log2(nc_max / (nc + 1)) + 1
# idf = np.log2(len(y_cluster_uniq) / (1 + nc)) + 1
# denom = idf
mapping_dictionary[cluster_lbl][ann_lbl] = float(tf * denom)
            # L1 normalization of mapping weights.
            sum_weights = sum(mapping_dictionary[int(cluster_lbl)].values())
for ann_lbl in y_ann_uniq_cluster:
mapping_dictionary[cluster_lbl][ann_lbl] = (
mapping_dictionary[cluster_lbl][ann_lbl] / sum_weights
)
assert abs(sum(mapping_dictionary[int(cluster_lbl)].values()) - 1) < EPS
self._save_yaml_dictionary(
dict(mapping_dictionary),
expt_path / "correspondences",
f"mapping_{clustering_name}.yaml",
depth=3,
)
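        # Editor's note (illustrative, not part of the original file): a worked example of
        # the weighting above under assumed counts. If cluster 3 holds 100 frames, 60 of
        # which are annotated "walk", and "walk" occurs 200 times overall, then
        # tf = 60 / (100 + 1) ~= 0.594 and denom = 100 / 200 = 0.5, giving a raw weight of
        # ~0.297 for ("walk" | cluster 3) before the per-cluster L1 normalization rescales
        # the weights of cluster 3 to sum to 1.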
@misc.timeit
def map_disparate_cluster_semisupervised_pair(self):
all_expt_names = list(self.expt_path_dict.keys())
annotated_expt_names = list(self.annotation_path_dict.keys())
unannotated_expt_names = list(set(all_expt_names) - set(annotated_expt_names))
pbar = tqdm(
misc.list_cartesian_product(annotated_expt_names, unannotated_expt_names)
)
for ann_expt_name, unann_expt_name in pbar:
embedding_name = f"semisupervised_pair_embedding_{unann_expt_name}"
clustering_name = f"disparate_cluster_{embedding_name}"
pbar.set_description(
f"Mapping cluster labels of {clustering_name} to behavior labels"
)
self.map_cluster_labels_to_behavior_labels(ann_expt_name, clustering_name)
@misc.timeit
def map_disparate_cluster_supervised_disparate(self):
annotated_expt_names = list(self.annotation_path_dict.keys())
pbar = tqdm(annotated_expt_names)
for ann_expt_name in pbar:
embedding_name = "supervised_disparate_embedding"
clustering_name = f"disparate_cluster_{embedding_name}"
pbar.set_description(
f"Mapping cluster labels of {clustering_name} to behavior labels"
)
self.map_cluster_labels_to_behavior_labels(ann_expt_name, clustering_name)
@misc.timeit
def map_disparate_cluster_supervised_joint(self):
annotated_expt_names = list(self.annotation_path_dict.keys())
pbar = tqdm(annotated_expt_names)
for ann_expt_name in pbar:
embedding_name = "supervised_joint_embedding"
clustering_name = f"disparate_cluster_{embedding_name}"
pbar.set_description(
f"Mapping cluster labels of {clustering_name} to behavior labels"
)
self.map_cluster_labels_to_behavior_labels(ann_expt_name, clustering_name)
@misc.timeit
def map_disparate_cluster_unsupervised_disparate(self):
annotated_expt_names = list(self.annotation_path_dict.keys())
pbar = tqdm(annotated_expt_names)
for ann_expt_name in pbar:
embedding_name = "unsupervised_disparate_embedding"
clustering_name = f"disparate_cluster_{embedding_name}"
pbar.set_description(
f"Mapping cluster labels of {clustering_name} to behavior labels"
)
self.map_cluster_labels_to_behavior_labels(ann_expt_name, clustering_name)
@misc.timeit
def map_disparate_cluster_unsupervised_joint(self):
annotated_expt_names = list(self.annotation_path_dict.keys())
pbar = tqdm(annotated_expt_names)
for ann_expt_name in pbar:
embedding_name = "unsupervised_joint_embedding"
clustering_name = f"disparate_cluster_{embedding_name}"
pbar.set_description(
f"Mapping cluster labels of {clustering_name} to behavior labels"
)
self.map_cluster_labels_to_behavior_labels(ann_expt_name, clustering_name)
@misc.timeit
def map_joint_cluster_semisupervised_pair(self):
all_expt_names = list(self.expt_path_dict.keys())
annotated_expt_names = list(self.annotation_path_dict.keys())
unannotated_expt_names = list(set(all_expt_names) - set(annotated_expt_names))
pbar = tqdm(
misc.list_cartesian_product(annotated_expt_names, unannotated_expt_names)
)
for ann_expt_name, unann_expt_name in pbar:
embedding_name = f"semisupervised_pair_embedding_{unann_expt_name}"
clustering_name = f"joint_cluster_{embedding_name}"
pbar.set_description(
f"Mapping cluster labels of {clustering_name} to behavior labels"
)
self.map_cluster_labels_to_behavior_labels(ann_expt_name, clustering_name)
@misc.timeit
def map_joint_cluster_supervised_joint(self):
annotated_expt_names = list(self.annotation_path_dict.keys())
pbar = tqdm(annotated_expt_names)
for ann_expt_name in pbar:
embedding_name = "supervised_joint_embedding"
clustering_name = f"joint_cluster_{embedding_name}"
pbar.set_description(
f"Mapping cluster labels of {clustering_name} to behavior labels"
)
self.map_cluster_labels_to_behavior_labels(ann_expt_name, clustering_name)
@misc.timeit
def map_joint_cluster_unsupervised_joint(self):
annotated_expt_names = list(self.annotation_path_dict.keys())
pbar = tqdm(annotated_expt_names)
for ann_expt_name in pbar:
embedding_name = "unsupervised_joint_embedding"
clustering_name = f"joint_cluster_{embedding_name}"
pbar.set_description(
f"Mapping cluster labels of {clustering_name} to behavior labels"
)
self.map_cluster_labels_to_behavior_labels(ann_expt_name, clustering_name)
@misc.timeit
def map_crosswise_cluster_semisupervised_pair(self):
all_expt_names = list(self.expt_path_dict.keys())
annotated_expt_names = list(self.annotation_path_dict.keys())
unannotated_expt_names = list(set(all_expt_names) - set(annotated_expt_names))
pbar = tqdm(
misc.list_cartesian_product(annotated_expt_names, unannotated_expt_names)
)
for ann_expt_name, unann_expt_name in pbar:
embedding_name1 = f"semisupervised_pair_embedding_{unann_expt_name}"
clustering_name = f"crosswise_cluster_{embedding_name1}_{ann_expt_name}"
pbar.set_description(
f"Mapping cluster labels of {clustering_name} to behavior labels"
)
self.map_cluster_labels_to_behavior_labels(ann_expt_name, clustering_name)
@misc.timeit
def disparately_compute_behavior_score(self, expt_names, clustering_names):
pbar = tqdm(expt_names)
for i, expt_name in enumerate(pbar):
expt_path = self.expt_path_dict[expt_name]
clustering_name = clustering_names[i]
expt_record = self._load_joblib_object(expt_path, "expt_record.z")
assert expt_record.has_annotation
label_to_behavior = expt_record.label_to_behavior
behavior_to_label = expt_record.behavior_to_label
inactive_annotation = expt_record.inactive_annotation
num_behavior = len(label_to_behavior)
assert num_behavior > 1
mapping = self._load_yaml_dictionary(
expt_path / "correspondences",
f"mapping_{clustering_name}.yaml",
)
cluster_membership = self._load_numpy_array(
expt_path / "clusterings",
f"membership_{clustering_name}.npy",
)
behavior_score = np.zeros((cluster_membership.shape[0], num_behavior))
for cluster_lbl, behavior_weights in mapping.items():
for behavior_lbl, weight in behavior_weights.items():
if behavior_lbl == behavior_to_label[inactive_annotation]:
weight += (
np.sqrt(1 / (num_behavior - 1))
if not cluster_lbl
else 1 / (num_behavior - 1)
)
behavior_score[:, behavior_lbl] = (
behavior_score[:, behavior_lbl]
+ cluster_membership[:, cluster_lbl]
* weight
* cluster_membership.shape[1]
)
behavior_score = normalize(behavior_score, norm="l1")
# behavior_score = scale(behavior_score, axis=0)
self._save_numpy_array(
behavior_score,
expt_path / "correspondences",
f"score_{clustering_name.replace('cluster', 'behavior')}.npy",
depth=3,
)
@misc.timeit
def crosswisely_compute_behavior_score(
self, expt_names1, expt_names2, clustering_names1, clustering_names2
):
total_mapping = defaultdict(dict)
label_to_behavior = defaultdict()
behavior_to_label = defaultdict()
inactive_annotation = str()
for idx1, expt_name1 in enumerate(expt_names1):
clustering_name1 = clustering_names1[idx1]
for idx2, expt_name2 in enumerate(expt_names2):
clustering_name2 = clustering_names2[idx2]
assert self.is_compatible_approach(
expt_name1, clustering_name1, expt_name2, clustering_name2
)
        assert all("disparate_cluster" not in name for name in clustering_names1)
        assert all("disparate_cluster" not in name for name in clustering_names2)
for idx, expt_name in enumerate(expt_names1):
expt_path = self.expt_path_dict[expt_name]
clustering_name = clustering_names1[idx]
expt_record = self._load_joblib_object(expt_path, "expt_record.z")
assert idx == 0 or (label_to_behavior == expt_record.label_to_behavior)
assert idx == 0 or (behavior_to_label == expt_record.behavior_to_label)
assert idx == 0 or (inactive_annotation == expt_record.inactive_annotation)
assert expt_record.has_annotation
label_to_behavior = expt_record.label_to_behavior
behavior_to_label = expt_record.behavior_to_label
inactive_annotation = expt_record.inactive_annotation
mapping = self._load_yaml_dictionary(
expt_path / "correspondences",
f"mapping_{clustering_name}.yaml",
)
for cluster_lbl, behavior_weights in mapping.items():
for behavior_lbl, weight in behavior_weights.items():
weight_n = weight / len(expt_names1)
total_mapping[cluster_lbl][behavior_lbl] = (
total_mapping[cluster_lbl].get(behavior_lbl, 0) + weight_n
)
num_behavior = len(label_to_behavior)
assert num_behavior > 1
expt_names = expt_names1 + expt_names2
clustering_names = clustering_names1 + clustering_names2
for idx, expt_name in enumerate(expt_names):
expt_path = self.expt_path_dict[expt_name]
clustering_name = clustering_names[idx]
cluster_membership = self._load_numpy_array(
expt_path / "clusterings",
f"membership_{clustering_name}.npy",
)
behavior_score = np.zeros((cluster_membership.shape[0], num_behavior))
for cluster_lbl, behavior_weights in total_mapping.items():
for behavior_lbl, weight in behavior_weights.items():
if behavior_lbl == behavior_to_label[inactive_annotation]:
weight += (
np.sqrt(1 / (num_behavior - 1))
if not cluster_lbl
else 1 / (num_behavior - 1)
)
cluster_score = (
cluster_membership[:, cluster_lbl]
* weight
* cluster_membership.shape[1]
)
behavior_score[:, behavior_lbl] = (
behavior_score[:, behavior_lbl] + cluster_score
)
behavior_score = normalize(behavior_score, norm="l1")
# behavior_score = scale(behavior_score, axis=0)
self._save_numpy_array(
behavior_score,
expt_path / "correspondences",
f"score_{clustering_name.replace('cluster', 'behavior')}.npy",
depth=3,
)
@misc.timeit
def disparately_compute_behavior_score_disparate_cluster_supervised_disparate(
self,
):
annotated_expt_names = list(self.annotation_path_dict.keys())
for ann_expt_name in annotated_expt_names:
ann_embedding_name = "supervised_disparate_embedding"
ann_clustering_name = f"disparate_cluster_{ann_embedding_name}"
self.disparately_compute_behavior_score(
[ann_expt_name],
[ann_clustering_name],
)
@misc.timeit
def crosswisely_compute_behavior_score_crosswise_cluster_semisupervised_pair(
self,
):
all_expt_names = list(self.expt_path_dict.keys())
annotated_expt_names = list(self.annotation_path_dict.keys())
unannotated_expt_names = list(set(all_expt_names) - set(annotated_expt_names))
for ann_expt_name, unann_expt_name in misc.list_cartesian_product(
annotated_expt_names, unannotated_expt_names
):
ann_embedding_name = f"semisupervised_pair_embedding_{unann_expt_name}"
unann_embedding_name = f"semisupervised_pair_embedding_{ann_expt_name}"
ann_clustering_name = (
f"crosswise_cluster_{ann_embedding_name}_{ann_expt_name}"
)
unann_clustering_name = (
f"crosswise_cluster_{unann_embedding_name}_{ann_expt_name}"
)
self.crosswisely_compute_behavior_score(
[ann_expt_name],
[unann_expt_name],
[ann_clustering_name],
[unann_clustering_name],
)
class BehaviorMapping(BehaviorEmbedding, BehaviorClustering, BehaviorCorrespondence):
def __init__(
self,
main_cfg_path,
**kwargs,
):
BehaviorEmbedding.__init__(self, main_cfg_path, **kwargs)
BehaviorClustering.__init__(self, main_cfg_path, **kwargs)
BehaviorCorrespondence.__init__(self, main_cfg_path, **kwargs)
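# Editor's note: illustrative usage sketch only, not part of the original module. The
# config path is hypothetical, and the sketch assumes the semisupervised pair embeddings
# have already been produced by the corresponding BehaviorEmbedding step.
if __name__ == "__main__":
    behavior_mapping = BehaviorMapping("path/to/main_cfg.yaml")
    behavior_mapping.crosswisely_cluster_semisupervised_pair()
    behavior_mapping.map_crosswise_cluster_semisupervised_pair()
    behavior_mapping.crosswisely_compute_behavior_score_crosswise_cluster_semisupervised_pair()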
| 42.720665 | 92 | 0.63335 | 4,644 | 41,140 | 5.168174 | 0.047158 | 0.044998 | 0.043498 | 0.024082 | 0.837382 | 0.796842 | 0.75801 | 0.731178 | 0.706929 | 0.669264 | 0 | 0.007028 | 0.290958 | 41,140 | 962 | 93 | 42.765073 | 0.81577 | 0.02246 | 0 | 0.599515 | 0 | 0 | 0.105784 | 0.058913 | 0 | 0 | 0 | 0 | 0.03034 | 1 | 0.048544 | false | 0 | 0.010922 | 0 | 0.069175 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
b2a0c81ad451f155e16456dc9637073cb6b6656c | 1,695 | py | Python | python/tools/validators.py | sshcrack/dancyPi-audio-reactive-led | 955c4765eadb3642a6ef1f1da96d23087155ff8c | [
"MIT"
] | null | null | null | python/tools/validators.py | sshcrack/dancyPi-audio-reactive-led | 955c4765eadb3642a6ef1f1da96d23087155ff8c | [
"MIT"
] | null | null | null | python/tools/validators.py | sshcrack/dancyPi-audio-reactive-led | 955c4765eadb3642a6ef1f1da96d23087155ff8c | [
"MIT"
] | null | null | null | from tools.tools import check_float, check_int
def validate_int(param_name: str, min_val=None, max_val=None):
    def func(param):
        if param is None or len(param) == 0 or param[0] is None:
            return {
                "error": f"{param_name} has to have a value",
"result": None
}
param = param[0]
if not check_int(param):
return {
"error": f"{param_name} has to be an integer",
"result": None
}
res = int(param)
        if (min_val is not None and res < min_val) or (max_val is not None and res > max_val):
            return {
                "error": f"{param_name} cannot be below {min_val} or above {max_val}",
"result": None
}
return {
"result": res
}
return func
def validate_float(param_name: str, min_val=None, max_val=None):
    def func(param):
        if param is None or len(param) == 0 or param[0] is None:
            return {
                "error": f"{param_name} has to have a value",
"result": None
}
param = param[0]
if not check_float(param):
return {
"error": f"{param_name} has to be a float",
"result": None
}
res = float(param)
        if (min_val is not None and res < min_val) or (max_val is not None and res > max_val):
            return {
                "error": f"{param_name} cannot be below {min_val} or above {max_val}",
"result": None
}
return {
"result": res
}
return func | 28.728814 | 92 | 0.473746 | 208 | 1,695 | 3.716346 | 0.182692 | 0.093144 | 0.093144 | 0.131953 | 0.838292 | 0.838292 | 0.838292 | 0.838292 | 0.838292 | 0.752911 | 0 | 0.006091 | 0.418879 | 1,695 | 59 | 93 | 28.728814 | 0.77868 | 0 | 0 | 0.638298 | 0 | 0 | 0.196344 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.085106 | false | 0 | 0.021277 | 0 | 0.319149 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
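# Editor's note: illustrative usage sketch for the validators above; not part of the
# original file. It assumes check_int accepts plain numeric strings and that parameters
# arrive as single-element lists, as the closures expect.
validate_port = validate_int("port", min_val=1, max_val=65535)
print(validate_port(["8080"]))   # -> {'result': 8080}
print(validate_port(["70000"]))  # -> {'error': 'port cannot be below 1 or above 65535', 'result': None}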
b2a137935950de9607db79de35037c223884e718 | 317 | py | Python | dae/dae/tools/tests/test_generate_denovo_gene_sets.py | iossifovlab/gpf | e556243d29666179dbcb72859845b4d6c011af2b | [
"MIT"
] | null | null | null | dae/dae/tools/tests/test_generate_denovo_gene_sets.py | iossifovlab/gpf | e556243d29666179dbcb72859845b4d6c011af2b | [
"MIT"
] | 82 | 2019-07-22T11:44:23.000Z | 2022-01-13T15:27:33.000Z | dae/dae/tools/tests/test_generate_denovo_gene_sets.py | iossifovlab/gpf | e556243d29666179dbcb72859845b4d6c011af2b | [
"MIT"
] | null | null | null | import pytest
from dae.tools.generate_denovo_gene_sets import main
pytestmark = pytest.mark.usefixtures("gene_info_cache_dir")
def test_generate_denovo_gene_sets_script_passes(gpf_instance_2013):
main(gpf_instance=gpf_instance_2013, argv=[])
main(gpf_instance=gpf_instance_2013, argv=["--show-studies"])
| 26.416667 | 68 | 0.81388 | 46 | 317 | 5.173913 | 0.565217 | 0.231092 | 0.189076 | 0.184874 | 0.285714 | 0.285714 | 0.285714 | 0 | 0 | 0 | 0 | 0.041522 | 0.088328 | 317 | 11 | 69 | 28.818182 | 0.782007 | 0 | 0 | 0 | 1 | 0 | 0.104101 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0.166667 | 0.333333 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 6 |
b2b14011a51f7938816f314bb3e21140d85790da | 326 | py | Python | analysis_llt/ml/cv/__init__.py | Tammy-Lee/analysis-llt | ea1bb62d614bb75dac68c010a0cc524a5be185f2 | [
"MIT"
] | null | null | null | analysis_llt/ml/cv/__init__.py | Tammy-Lee/analysis-llt | ea1bb62d614bb75dac68c010a0cc524a5be185f2 | [
"MIT"
] | null | null | null | analysis_llt/ml/cv/__init__.py | Tammy-Lee/analysis-llt | ea1bb62d614bb75dac68c010a0cc524a5be185f2 | [
"MIT"
] | null | null | null | from analysis_llt.ml.cv.base import SCORES
from analysis_llt.ml.cv.ensemble import RandomForestClassifierCV
from analysis_llt.ml.cv.svm import SVCCV
from analysis_llt.ml.cv.neighbors import KNNCV
from analysis_llt.ml.cv.neural_network import MLPClassifierCV
from analysis_llt.ml.cv.linear_model import LogisticRegressionCVLLT
| 46.571429 | 67 | 0.871166 | 50 | 326 | 5.52 | 0.4 | 0.26087 | 0.326087 | 0.369565 | 0.413043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.07362 | 326 | 6 | 68 | 54.333333 | 0.913907 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
a23f20817bae5bf770e5162955069b561421e860 | 6,201 | py | Python | adv_attacks.py | AdversarialFishRecognition/AdvFish | d61035a7bc7345ee22c668944255dd90ab73595b | [
"MIT"
] | 3 | 2021-01-05T13:22:26.000Z | 2021-12-17T08:30:07.000Z | adv_attacks.py | UnderwaterDL/AdvFish | d61035a7bc7345ee22c668944255dd90ab73595b | [
"MIT"
] | null | null | null | adv_attacks.py | UnderwaterDL/AdvFish | d61035a7bc7345ee22c668944255dd90ab73595b | [
"MIT"
] | null | null | null | """
Adapted from:
https://github.com/MadryLab/cifar10_challenge/blob/master/pgd_attack.py
Implementation of attack methods. Running this file as a program will
apply the attack to the model specified by the config file and store
the examples in an .npy file.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import numpy as np
class LinfPGDAttack:
def __init__(self, model, epsilon, eps_iter, nb_iter, kappa=0, random_start=False,
loss_func='xent', clip_min=0.0, clip_max=1.0):
"""Attack parameter initialization. The attack performs k steps of
size a, while always staying within epsilon from the initial
point."""
self.model = model
self.epsilon = epsilon
self.eps_iter = eps_iter
self.nb_iter = nb_iter
self.kappa = kappa
self.rand = random_start
self.clip_min = clip_min
self.clip_max = clip_max
self.x_input = self.model.layers[0].input
logits = self.model.layers[-2].output
y_pred = tf.nn.softmax(logits)
self.y_true = tf.placeholder(tf.float32, shape=y_pred.get_shape().as_list())
if loss_func == 'xent':
self.loss = -tf.reduce_sum(self.y_true * tf.log(y_pred), axis=1)
elif loss_func == 'cw':
correct_logit = tf.reduce_sum(self.y_true * logits, axis=1)
wrong_logit = tf.reduce_max((1 - self.y_true) * logits, axis=1)
self.loss = -tf.nn.relu(correct_logit - wrong_logit + kappa)
else:
print('Unknown loss function. Defaulting to cross-entropy')
self.loss = -tf.reduce_sum(self.y_true * tf.log(y_pred), axis=1)
self.grad = tf.gradients(self.loss, self.x_input)[0]
def perturb(self, sess, x_nat, y, batch_size):
"""Given a set of examples (x_nat, y), returns a set of adversarial
examples within epsilon of x_nat in l_infinity norm."""
if self.rand:
x = x_nat + np.random.uniform(-self.epsilon, self.epsilon, x_nat.shape)
else:
x = np.copy(x_nat)
nb_batch = len(x) // batch_size
# check if need one more batch
if nb_batch * batch_size < len(x):
nb_batch += 1
for i in range(nb_batch):
start = i * batch_size
end = (i + 1) * batch_size
end = np.minimum(end, len(x))
batch_x = x[start:end]
batch_y = y[start:end]
for j in range(self.nb_iter):
loss, grad = sess.run([self.loss, self.grad],
feed_dict={self.x_input: batch_x,
self.y_true: batch_y})
grad = np.nan_to_num(grad)
batch_x += self.eps_iter * np.sign(grad)
batch_x = np.clip(batch_x, x_nat[start:end] - self.epsilon, x_nat[start:end] + self.epsilon)
batch_x = np.clip(batch_x, self.clip_min, self.clip_max) # ensure valid pixel range
x[start:end] = batch_x[:]
return x
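# Editor's note: illustrative usage sketch only, not part of the original file. It assumes
# a TF1-style session, one-hot labels, inputs scaled to [0, 1], and a Keras model whose
# second-to-last layer outputs logits, as the attack expects.
# attack = LinfPGDAttack(model, epsilon=8 / 255, eps_iter=2 / 255, nb_iter=10,
#                        random_start=True, loss_func='xent')
# x_adv = attack.perturb(sess, x_clean, y_onehot, batch_size=64)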
"""
Adaptive Fast Gradient Sign Method (AdaFGSM)
"""
class AdaFGSM:
def __init__(self, model, epsilon, kappa=0, random_start=False,
loss_func='xent', clip_min=0.0, clip_max=1.0):
"""Attack parameter initialization. The attack performs k steps of
size a, while always staying within epsilon from the initial
point."""
self.model = model
self.epsilon = epsilon
self.kappa = kappa
self.rand = random_start
self.clip_min = clip_min
self.clip_max = clip_max
self.x_input = self.model.layers[0].input
logits = self.model.layers[-2].output
y_pred = tf.nn.softmax(logits)
self.y_true = tf.placeholder(tf.float32, shape=y_pred.get_shape().as_list())
if loss_func == 'xent':
self.loss = -tf.reduce_sum(self.y_true * tf.log(y_pred), axis=1)
elif loss_func == 'cw':
correct_logit = tf.reduce_sum(self.y_true * logits, axis=1)
wrong_logit = tf.reduce_max((1 - self.y_true) * logits, axis=1)
self.loss = -tf.nn.relu(correct_logit - wrong_logit + kappa)
else:
print('Unknown loss function. Defaulting to cross-entropy')
self.loss = -tf.reduce_sum(self.y_true * tf.log(y_pred), axis=1)
self.grad = tf.gradients(self.loss, self.x_input)[0]
def perturb(self, sess, x_nat, y, batch_size):
"""Given a set of examples (x_nat, y), returns a set of adversarial
examples within epsilon of x_nat in l_infinity norm."""
if self.rand:
x = x_nat + np.random.uniform(-self.epsilon, self.epsilon, x_nat.shape)
else:
x = np.copy(x_nat)
nb_batch = len(x) // batch_size
# check if need one more batch
if nb_batch * batch_size < len(x):
nb_batch += 1
for i in range(nb_batch):
start = i * batch_size
end = (i + 1) * batch_size
end = np.minimum(end, len(x))
batch_x = x[start:end]
batch_y = y[start:end]
# compute the input gradients
loss, grad = sess.run([self.loss, self.grad],
feed_dict={self.x_input: batch_x,
self.y_true: batch_y})
grad = np.nan_to_num(grad)
# get the maximum gradient magnitude
max_norm = np.max(np.abs(grad))
# normalized gradient to [0,1]
grad = grad/max_norm
# apply an adaptive perturbation to input, replacing the hard perturbation: epsilon*sign(grad)
batch_x += self.epsilon * grad
# clipping to ensure the perturbed pixel values are still within the valid range (eg. [0,1])
batch_x = np.clip(batch_x, x_nat[start:end] - self.epsilon, x_nat[start:end] + self.epsilon)
batch_x = np.clip(batch_x, self.clip_min, self.clip_max) # ensure valid pixel range [0,1]
x[start:end] = batch_x[:]
return x
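# Editor's note (not part of the original file): the two attacks differ only in the update
# rule. LinfPGDAttack takes nb_iter signed steps, x <- clip(x + eps_iter * sign(grad)),
# while AdaFGSM takes a single step scaled by the gradient magnitude relative to its
# maximum, x <- clip(x + epsilon * grad / max|grad|), so weakly attended pixels receive
# smaller perturbations than under the hard sign() rule. Both clip back to the epsilon
# ball around x_nat and to the valid pixel range [clip_min, clip_max].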
| 40.006452 | 108 | 0.58797 | 885 | 6,201 | 3.929944 | 0.188701 | 0.018401 | 0.031052 | 0.018976 | 0.780334 | 0.767108 | 0.767108 | 0.754457 | 0.754457 | 0.754457 | 0 | 0.009777 | 0.307209 | 6,201 | 154 | 109 | 40.266234 | 0.799814 | 0.184809 | 0 | 0.843137 | 0 | 0 | 0.024485 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.039216 | false | 0 | 0.04902 | 0 | 0.127451 | 0.029412 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
a2871fd8b9dbdf6bd07d4e1075f56fa620648ce2 | 178 | py | Python | htu21/__init__.py | Jeremie-C/python-htu21 | 6503b8d9abff240e2ebe5df890b19dfa46bac6b3 | [
"MIT"
] | null | null | null | htu21/__init__.py | Jeremie-C/python-htu21 | 6503b8d9abff240e2ebe5df890b19dfa46bac6b3 | [
"MIT"
] | null | null | null | htu21/__init__.py | Jeremie-C/python-htu21 | 6503b8d9abff240e2ebe5df890b19dfa46bac6b3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Jeremie-C'
from .htu21 import HTU21
from .htu21 import H14T14, H08T12, H10T13, H11T11
from .htu21 import HOLD, NOHOLD
| 25.428571 | 49 | 0.713483 | 26 | 178 | 4.730769 | 0.730769 | 0.219512 | 0.365854 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.164474 | 0.146067 | 178 | 6 | 50 | 29.666667 | 0.644737 | 0.235955 | 0 | 0 | 0 | 0 | 0.067164 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
a2936272174a801ea153c411e85e9a4c622f5fc0 | 84 | py | Python | Problems/HackerRank/polar.py | kvlizhvn/Lab_7 | e7f7f8da2b5f52a426bb55981594fb8ddcbd127a | [
"MIT"
] | 1 | 2022-02-18T15:44:46.000Z | 2022-02-18T15:44:46.000Z | Problems/HackerRank/polar.py | kvlizhvn/Lab_7 | e7f7f8da2b5f52a426bb55981594fb8ddcbd127a | [
"MIT"
] | null | null | null | Problems/HackerRank/polar.py | kvlizhvn/Lab_7 | e7f7f8da2b5f52a426bb55981594fb8ddcbd127a | [
"MIT"
] | 1 | 2021-03-26T13:55:52.000Z | 2021-03-26T13:55:52.000Z | import cmath
c = complex(input())
print(cmath.polar(c)[0])
print(cmath.polar(c)[1])
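# Editor's note (illustrative, not part of the original script): for the input "1+1j",
# cmath.polar returns (r, phi) with r = sqrt(2) ~= 1.4142135623730951 and
# phi = pi / 4 ~= 0.7853981633974483, which is what the two prints above would show.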
| 16.8 | 24 | 0.690476 | 15 | 84 | 3.866667 | 0.6 | 0.344828 | 0.517241 | 0.551724 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.025974 | 0.083333 | 84 | 4 | 25 | 21 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.25 | 0.5 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
a299bb1e4509a7758e13de7f05e549eb60d05504 | 75 | py | Python | utils/__init__.py | Lisovq/markov-chain-bot | b7fb8c1e47c0abda6ccc9dfed1d0b35b8a6eaff1 | [
"MIT"
] | null | null | null | utils/__init__.py | Lisovq/markov-chain-bot | b7fb8c1e47c0abda6ccc9dfed1d0b35b8a6eaff1 | [
"MIT"
] | null | null | null | utils/__init__.py | Lisovq/markov-chain-bot | b7fb8c1e47c0abda6ccc9dfed1d0b35b8a6eaff1 | [
"MIT"
] | null | null | null | from .message_generator import generate
from .random_send import randomize
| 25 | 39 | 0.866667 | 10 | 75 | 6.3 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.106667 | 75 | 2 | 40 | 37.5 | 0.940299 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
a29acef6d5389dfeadb42034b5acf3b4345af9a5 | 16,719 | py | Python | lib/FakeObjectsForTests/FakeObjectsForTestsImpl.py | JamesJeffryes/FakeObjectsForTests | 19e29b7b76531082a65954b29db0baaa4b33353a | [
"MIT"
] | null | null | null | lib/FakeObjectsForTests/FakeObjectsForTestsImpl.py | JamesJeffryes/FakeObjectsForTests | 19e29b7b76531082a65954b29db0baaa4b33353a | [
"MIT"
] | null | null | null | lib/FakeObjectsForTests/FakeObjectsForTestsImpl.py | JamesJeffryes/FakeObjectsForTests | 19e29b7b76531082a65954b29db0baaa4b33353a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#BEGIN_HEADER
import os
import time
import json
import copy
from biokbase.workspace.client import Workspace as workspaceService
from DataFileUtil.DataFileUtilClient import DataFileUtil
#END_HEADER
class FakeObjectsForTests:
'''
Module Name:
FakeObjectsForTests
Module Description:
A KBase module: FakeObjectsForTests
'''
######## WARNING FOR GEVENT USERS ####### noqa
# Since asynchronous IO can lead to methods - even the same method -
# interrupting each other, you must be *very* careful when using global
# state. A method could easily clobber the state set by another while
# the latter method is running.
######################################### noqa
VERSION = "0.0.2"
GIT_URL = "https://github.com/kbaseapps/FakeObjectsForTests"
GIT_COMMIT_HASH = "b71a1a07cc622d964997a6c63ef545aafcb85c8c"
#BEGIN_CLASS_HEADER
def ws(self, ctx):
return workspaceService(self.wsURL, token=ctx['token'])
#END_CLASS_HEADER
# config contains contents of config file in a hash or None if it couldn't
# be found
def __init__(self, config):
#BEGIN_CONSTRUCTOR
self.wsURL = config['workspace-url']
self.scratch = config['scratch']
#END_CONSTRUCTOR
pass
def create_any_objects(self, ctx, params):
"""
:param params: instance of type "CreateAnyObjectsParams"
(ws_id/ws_name - two alternative ways to set target workspace,
obj_names - list of names for target workspace objects, metadata -
optional metadata.) -> structure: parameter "ws_id" of Long,
parameter "ws_name" of String, parameter "obj_names" of list of
String, parameter "metadata" of mapping from String to String
:returns: instance of list of type "object_info" (Information about
an object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
"""
# ctx is the context object
# return variables are: returnVal
#BEGIN create_any_objects
metadata = params.get('metadata')
objects = []
for obj_name in params['obj_names']:
objects.append({'type': 'Empty.AType', 'data': {'foo': 5},
'name': obj_name, 'meta': metadata})
so_params = {'objects': objects}
if 'ws_id' in params:
so_params['id'] = params['ws_id']
elif 'ws_name' in params:
so_params['workspace'] = params['ws_name']
returnVal = self.ws(ctx).save_objects(so_params)
#END create_any_objects
# At some point might do deeper type checking...
if not isinstance(returnVal, list):
raise ValueError('Method create_any_objects return value ' +
'returnVal is not type list as required.')
# return the results
return [returnVal]
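    # Editor's note (illustrative, not part of the original module): a possible `params`
    # value for the method above, with hypothetical workspace and object names:
    #     {"ws_name": "my_test_workspace",
    #      "obj_names": ["fake_obj_1", "fake_obj_2"],
    #      "metadata": {"purpose": "unit-test"}}
    # Each listed name is saved as an Empty.AType object through the Workspace service.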
def create_fake_genomes(self, ctx, params):
"""
:param params: instance of type "CreateFakeGenomesParams"
(ws_id/ws_name - two alternative ways to set target workspace,
obj_names - list of names for target workspace objects (of type
'KBaseGenomes.Genome'), metadata - optional metadata.) ->
structure: parameter "ws_id" of Long, parameter "ws_name" of
String, parameter "obj_names" of list of String, parameter
"metadata" of mapping from String to String
:returns: instance of list of type "object_info" (Information about
an object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
"""
# ctx is the context object
# return variables are: returnVal
#BEGIN create_fake_genomes
metadata = params.get('metadata')
objects = []
genome_data = json.load(open('/kb/module/data/genome.json'))
for obj_name in params['obj_names']:
data = copy.copy(genome_data)
data['id'] = obj_name
objects.append({'type': 'KBaseGenomes.Genome',
'data': data,
'name': obj_name, 'meta': metadata})
so_params = {'objects': objects}
if 'ws_id' in params:
so_params['id'] = params['ws_id']
elif 'ws_name' in params:
so_params['workspace'] = params['ws_name']
returnVal = self.ws(ctx).save_objects(so_params)
#END create_fake_genomes
# At some point might do deeper type checking...
if not isinstance(returnVal, list):
raise ValueError('Method create_fake_genomes return value ' +
'returnVal is not type list as required.')
# return the results
return [returnVal]
def create_fake_reads(self, ctx, params):
"""
:param params: instance of type "CreateFakeReadsParams"
(ws_id/ws_name - two alternative ways to set target workspace,
obj_names - list of names for target workspace objects (of type
'KBaseFile.SingleEndLibrary'), metadata - optional metadata.) ->
structure: parameter "ws_id" of Long, parameter "ws_name" of
String, parameter "obj_names" of list of String, parameter
"metadata" of mapping from String to String
:returns: instance of list of type "object_info" (Information about
an object, including user provided metadata. obj_id objid - the
numerical id of the object. obj_name name - the name of the
object. type_string type - the type of the object. timestamp
save_date - the save date of the object. obj_ver ver - the version
of the object. username saved_by - the user that saved or copied
the object. ws_id wsid - the workspace containing the object.
ws_name workspace - the workspace containing the object. string
chsum - the md5 checksum of the object. int size - the size of the
object in bytes. usermeta meta - arbitrary user-supplied metadata
about the object.) -> tuple of size 11: parameter "objid" of type
"obj_id" (The unique, permanent numerical ID of an object.),
parameter "name" of type "obj_name" (A string used as a name for
an object. Any string consisting of alphanumeric characters and
the characters |._- that is not an integer is acceptable.),
parameter "type" of type "type_string" (A type string. Specifies
the type and its version in a single string in the format
[module].[typename]-[major].[minor]: module - a string. The module
name of the typespec containing the type. typename - a string. The
name of the type as assigned by the typedef statement. major - an
integer. The major version of the type. A change in the major
version implies the type has changed in a non-backwards compatible
way. minor - an integer. The minor version of the type. A change
in the minor version implies that the type has changed in a way
that is backwards compatible with previous type definitions. In
many cases, the major and minor versions are optional, and if not
provided the most recent version will be used. Example:
MyModule.MyType-3.1), parameter "save_date" of type "timestamp" (A
time in the format YYYY-MM-DDThh:mm:ssZ, where Z is either the
character Z (representing the UTC timezone) or the difference in
time to UTC in the format +/-HHMM, eg: 2012-12-17T23:24:06-0500
(EST time) 2013-04-03T08:56:32+0000 (UTC time)
2013-04-03T08:56:32Z (UTC time)), parameter "version" of Long,
parameter "saved_by" of type "username" (Login name of a KBase
user account.), parameter "wsid" of type "ws_id" (The unique,
permanent numerical ID of a workspace.), parameter "workspace" of
type "ws_name" (A string used as a name for a workspace. Any
string consisting of alphanumeric characters and "_", ".", or "-"
that is not an integer is acceptable. The name may optionally be
prefixed with the workspace owner's user name and a colon, e.g.
kbasetest:my_workspace.), parameter "chsum" of String, parameter
"size" of Long, parameter "meta" of type "usermeta" (User provided
metadata about an object. Arbitrary key-value pairs provided by
the user.) -> mapping from String to String
"""
# ctx is the context object
# return variables are: returnVal
#BEGIN create_fake_reads
metadata = params.get('metadata')
objects = []
dfu = DataFileUtil(os.environ['SDK_CALLBACK_URL'])
path_to_temp_file = "/kb/module/work/tmp/temp_" + str(time.time()) + ".fq"
with open(path_to_temp_file, 'w') as f:
f.write(' ')
uploadedfile = dfu.file_to_shock({'file_path': path_to_temp_file,
'make_handle': 1, 'pack': 'gzip'})
fhandle = uploadedfile['handle']
os.remove(path_to_temp_file)
data = {'lib': {'encoding': "ascii", 'file': fhandle, 'size': 1,
'type': "fq"},
'sequencing_tech': "Illumina",
'single_genome': 1}
for obj_name in params['obj_names']:
objects.append({'type': 'KBaseFile.SingleEndLibrary',
'data': data, 'name': obj_name, 'meta': metadata})
so_params = {'objects': objects}
if 'ws_id' in params:
so_params['id'] = params['ws_id']
elif 'ws_name' in params:
so_params['workspace'] = params['ws_name']
returnVal = self.ws(ctx).save_objects(so_params)
#END create_fake_reads
# At some point might do deeper type checking...
if not isinstance(returnVal, list):
raise ValueError('Method create_fake_reads return value ' +
'returnVal is not type list as required.')
# return the results
return [returnVal]
def status(self, ctx):
#BEGIN_STATUS
returnVal = {'state': "OK",
'message': "",
'version': self.VERSION,
'git_url': self.GIT_URL,
'git_commit_hash': self.GIT_COMMIT_HASH}
#END_STATUS
return [returnVal]
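# Editor's note: illustrative sketch only, not part of the original implementation. In a
# real deployment the KBase SDK server builds the class and the call context; the values
# below (URL, token, workspace name) are placeholders.
# config = {"workspace-url": "https://<ws-host>/services/ws", "scratch": "/kb/module/work/tmp"}
# impl = FakeObjectsForTests(config)
# ctx = {"token": "<auth token>"}
# obj_infos = impl.create_any_objects(ctx, {"ws_name": "my_workspace", "obj_names": ["obj1"]})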
| 55.73 | 82 | 0.629523 | 2,206 | 16,719 | 4.688577 | 0.140073 | 0.015953 | 0.022334 | 0.011022 | 0.838925 | 0.829643 | 0.829643 | 0.827129 | 0.816108 | 0.816108 | 0 | 0.016544 | 0.291405 | 16,719 | 299 | 83 | 55.916388 | 0.856504 | 0.661822 | 0 | 0.448276 | 0 | 0 | 0.199673 | 0.027589 | 0 | 0 | 0 | 0 | 0 | 1 | 0.068966 | false | 0.011494 | 0.068966 | 0.011494 | 0.241379 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
0c2cd13b1d8aee5f07892c081259443db198acad | 150 | py | Python | torch_datasets/datasets/__init__.py | mingruimingrui/torch-datasets | 2640b8c4fa82156e68e617fc545a546b4e08dc4e | [
"MIT"
] | null | null | null | torch_datasets/datasets/__init__.py | mingruimingrui/torch-datasets | 2640b8c4fa82156e68e617fc545a546b4e08dc4e | [
"MIT"
] | null | null | null | torch_datasets/datasets/__init__.py | mingruimingrui/torch-datasets | 2640b8c4fa82156e68e617fc545a546b4e08dc4e | [
"MIT"
] | null | null | null | from .detection_dataset import DetectionDataset
from .siamese_dataset import SiameseDataset
from .classification_dataset import ClassificationDataset
| 37.5 | 57 | 0.9 | 15 | 150 | 8.8 | 0.6 | 0.295455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.08 | 150 | 3 | 58 | 50 | 0.956522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
0c308717fad391201f73a1dcbce04981044fbb39 | 9,106 | py | Python | model/multiple_choice.py | guanzhchen/PETuning | eb36327713e237ea95a8982ceabb71de5ca4b09d | [
"MIT"
] | 10 | 2022-02-18T00:54:12.000Z | 2022-03-23T11:22:50.000Z | model/multiple_choice.py | guanzhchen/PETuning | eb36327713e237ea95a8982ceabb71de5ca4b09d | [
"MIT"
] | null | null | null | model/multiple_choice.py | guanzhchen/PETuning | eb36327713e237ea95a8982ceabb71de5ca4b09d | [
"MIT"
] | null | null | null | import torch
from torch._C import NoopLogger
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
from torch.nn import CrossEntropyLoss, MSELoss, BCEWithLogitsLoss
from transformers import BertModel, BertPreTrainedModel
from transformers.modeling_outputs import MultipleChoiceModelOutput, BaseModelOutput, Seq2SeqLMOutput
from model.prefix_encoder import PrefixEncoder
from transformers.adapters.model_mixin import ModelWithHeadsAdaptersMixin
# from transformers import RobertaModel, RobertaPreTrainedModel
from model.roberta import RobertaModel, RobertaPreTrainedModel
class RobertaPrefixForMultipleChoice(ModelWithHeadsAdaptersMixin, RobertaPreTrainedModel):
_keys_to_ignore_on_load_missing = [r"position_ids"]
def __init__(self, config):
super().__init__(config)
self.roberta = RobertaModel(config)
self.dropout = torch.nn.Dropout(config.hidden_dropout_prob)
self.classifier = torch.nn.Linear(config.hidden_size, 1)
self.init_weights()
for param in self.roberta.parameters():
param.requires_grad = False
self.pre_seq_len = config.pre_seq_len
self.n_layer = config.num_hidden_layers
self.n_head = config.num_attention_heads
self.n_embd = config.hidden_size // config.num_attention_heads
self.prefix_tokens = torch.arange(self.pre_seq_len).long()
self.prefix_encoder = PrefixEncoder(config)
bert_param = 0
for name, param in self.roberta.named_parameters():
bert_param += param.numel()
all_param = 0
for name, param in self.named_parameters():
all_param += param.numel()
total_param = all_param - bert_param
print('total param is {}'.format(total_param))
def get_prompt(self, batch_size):
prefix_tokens = self.prefix_tokens.unsqueeze(0).expand(batch_size, -1).to(self.roberta.device)
past_key_values = self.prefix_encoder(prefix_tokens)
past_key_values = past_key_values.view(
batch_size,
self.pre_seq_len,
self.n_layer * 2,
self.n_head,
self.n_embd
)
past_key_values = self.dropout(past_key_values)
past_key_values = past_key_values.permute([2, 0, 3, 1, 4]).split(2)
return past_key_values
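        # Editor's note (illustrative, not part of the original file): with assumed sizes
        # batch_size=4, pre_seq_len=16, n_layer=12, n_head=12, n_embd=64, the prefix
        # encoder output is viewed as (4, 16, 24, 12, 64), permuted to (24, 4, 12, 16, 64)
        # and split into 12 chunks of shape (2, 4, 12, 16, 64), i.e. one (key, value) pair
        # per transformer layer in the past_key_values format the backbone consumes.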
def forward(
self,
input_ids=None,
token_type_ids=None,
attention_mask=None,
labels=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
adapter_names=None,
head=None,
**kwargs
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Labels for computing the multiple choice classification loss. Indices should be in ``[0, ...,
num_choices-1]`` where :obj:`num_choices` is the size of the second dimension of the input tensors. (See
:obj:`input_ids` above)
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
batch_size, num_choices = input_ids.shape[:2] if input_ids is not None else inputs_embeds.shape[:2]
flat_input_ids = input_ids.view(-1, input_ids.size(-1)) if input_ids is not None else None
flat_position_ids = position_ids.view(-1, position_ids.size(-1)) if position_ids is not None else None
flat_token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) if token_type_ids is not None else None
flat_attention_mask = attention_mask.view(-1, attention_mask.size(-1)) if attention_mask is not None else None
flat_inputs_embeds = (
inputs_embeds.view(-1, inputs_embeds.size(-2), inputs_embeds.size(-1))
if inputs_embeds is not None
else None
)
past_key_values = self.get_prompt(batch_size=batch_size * num_choices)
prefix_attention_mask = torch.ones(batch_size * num_choices, self.pre_seq_len).to(self.roberta.device)
flat_attention_mask = torch.cat((prefix_attention_mask, flat_attention_mask), dim=1)
outputs = self.roberta(
flat_input_ids,
position_ids=flat_position_ids,
token_type_ids=flat_token_type_ids,
attention_mask=flat_attention_mask,
head_mask=head_mask,
inputs_embeds=flat_inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
adapter_names=adapter_names,
past_key_values=past_key_values,
)
pooled_output = outputs[1]
pooled_output = self.dropout(pooled_output)
logits = self.classifier(pooled_output)
reshaped_logits = logits.view(-1, num_choices)
loss = None
if labels is not None:
loss_fct = CrossEntropyLoss()
loss = loss_fct(reshaped_logits, labels)
if not return_dict:
output = (reshaped_logits,) + outputs[2:]
return ((loss,) + output) if loss is not None else output
return MultipleChoiceModelOutput(
loss=loss,
logits=reshaped_logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
class RobertaLoraForMultipleChoice(ModelWithHeadsAdaptersMixin, RobertaPreTrainedModel):
_keys_to_ignore_on_load_missing = [r"position_ids"]
def __init__(self, config):
super().__init__(config)
self.roberta = RobertaModel(config)
self.dropout = torch.nn.Dropout(config.hidden_dropout_prob)
self.classifier = torch.nn.Linear(config.hidden_size, 1)
self.init_weights()
for name, param in self.roberta.named_parameters():
if "lora" not in name.lower():
param.requires_grad = False
bert_param = 0
for name, param in self.roberta.named_parameters():
bert_param += param.numel()
all_param = 0
for name, param in self.named_parameters():
all_param += param.numel()
total_param = all_param - bert_param
print('total param is {}'.format(total_param))
def forward(
self,
input_ids=None,
token_type_ids=None,
attention_mask=None,
labels=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
adapter_names=None,
head=None,
**kwargs
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Labels for computing the multiple choice classification loss. Indices should be in ``[0, ...,
num_choices-1]`` where :obj:`num_choices` is the size of the second dimension of the input tensors. (See
:obj:`input_ids` above)
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
batch_size, num_choices = input_ids.shape[:2] if input_ids is not None else inputs_embeds.shape[:2]
flat_input_ids = input_ids.view(-1, input_ids.size(-1)) if input_ids is not None else None
flat_position_ids = position_ids.view(-1, position_ids.size(-1)) if position_ids is not None else None
flat_token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) if token_type_ids is not None else None
flat_attention_mask = attention_mask.view(-1, attention_mask.size(-1)) if attention_mask is not None else None
flat_inputs_embeds = (
inputs_embeds.view(-1, inputs_embeds.size(-2), inputs_embeds.size(-1))
if inputs_embeds is not None
else None
)
outputs = self.roberta(
flat_input_ids,
position_ids=flat_position_ids,
token_type_ids=flat_token_type_ids,
attention_mask=flat_attention_mask,
head_mask=head_mask,
inputs_embeds=flat_inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
adapter_names=adapter_names,
)
pooled_output = outputs[1]
pooled_output = self.dropout(pooled_output)
logits = self.classifier(pooled_output)
reshaped_logits = logits.view(-1, num_choices)
loss = None
if labels is not None:
loss_fct = CrossEntropyLoss()
loss = loss_fct(reshaped_logits, labels)
if not return_dict:
output = (reshaped_logits,) + outputs[2:]
return ((loss,) + output) if loss is not None else output
return MultipleChoiceModelOutput(
loss=loss,
logits=reshaped_logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
) | 38.260504 | 118 | 0.655941 | 1,135 | 9,106 | 4.971806 | 0.135683 | 0.025518 | 0.028708 | 0.03686 | 0.782385 | 0.7675 | 0.744639 | 0.744639 | 0.738083 | 0.738083 | 0 | 0.007889 | 0.262245 | 9,106 | 238 | 119 | 38.260504 | 0.832093 | 0.075115 | 0 | 0.751351 | 0 | 0 | 0.007445 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.027027 | false | 0 | 0.059459 | 0 | 0.135135 | 0.010811 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
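# Editor's note (illustrative, not part of the original file): for both heads above the
# multiple-choice inputs are assumed to have the shapes
#   input_ids / attention_mask: (batch_size, num_choices, seq_len)
#   labels:                     (batch_size,) with values in [0, num_choices)
# and the returned MultipleChoiceModelOutput carries logits of shape
# (batch_size, num_choices). Only the prefix encoder (or the LoRA matrices) and the
# classifier head receive gradients, since the RoBERTa backbone is frozen in __init__.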
0c541ffb07cf4a03075b25153ff8b317a8d362dc | 87 | py | Python | pr2roc/__init__.py | ameya98/roc2pr | ab19d7552e2e9ae32ca00a1be4a17b29a3f915fa | [
"MIT"
] | 1 | 2020-09-08T14:51:48.000Z | 2020-09-08T14:51:48.000Z | pr2roc/__init__.py | ameya98/pr2roc | ab19d7552e2e9ae32ca00a1be4a17b29a3f915fa | [
"MIT"
] | null | null | null | pr2roc/__init__.py | ameya98/pr2roc | ab19d7552e2e9ae32ca00a1be4a17b29a3f915fa | [
"MIT"
] | null | null | null | from .curve import Curve
from .pr_curve import PRCurve
from .roc_curve import ROCCurve
| 21.75 | 31 | 0.827586 | 14 | 87 | 5 | 0.5 | 0.471429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.137931 | 87 | 3 | 32 | 29 | 0.933333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
a76686e764556c5635c67b7a43db744163a92772 | 79 | py | Python | limix_tool/dist/__init__.py | Horta/limix-tool | 4b36eb9ad1d7f5813a189d8619fbb028164a0a54 | [
"MIT"
] | null | null | null | limix_tool/dist/__init__.py | Horta/limix-tool | 4b36eb9ad1d7f5813a189d8619fbb028164a0a54 | [
"MIT"
] | 1 | 2016-11-03T14:59:33.000Z | 2016-11-03T14:59:33.000Z | limix_tool/dist/__init__.py | Horta/limix-tool | 4b36eb9ad1d7f5813a189d8619fbb028164a0a54 | [
"MIT"
] | null | null | null | from __future__ import absolute_import as _absolute_import
from . import norm
| 19.75 | 58 | 0.848101 | 11 | 79 | 5.454545 | 0.545455 | 0.466667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.139241 | 79 | 3 | 59 | 26.333333 | 0.882353 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
a78f90e2cd28c1836c5793527b804c4b78913487 | 64 | py | Python | mgxsim/__init__.py | patrickwest/mgx-sim | f1457540dedf72076478f1af8765bb4d1ab197e9 | [
"BSD-3-Clause"
] | null | null | null | mgxsim/__init__.py | patrickwest/mgx-sim | f1457540dedf72076478f1af8765bb4d1ab197e9 | [
"BSD-3-Clause"
] | null | null | null | mgxsim/__init__.py | patrickwest/mgx-sim | f1457540dedf72076478f1af8765bb4d1ab197e9 | [
"BSD-3-Clause"
] | null | null | null | from .genome import Genome
from .genome_index import GenomeIndex | 32 | 37 | 0.859375 | 9 | 64 | 6 | 0.555556 | 0.37037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.109375 | 64 | 2 | 37 | 32 | 0.947368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
a796e221aac177aacc85959dfeded0cdc2c98604 | 193 | py | Python | tests/utils/test_io.py | igrek51/glue | 6726ba977a21e58b354a5c97f68639f84184be7a | [
"MIT"
] | 6 | 2020-06-24T20:03:06.000Z | 2021-09-21T10:05:17.000Z | tests/utils/test_io.py | igrek51/cliglue | 6726ba977a21e58b354a5c97f68639f84184be7a | [
"MIT"
] | 2 | 2021-09-19T15:28:02.000Z | 2021-09-21T17:29:38.000Z | tests/utils/test_io.py | igrek51/cliglue | 6726ba977a21e58b354a5c97f68639f84184be7a | [
"MIT"
] | 2 | 2020-06-24T21:21:35.000Z | 2021-08-01T17:24:38.000Z | import sys
from nuclear.utils.input import input_required
def test_input_required():
sys.stdin = open('tests/utils/res/inputRequired')
assert input_required('required: ') == 'valid'
| 21.444444 | 53 | 0.740933 | 25 | 193 | 5.56 | 0.64 | 0.280576 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.139896 | 193 | 8 | 54 | 24.125 | 0.837349 | 0 | 0 | 0 | 0 | 0 | 0.227979 | 0.150259 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.2 | true | 0 | 0.4 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
a799cbe372da6f711a94f98bef5d06a774734308 | 134 | py | Python | indset/io/__init__.py | skyman/independent-set | e2b6a2dc231aba5d9391f305d61d556b928e4d3e | [
"MIT"
] | null | null | null | indset/io/__init__.py | skyman/independent-set | e2b6a2dc231aba5d9391f305d61d556b928e4d3e | [
"MIT"
] | null | null | null | indset/io/__init__.py | skyman/independent-set | e2b6a2dc231aba5d9391f305d61d556b928e4d3e | [
"MIT"
] | null | null | null | # coding=utf-8
from gridtxt import alignment_simulator_grid_loader, alignment_simulator_grid_saver
from save_metrics import MetricsIO
| 33.5 | 83 | 0.88806 | 19 | 134 | 5.894737 | 0.736842 | 0.321429 | 0.392857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00813 | 0.08209 | 134 | 3 | 84 | 44.666667 | 0.902439 | 0.089552 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
a7cc56df37f3a927c3aaa21af6d357d97127aad2 | 125 | py | Python | gimp_be/image/__init__.py | J216/gimp_be | 02cc0e9627bee491cf1e6d5102ce0a3f07f1043e | [
"MIT"
] | 3 | 2017-02-05T08:12:19.000Z | 2019-08-02T14:31:56.000Z | gimp_be/image/__init__.py | J216/gimp_be | 02cc0e9627bee491cf1e6d5102ce0a3f07f1043e | [
"MIT"
] | 1 | 2017-01-11T05:54:51.000Z | 2019-01-08T03:48:57.000Z | gimp_be/image/__init__.py | J216/gimp_be | 02cc0e9627bee491cf1e6d5102ce0a3f07f1043e | [
"MIT"
] | null | null | null | from image import *
from save import *
from layer import *
from signature import *
from resource import *
from exif import *
| 17.857143 | 23 | 0.76 | 18 | 125 | 5.277778 | 0.444444 | 0.526316 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.192 | 125 | 6 | 24 | 20.833333 | 0.940594 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
ac63f4ea4ed51dfd0856c3af0efb2c31b384edea | 188 | py | Python | src/apps/trainings/admin/__init__.py | sanderland/katago-server | 6414fab080d007c05068a06ff4f25907b92848bd | [
"MIT"
] | 27 | 2020-05-03T11:01:27.000Z | 2022-03-17T05:33:10.000Z | src/apps/trainings/admin/__init__.py | sanderland/katago-server | 6414fab080d007c05068a06ff4f25907b92848bd | [
"MIT"
] | 54 | 2020-05-09T01:18:41.000Z | 2022-01-22T10:31:15.000Z | src/apps/trainings/admin/__init__.py | sanderland/katago-server | 6414fab080d007c05068a06ff4f25907b92848bd | [
"MIT"
] | 9 | 2020-09-29T11:31:32.000Z | 2022-03-09T01:37:50.000Z | from django.contrib import admin
from src.apps.trainings.admin.network_admin import NetworkAdmin
from src.apps.trainings.models import Network
admin.site.register(Network, NetworkAdmin)
| 26.857143 | 63 | 0.845745 | 26 | 188 | 6.076923 | 0.5 | 0.088608 | 0.139241 | 0.253165 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085106 | 188 | 6 | 64 | 31.333333 | 0.918605 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
ac6a67aa84d401fc8c8a40b521e0ca587df6bf3d | 129 | py | Python | src/auth/models.py | Midburn/midburn-profiles-django | e124a617986931baed0b17f5d7a1ba6c7d5528e9 | [
"MIT"
] | null | null | null | src/auth/models.py | Midburn/midburn-profiles-django | e124a617986931baed0b17f5d7a1ba6c7d5528e9 | [
"MIT"
] | 20 | 2019-10-05T13:23:47.000Z | 2022-03-11T23:37:37.000Z | src/auth/models.py | Midburn/midburn-profiles-django | e124a617986931baed0b17f5d7a1ba6c7d5528e9 | [
"MIT"
] | 1 | 2019-09-30T11:40:45.000Z | 2019-09-30T11:40:45.000Z | # Create your models here.
from .auth_models import BurnerUser
from .auth_models.identifying_document import IdentifyingDocument
| 32.25 | 65 | 0.860465 | 16 | 129 | 6.75 | 0.6875 | 0.148148 | 0.259259 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.100775 | 129 | 3 | 66 | 43 | 0.931034 | 0.186047 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
ac78b9964fb8ac505304c914a1b1bb03d8c58992 | 102 | py | Python | office365/sharepoint/search/simpleDataTable.py | wreiner/Office365-REST-Python-Client | 476bbce4f5928a140b4f5d33475d0ac9b0783530 | [
"MIT"
] | 544 | 2016-08-04T17:10:16.000Z | 2022-03-31T07:17:20.000Z | office365/sharepoint/search/simpleDataTable.py | wreiner/Office365-REST-Python-Client | 476bbce4f5928a140b4f5d33475d0ac9b0783530 | [
"MIT"
] | 438 | 2016-10-11T12:24:22.000Z | 2022-03-31T19:30:35.000Z | office365/sharepoint/search/simpleDataTable.py | wreiner/Office365-REST-Python-Client | 476bbce4f5928a140b4f5d33475d0ac9b0783530 | [
"MIT"
] | 202 | 2016-08-22T19:29:40.000Z | 2022-03-30T20:26:15.000Z | from office365.runtime.client_value import ClientValue
class SimpleDataTable(ClientValue):
pass
| 17 | 54 | 0.823529 | 11 | 102 | 7.545455 | 0.909091 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.033708 | 0.127451 | 102 | 5 | 55 | 20.4 | 0.898876 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
3babba673c6c87052fd77dbe0bbd22ffa14b5c14 | 168 | py | Python | containr.py | Albert5544/frontend | dab95f2a6ba95e8521c0e439da0081d2edf69cf9 | [
"MIT"
] | null | null | null | containr.py | Albert5544/frontend | dab95f2a6ba95e8521c0e439da0081d2edf69cf9 | [
"MIT"
] | 2 | 2021-02-02T22:48:24.000Z | 2021-06-02T02:04:53.000Z | containr.py | Albert5544/frontend | dab95f2a6ba95e8521c0e439da0081d2edf69cf9 | [
"MIT"
] | null | null | null | from app import app
from app.models import User, Dataset
@app.shell_context_processor
def make_shell_context():
return {'db': db, 'User': User, 'Dataset': Dataset} | 28 | 55 | 0.744048 | 25 | 168 | 4.84 | 0.52 | 0.115702 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.136905 | 168 | 6 | 55 | 28 | 0.834483 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | true | 0 | 0.4 | 0.2 | 0.8 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 0 | 0 | 6 |
3bb0cdd4feab271ed6bec8fc365425358672d08e | 21 | py | Python | mmn/modeling/mmn/__init__.py | MCG-NJU/MMN | 64641d12d4e1197b7378b3f786916b4cfd1a5080 | [
"MIT"
] | 48 | 2021-12-16T08:42:46.000Z | 2022-03-11T08:28:56.000Z | mmn/modeling/mmn/__init__.py | MCG-NJU/MMN | 64641d12d4e1197b7378b3f786916b4cfd1a5080 | [
"MIT"
] | 4 | 2021-12-17T07:15:51.000Z | 2022-02-21T12:28:32.000Z | mmn/modeling/mmn/__init__.py | MCG-NJU/MMN | 64641d12d4e1197b7378b3f786916b4cfd1a5080 | [
"MIT"
] | 5 | 2021-12-16T08:48:52.000Z | 2022-03-16T08:57:34.000Z | from .mmn import MMN
| 10.5 | 20 | 0.761905 | 4 | 21 | 4 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.190476 | 21 | 1 | 21 | 21 | 0.941176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
3bced2f2d82710d5e7e67fec776258d980272e56 | 109 | py | Python | src/tap_apple_search_ads/api/auth/cache/utils.py | mighty-digital/tap-apple-search-ads | de7de13509c06e4ce4ef89884b23a9b9d7182d56 | [
"MIT"
] | 1 | 2022-01-18T15:04:40.000Z | 2022-01-18T15:04:40.000Z | src/tap_apple_search_ads/api/auth/cache/utils.py | mighty-digital/tap-apple-search-ads | de7de13509c06e4ce4ef89884b23a9b9d7182d56 | [
"MIT"
] | null | null | null | src/tap_apple_search_ads/api/auth/cache/utils.py | mighty-digital/tap-apple-search-ads | de7de13509c06e4ce4ef89884b23a9b9d7182d56 | [
"MIT"
] | null | null | null | import datetime
def now() -> float:
return datetime.datetime.now(tz=datetime.timezone.utc).timestamp()
| 18.166667 | 70 | 0.733945 | 14 | 109 | 5.714286 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12844 | 109 | 5 | 71 | 21.8 | 0.842105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 6 |
3bd1899d5fcf684eecf9c0a19d6a4c80a2612351 | 23 | py | Python | util/__init__.py | IRIS-Team/tchecker | 7f0b3218f8914ac3d96eadc8a7db1c2d23016503 | [
"MIT"
] | 17 | 2021-09-05T11:35:03.000Z | 2022-03-23T19:06:54.000Z | util/__init__.py | IRIS-Team/tchecker | 7f0b3218f8914ac3d96eadc8a7db1c2d23016503 | [
"MIT"
] | 2 | 2021-09-04T11:06:02.000Z | 2021-09-06T06:40:08.000Z | util/__init__.py | IRIS-Team/tchecker | 7f0b3218f8914ac3d96eadc8a7db1c2d23016503 | [
"MIT"
] | 5 | 2021-09-04T13:13:59.000Z | 2022-02-19T00:22:56.000Z | from util.core import * | 23 | 23 | 0.782609 | 4 | 23 | 4.5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 23 | 1 | 23 | 23 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
3bf756a3c1629a3c445635cf24f00539b35d567f | 24 | py | Python | source/model/__init__.py | tailerr/QANet-pytorch | 23bfb9059894aeec9ed82dbd775b07de3269a314 | [
"MIT"
] | 1 | 2019-09-04T15:04:28.000Z | 2019-09-04T15:04:28.000Z | source/model/__init__.py | tailerr/QANet-pytorch | 23bfb9059894aeec9ed82dbd775b07de3269a314 | [
"MIT"
] | null | null | null | source/model/__init__.py | tailerr/QANet-pytorch | 23bfb9059894aeec9ed82dbd775b07de3269a314 | [
"MIT"
] | null | null | null | from .model import QANet | 24 | 24 | 0.833333 | 4 | 24 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125 | 24 | 1 | 24 | 24 | 0.952381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
cbf89dff44df117689d1d42fbef96c32e224f925 | 3,722 | py | Python | src/pybind/tests/test_io_util.py | yaguanghu/kaldi | 522b8dd0b29ef293ee56e7e5da338351e960a4e7 | [
"Apache-2.0"
] | null | null | null | src/pybind/tests/test_io_util.py | yaguanghu/kaldi | 522b8dd0b29ef293ee56e7e5da338351e960a4e7 | [
"Apache-2.0"
] | null | null | null | src/pybind/tests/test_io_util.py | yaguanghu/kaldi | 522b8dd0b29ef293ee56e7e5da338351e960a4e7 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2020 Mobvoi AI Lab, Beijing, China (author: Fangjun Kuang)
# Apache 2.0
import unittest
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir))
import shutil
from tempfile import mkdtemp
import numpy as np
import kaldi
class TestIOUtil(unittest.TestCase):
def test_read_vec_int(self):
tmp = mkdtemp()
for binary in [True, False]:
if binary:
wspecifier = 'ark,scp:{dir}/ali.ark,{dir}/ali.scp'.format(
dir=tmp)
else:
wspecifier = 'ark,scp,t:{dir}/ali.ark,{dir}/ali.scp'.format(
dir=tmp)
data = dict()
key1 = 'key1'
value1 = [0, 1, 3, 2]
writer = kaldi.IntVectorWriter(wspecifier)
writer.Write(key1, value1)
data[key1] = value1
key2 = 'key2'
value2 = [1, 2, 3, 4, 5, 6]
writer.Write(key2, value2)
data[key2] = value2
writer.Close()
filename = '{}/ali.scp'.format(tmp)
with open(filename, 'r') as f:
for line in f:
key, rxfilename = line.split()
value = kaldi.read_vec_int(rxfilename)
self.assertTrue(key in data)
self.assertEqual(value, data[key])
shutil.rmtree(tmp)
def test_read_vec_flt(self):
tmp = mkdtemp()
for binary in [True, False]:
if binary:
wspecifier = 'ark,scp:{dir}/test.ark,{dir}/test.scp'.format(
dir=tmp)
else:
wspecifier = 'ark,scp,t:{dir}/test.ark,{dir}/test.scp'.format(
dir=tmp)
data = dict()
key1 = 'key1'
value1 = np.arange(3).astype(np.float32)
writer = kaldi.VectorWriter(wspecifier)
writer.Write(key1, value1)
data[key1] = value1
key2 = 'key2'
value2 = value1 * 10
writer.Write(key2, value2)
data[key2] = value2
writer.Close()
filename = '{}/test.scp'.format(tmp)
with open(filename, 'r') as f:
for line in f:
key, rxfilename = line.split()
value = kaldi.read_vec_flt(rxfilename)
self.assertTrue(key in data)
np.testing.assert_array_equal(value, data[key])
shutil.rmtree(tmp)
def test_read_mat(self):
tmp = mkdtemp()
for binary in [True, False]:
if binary:
wspecifier = 'ark,scp:{dir}/test.ark,{dir}/test.scp'.format(
dir=tmp)
else:
wspecifier = 'ark,scp,t:{dir}/test.ark,{dir}/test.scp'.format(
dir=tmp)
data = dict()
key1 = 'key1'
value1 = np.arange(6 * 8).reshape(6, 8).astype(np.float32)
writer = kaldi.MatrixWriter(wspecifier)
writer.Write(key1, value1)
data[key1] = value1
key2 = 'key2'
value2 = value1 * 10
writer.Write(key2, value2)
data[key2] = value2
writer.Close()
filename = '{}/test.scp'.format(tmp)
with open(filename, 'r') as f:
for line in f:
key, rxfilename = line.split()
value = kaldi.read_mat(rxfilename)
self.assertTrue(key in data)
np.testing.assert_array_equal(value, data[key])
shutil.rmtree(tmp)
if __name__ == '__main__':
unittest.main()
| 28.412214 | 78 | 0.494895 | 412 | 3,722 | 4.400485 | 0.25 | 0.044677 | 0.052951 | 0.049641 | 0.779923 | 0.751241 | 0.733039 | 0.733039 | 0.733039 | 0.672366 | 0 | 0.032982 | 0.389038 | 3,722 | 130 | 79 | 28.630769 | 0.764292 | 0.027136 | 0 | 0.705263 | 0 | 0 | 0.080431 | 0.061913 | 0 | 0 | 0 | 0 | 0.063158 | 1 | 0.031579 | false | 0 | 0.073684 | 0 | 0.115789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
0255268542814fb9d84087adeee30eac525aad01 | 108 | py | Python | src/blender/blender_autocomplete-master/2.92/gpu/__init__.py | JonasWard/ClayAdventures | a716445ac690e4792e70658319aa1d5299f9c9e9 | [
"MIT"
] | 1 | 2020-03-25T10:55:10.000Z | 2020-03-25T10:55:10.000Z | src/blender/blender_autocomplete-master/2.92/gpu/__init__.py | JonasWard/ClayAdventures | a716445ac690e4792e70658319aa1d5299f9c9e9 | [
"MIT"
] | null | null | null | src/blender/blender_autocomplete-master/2.92/gpu/__init__.py | JonasWard/ClayAdventures | a716445ac690e4792e70658319aa1d5299f9c9e9 | [
"MIT"
] | null | null | null | import sys
import typing
from . import matrix
from . import select
from . import types
from . import shader
| 15.428571 | 20 | 0.777778 | 16 | 108 | 5.25 | 0.5 | 0.47619 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.185185 | 108 | 6 | 21 | 18 | 0.954545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
5a51a212597151e2e10d180c8742335c8e7b2e25 | 392 | py | Python | Configuration/Eras/python/Era_Phase2_timing_layer_bar_cff.py | NTrevisani/cmssw | a212a27526f34eb9507cf8b875c93896e6544781 | [
"Apache-2.0"
] | 3 | 2018-08-24T19:10:26.000Z | 2019-02-19T11:45:32.000Z | Configuration/Eras/python/Era_Phase2_timing_layer_bar_cff.py | NTrevisani/cmssw | a212a27526f34eb9507cf8b875c93896e6544781 | [
"Apache-2.0"
] | 7 | 2016-07-17T02:34:54.000Z | 2019-08-13T07:58:37.000Z | Configuration/Eras/python/Era_Phase2_timing_layer_bar_cff.py | NTrevisani/cmssw | a212a27526f34eb9507cf8b875c93896e6544781 | [
"Apache-2.0"
] | 5 | 2018-08-21T16:37:52.000Z | 2020-01-09T13:33:17.000Z | import FWCore.ParameterSet.Config as cms
from Configuration.Eras.Era_Phase2_timing_cff import Phase2_timing
from Configuration.Eras.Modifier_phase2_timing_layer_cff import phase2_timing_layer
from Configuration.Eras.Modifier_phase2_timing_layer_bar_cff import phase2_timing_layer_bar
Phase2_timing_layer_bar = cms.ModifierChain(Phase2_timing, phase2_timing_layer, phase2_timing_layer_bar)
| 43.555556 | 104 | 0.903061 | 57 | 392 | 5.736842 | 0.298246 | 0.366972 | 0.363914 | 0.244648 | 0.440367 | 0.281346 | 0.281346 | 0 | 0 | 0 | 0 | 0.027174 | 0.061224 | 392 | 8 | 105 | 49 | 0.861413 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.8 | 0 | 0.8 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
5a64de55c70541277ef7098ff53fbe8efdf73ce7 | 28,059 | py | Python | centrality.py | Jupaoqq/TwitterPred | b0a486dde0b8622b8c834d41885bc7a35f096f49 | [
"MIT"
] | 1 | 2022-02-03T10:57:24.000Z | 2022-02-03T10:57:24.000Z | centrality.py | Jupaoqq/TwitterPred | b0a486dde0b8622b8c834d41885bc7a35f096f49 | [
"MIT"
] | null | null | null | centrality.py | Jupaoqq/TwitterPred | b0a486dde0b8622b8c834d41885bc7a35f096f49 | [
"MIT"
] | null | null | null | import string
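# Builds per-interaction feature rows for the Twitter user named in user.csv: a token
# co-occurrence graph is grown from tweet histories, NetworkX centralities are computed
# over it, TF-IDF-weighted, and the results are appended to collection/<user>_interaction.csv.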
import networkx as nx
import pandas as pd
import sys
import spacy
import textacy.vsm
from textacy.vsm import Vectorizer
import time
import os
import re
import csv
import unicodedata
import numpy as np
from io import StringIO
from datetime import datetime
from random import uniform
# Happy Emoticons
emoticons_happy = set([
':-)', ':)', ';)', ':o)', ':]', ':3', ':c)', ':>', '=]', '8)', '=)', ':}',
':^)', ':-D', ':D', '8-D', '8D', 'x-D', 'xD', 'X-D', 'XD', '=-D', '=D',
'=-3', '=3', ':-))', ":'-)", ":')", ':*', ':^*', '>:P', ':-P', ':P', 'X-P',
'x-p', 'xp', 'XP', ':-p', ':p', '=p', ':-b', ':b', '>:)', '>;)', '>:-)',
'<3'
])
# Sad Emoticons
emoticons_sad = set([
':L', ':-/', '>:/', ':S', '>:[', ':@', ':-(', ':[', ':-||', '=L', ':<',
':-[', ':-<', '=\\', '=/', '>:(', ':(', '>.<', ":'-(", ":'(", ':\\', ':-c',
':c', ':{', '>:\\', ';('
])
# all emoticons (happy + sad)
emoticons = emoticons_happy.union(emoticons_sad)
data = pd.read_csv('user.csv', header=None)
temp_user = str(data[1][4])
cont = 0
cont_e = 0
reply = 0
rt = 0
lk = 0
quote = 0
na = 0
param = 50
param_two = 100
ctt = True
def clean_tweets(tweet):
# remove stock market tickers like $GE
tweet = tweet[2:-1]
tweet = re.sub(r'\$\w*', '', tweet)
# remove old style retweet text "RT"
tweet = re.sub(r'^RT[\s]+', '', tweet)
# remove hyperlinks
tweet = re.sub(r'https?:\/\/.*[\r\n]*', '', tweet)
# remove hashtags
# only removing the hash # sign from the word
tweet = re.sub(r'#', '', tweet)
tweet = re.sub(r'@', '', tweet)
tweet = re.sub(r'\\x..', '', tweet)
tweet = re.sub(r'\\n', ' ', tweet)
tweet = tweet.lower()
# tweet = re.sub(r'\\x\s\S', '', tweet)
# tweet = re.sub(r'\\x\d\D', '', tweet)
# tweet = re.sub(r'\\x\S\s', '', tweet)
# tweet = re.sub(r'\\x\D\d', '', tweet)
# tweet = re.sub(r'\\x\d\d', '', tweet)
# tweet = re.sub(r'\\x\s\s', '', tweet)
return tweet
def calculate_centrality_followee(data):
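	# For each followee tweet the user took no action on, grow the token co-occurrence graph
	# from up to param_two of the user's own earlier tweets, score the tweet's tokens with
	# several NetworkX centralities (raw and TF-IDF weighted), and append a 'No action'
	# feature row to <user>_interaction.csv.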
	global cont, cont_e, ctt, reply, quote, rt, lk, param, na, param_two
for i, r in data.iterrows():
if (na > param):
print("Completed")
break
else:
duration = 1000000000000000000
s = ''
min_interval = 10000000000000
dfb = next(iter(data_one[data_one['origin_id']==r['id_str']].index), 'no match')
if dfb == "no match":
na = na + 1
print ('No Action # %d' % na)
loc_ind = 0
for ia, ra in data_one.iterrows():
duration_raw = ra['created_at'] - r['created_at']
# print (ra['created_at'])
# print (r['created_at'])
# print (duration_raw)
duration = duration_raw.total_seconds()
if duration > 0:
if duration < min_interval:
min_interval = duration
loc_ind = ia
if min_interval == 10000000000000:
part_data = data_one
else:
if (loc_ind < param_two):
part_data = data_one.iloc[0:loc_ind-1]
else:
part_data = data_one.iloc[loc_ind-(param_two-1):loc_ind-1]
if r['status'] == 'like':
s = r['origin_full_text']
else:
s = r['full_text']
else:
part_data = pd.DataFrame()
s = 'null'
for index, row in part_data.iterrows():
if not row['full_text'] == 'null':
docs = clean_tweets(row['full_text'])
doc = nlp(docs)
corpus.add_doc(doc)
token_list = []
for token in doc:
if any(c.isalpha() for c in str(token)) and not token.is_stop:
token_list.append(token.lemma_)
if str(token.lemma_) not in G:
G.add_node(str(token.lemma_), decay = 0, checked = 'n')
for i_two in range(len(token_list)):
count = i_two + 1
while count < len(token_list):
if G.has_edge(token_list[i_two], token_list[count]):
G[token_list[i_two]][token_list[count]]['weight'] += 1
else:
G.add_edge(token_list[i_two],token_list[count], weight=1)
count = count + 1
else :
pass
if (s == 'null'):
pass
else:
docs_two = clean_tweets(s)
doc_two = nlp(docs_two)
corpus.add_doc(doc_two)
token_list = []
for token in doc_two:
if any(c.isalpha() for c in str(token)) and not token.is_stop:
token_list.append(token.lemma_)
if str(token.lemma_) not in G:
G.add_node(str(token.lemma_),decay = 1,checked = 'y')
#G.add_node(str(token.lemma_.lower()), weight = 1)
else:
attrs = {str(token.lemma_): {'decay': 1, 'checked':'y'}}
nx.set_node_attributes(G, attrs)
# for i_two in range(len(token_list)):
# count = i_two + 1
# while count < len(token_list):
# if G.has_edge(token_list[i_two], token_list[count]):
# G[token_list[i_two]][token_list[count]]['weight'] += 1
# else:
# G.add_edge(token_list[i_two],token_list[count], weight=1)
# count = count + 1
for token in doc_two:
if any(c.isalpha() for c in str(token)) and not token.is_stop:
# print (G[str(token.lemma_)])
for n in G.neighbors(str(token.lemma_)):
if G.node[n]['checked'] == 'n':
G.node[n]['checked'] = 'y'
# print (G.node[n]['checked'])
G.node[n]['decay'] = G.node[str(token.lemma_)]['decay']*0.5
# print (G.node[n]['decay'])
for i_two in range(len(token_list)):
count = i_two + 1
while count < len(token_list):
if G.has_edge(token_list[i_two], token_list[count]):
G[token_list[i_two]][token_list[count]]['weight'] += 1
else:
G.add_edge(token_list[i_two],token_list[count], weight=1)
count = count + 1
vectorizer = textacy.vsm.Vectorizer(apply_idf=True, norm=None, idf_type='standard')
tokenized_docs = [doc._.to_terms_list(ngrams=1, named_entities=True, as_strings=True, filter_stops=True, normalize='lemma') for doc in corpus]
doc_term_matrix = vectorizer.fit_transform(tokenized_docs)
a = doc_term_matrix.toarray()
start_time = time.time()
try:
centrality1 = nx.degree_centrality(G)
except:
centrality1 = 0
print ("error")
print("Calculating degree_centrality")
t1 = time.time()
elapsed_time1 = t1 - start_time
print ("Time elapsed: %s seconds" % (elapsed_time1) )
dfbase = pd.DataFrame(list(centrality1.items()), columns = ['node', 'degree_centrality'])
#centrality_list = ['eigenvector_centrality']
centrality_list = ['eigenvector_centrality','closeness_centrality','betweenness_centrality','load_centrality','subgraph_centrality','harmonic_centrality']
temp_time = time.time()
for ct in centrality_list:
try:
print ("Calculating %s" % ct)
centrality = getattr(nx, ct)(G)
print ("Time elapsed: %s seconds" % (time.time() - temp_time) )
temp_time = time.time()
dfbase[ct]=dfbase['node'].map(centrality)
except Exception as e:
print ("error")
dfbase[ct]=0
#for token in doc_two:
#print(token.text)
token_list_two = pd.DataFrame()
# print (dfbase['node'])
# print (dfbase['degree_centrality'])
for sent in doc_two.sents:
for token in sent:
# print (token)
if any(c.isalpha() for c in str(token)) and not token.is_stop:
rr = dfbase.loc[dfbase['node'] == token.lemma_]
# print (rr)
if not rr.empty :
# print (rr)
token_list_two = token_list_two.append(rr)
if not token_list_two.empty:
centrality_list_full = ['degree_centrality','eigenvector_centrality','closeness_centrality','betweenness_centrality','load_centrality','subgraph_centrality','harmonic_centrality']
for itm in centrality_list_full:
kwargs = {('weighted_%s' % itm) : lambda x: 0.0}
token_list_two = token_list_two.assign(**kwargs)
for i, row in token_list_two.iterrows():
vb = a.item(((int(a.size/a[0].size))-1),(vectorizer.vocabulary_terms.get(row['node'])))
for it in centrality_list_full:
# print (vec.vocabulary_terms)
# print (row['node'])
va = row[it]
# print ('vb')
# print (vb)
c = va*vb
# print ('c')
# print (c)
#print(va)
#print (vb)
token_list_two.set_value(i, ('weighted_%s' % it), c)
#print (('weighted_%s' % it))
#print (row[('weighted_%s' % it)])
#print ("token_list")
#print (token_list)
# print (token_list_two['weighted_degree_centrality'])
tk_sum = pd.DataFrame()
if r['status'] == 'like':
tk_sum = tk_sum.assign(node = [r['origin_id']])
else:
tk_sum = tk_sum.assign(node = [r['id_str']])
tk_sum = tk_sum.assign(screen_name = [r['screen_name']])
tk_sum = tk_sum.assign(created_at = [r['created_at']])
tk_sum = tk_sum.assign(full_text = [r['full_text']])
tk_sum = tk_sum.assign(truncated = [r['truncated']])
tk_sum = tk_sum.assign(user_description = [r['len(user.description)']])
tk_sum = tk_sum.assign(user_followers_count = [r['user.followers_count']])
tk_sum = tk_sum.assign(user_friends_count= [r['user.friends_count']])
tk_sum = tk_sum.assign(user_listed_count = [r['user.listed_count']])
tk_sum = tk_sum.assign(user_statuses_count = [r['user.statuses_count']])
tk_sum = tk_sum.assign(user_favourites_count = [r['user.favourites_count']])
tk_sum = tk_sum.assign(user_created_at = [r['user.created_at']])
tk_sum = tk_sum.assign(entities_hashtags = [r['entities.hashtags']])
tk_sum = tk_sum.assign(entities_user_mentions = [r['entities.user_mentions']])
tk_sum = tk_sum.assign(entities_symbols = [r['entities.symbols']])
tk_sum = tk_sum.assign(entities_polls = [r['bool(entities.polls)']])
tk_sum = tk_sum.assign(entities_media_type = [r['entities.media_type']])
tk_sum = tk_sum.assign(retweet_coun = [r['retweet_count']])
tk_sum = tk_sum.assign(favorite_count = [r['favorite_count']])
tk_sum = tk_sum.assign(origin_id = [r['origin_id']])
tk_sum = tk_sum.assign(origin_created_at = [r['origin_created_at']])
tk_sum = tk_sum.assign(origin_full_text = [r['origin_full_text']])
tk_sum = tk_sum.assign(origin_user = [r['origin_user']])
tk_sum = tk_sum.assign(origin_user_len_description = [r['origin_user_len_description']])
tk_sum = tk_sum.assign(origin_user_followers_count = [r['origin_user_followers_count']])
tk_sum = tk_sum.assign(origin_user_friends_count = [r['origin_user_friends_count']])
tk_sum = tk_sum.assign(origin_user_listed_count = [r['origin_user_listed_count']])
tk_sum = tk_sum.assign(origin_user_statuses_count = [r['origin_user_statuses_count']])
tk_sum = tk_sum.assign(origin_user_favourites_count = [r['origin_user_favourites_count']])
tk_sum = tk_sum.assign(origin_user_created_at = [r['origin_user_created_at']])
tk_sum = tk_sum.assign(origin_entities_hashtags = [r['origin_entities_hashtags']])
tk_sum = tk_sum.assign(origin_entities_user_mentions = [r['origin_entities_user_mentions']])
tk_sum = tk_sum.assign(origin_entities_symbols = [r['origin_entities_symbols']])
tk_sum = tk_sum.assign(origin_entities_polls = [r['origin_entities_polls']])
tk_sum = tk_sum.assign(origin_entities_media_type = [r['origin_entities_media_type']])
tk_sum = tk_sum.assign(origin_retweet_count = [r['origin_retweet_count']])
tk_sum = tk_sum.assign(origin_favorite_count = [r['origin_favorite_count']])
tk_sum = tk_sum.assign(origin_status = [r['origin_status']])
try:
tk_sum = tk_sum.assign(min_interval = duration)
except:
tk_sum = tk_sum.assign(min_interval = 100000000000000000)
for itm in centrality_list_full:
# print (token_list[('weighted_%s' % itm)])
kwargs = {itm : lambda x: [token_list_two[('%s' % itm)].sum()]}
try:
tk_sum = tk_sum.assign(**kwargs)
except:
kwargs = {itm : lambda x: 0.0}
tk_sum = tk_sum.assign(**kwargs)
for itm in centrality_list_full:
# print (token_list[('weighted_%s' % itm)])
# print (token_list_two[('weighted_%s' % itm)].sum())
kwargs = {('weighted_%s' % itm) : lambda x: [token_list_two[('weighted_%s' % itm)].sum()]}
try:
tk_sum = tk_sum.assign(**kwargs)
except:
kwargs = {('weighted_%s' % itm) : lambda x: 0.0}
tk_sum = tk_sum.assign(**kwargs)
try:
tk_sum = tk_sum.assign(status = ['No action'])
except:
tk_sum = tk_sum.assign(status = ['unknown'])
# if ind == 1:
# try:
# tk_sum = tk_sum.assign(status = ['no action'])
# except:
# tk_sum = tk_sum.assign(status = ['unknown'])
#print (tk_sum)
# .reset_index()
# token_list.sort_values(by=list(token_list.columns),axis=0)
# token_list.reset_index()
#tk = token_list.iloc[[0]]
# tk = tk.assign(status = r['status'])
cont = cont + 1
print ("%d interactions collected" % cont)
with open(os.path.join(tweetnetworks,'%s_interaction.csv' % temp_user), 'a') as f:
writer = csv.writer(f)
#tk.to_csv(f, header=False,index=False)
tk_sum.to_csv(f, header=False,index=False)
else:
cont_e = cont_e + 1
print ("%dth interaction with zero centralities" % cont_e)
part_data = pd.DataFrame()
def calculate_centrality_original(data):
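	# For each of the user's own interactions (reply, quote, retweet, like; capped at param
	# rows per type), grow the token co-occurrence graph from the preceding tweets, score the
	# origin tweet's tokens with the same centralities, and append a labeled feature row.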
global cont, cont_e, ctt, reply, quote, rt, lk, param, na, param_two
for i, r in data.iterrows():
if i == 0:
pass
else:
if r['status'] == "reply":
if reply > param:
ctt = False
else:
print ('reply # %d' % reply)
reply = reply + 1
elif r['status'] == "quote":
if quote > param:
ctt = False
else:
print ('quote # %d' % quote)
quote = quote + 1
elif r['status'] == "retweet":
if rt > param:
ctt = False
else:
print ('retweet # %d' % rt)
rt = rt + 1
elif r['status'] == "like":
if lk > param:
ctt = False
else:
print ('like # %d' % lk)
lk = lk + 1
elif (r['status'] == 'original'):
ctt = False
else:
ctt = True
# if (lk > param) and (rt > param) and (quote > param) and (reply > param) and (na > param):
# print("Completed")
# break
if (not ctt):
pass
ctt = True
else:
print (i)
duration = 1000000000000000000
s = ''
if not r['status'] == 'original' and not r['origin_full_text'] == 'null':
if (i < param_two):
part_data = data_one.iloc[0:i-1]
else:
part_data = data_one.iloc[i-(param_two-1):i-1]
duration_raw = r['created_at'] - datetime.strptime(r['origin_created_at'], '%Y-%m-%d %H:%M:%S')
duration = duration_raw.total_seconds()
if duration == 0:
duration = uniform(0, 10800)
s = r['origin_full_text']
for index, row in part_data.iterrows():
if not row['full_text'] == 'null':
docs = clean_tweets(row['full_text'])
doc = nlp(docs)
corpus.add_doc(doc)
token_list = []
for token in doc:
if any(c.isalpha() for c in str(token)) and not token.is_stop:
token_list.append(token.lemma_)
if str(token.lemma_) not in G:
G.add_node(str(token.lemma_), decay = 0, checked = 'n')
for i_two in range(len(token_list)):
count = i_two + 1
while count < len(token_list):
if G.has_edge(token_list[i_two], token_list[count]):
G[token_list[i_two]][token_list[count]]['weight'] += 1
else:
G.add_edge(token_list[i_two],token_list[count], weight=1)
count = count + 1
else :
pass
if (s == 'null'):
pass
else:
docs_two = clean_tweets(s)
doc_two = nlp(docs_two)
corpus.add_doc(doc_two)
token_list = []
for token in doc_two:
if any(c.isalpha() for c in str(token)) and not token.is_stop:
token_list.append(token.lemma_)
if str(token.lemma_) not in G:
G.add_node(str(token.lemma_),decay = 1,checked = 'y')
#G.add_node(str(token.lemma_.lower()), weight = 1)
else:
attrs = {str(token.lemma_): {'decay': 1, 'checked':'y'}}
nx.set_node_attributes(G, attrs)
# for i_two in range(len(token_list)):
# count = i_two + 1
# while count < len(token_list):
# if G.has_edge(token_list[i_two], token_list[count]):
# G[token_list[i_two]][token_list[count]]['weight'] += 1
# else:
# G.add_edge(token_list[i_two],token_list[count], weight=1)
# count = count + 1
for token in doc_two:
if any(c.isalpha() for c in str(token)) and not token.is_stop:
# print (G[str(token.lemma_)])
for n in G.neighbors(str(token.lemma_)):
if G.node[n]['checked'] == 'n':
G.node[n]['checked'] = 'y'
# print (G.node[n]['checked'])
G.node[n]['decay'] = G.node[str(token.lemma_)]['decay']*0.5
# print (G.node[n]['decay'])
for i_two in range(len(token_list)):
count = i_two + 1
while count < len(token_list):
if G.has_edge(token_list[i_two], token_list[count]):
G[token_list[i_two]][token_list[count]]['weight'] += 1
else:
G.add_edge(token_list[i_two],token_list[count], weight=1)
count = count + 1
vectorizer = textacy.vsm.Vectorizer(apply_idf=True, norm=None, idf_type='standard')
tokenized_docs = [doc._.to_terms_list(ngrams=1, named_entities=True, as_strings=True, filter_stops=True, normalize='lemma') for doc in corpus]
doc_term_matrix = vectorizer.fit_transform(tokenized_docs)
a = doc_term_matrix.toarray()
start_time = time.time()
try:
centrality1 = nx.degree_centrality(G)
except:
centrality1 = 0
print ("error")
print("Calculating degree_centrality")
t1 = time.time()
elapsed_time1 = t1 - start_time
print ("Time elapsed: %s seconds" % (elapsed_time1) )
dfbase = pd.DataFrame(list(centrality1.items()), columns = ['node', 'degree_centrality'])
#centrality_list = ['eigenvector_centrality']
centrality_list = ['eigenvector_centrality','closeness_centrality','betweenness_centrality','load_centrality','subgraph_centrality','harmonic_centrality']
temp_time = time.time()
for ct in centrality_list:
try:
print ("Calculating %s" % ct)
centrality = getattr(nx, ct)(G)
print ("Time elapsed: %s seconds" % (time.time() - temp_time) )
temp_time = time.time()
dfbase[ct]=dfbase['node'].map(centrality)
except Exception as e:
print ("error")
dfbase[ct]=0
#for token in doc_two:
#print(token.text)
token_list_two = pd.DataFrame()
# print (dfbase['node'])
# print (dfbase['degree_centrality'])
for sent in doc_two.sents:
for token in sent:
# print (token)
if any(c.isalpha() for c in str(token)) and not token.is_stop:
rr = dfbase.loc[dfbase['node'] == token.lemma_]
# print (rr)
if not rr.empty :
# print (rr)
token_list_two = token_list_two.append(rr)
if not token_list_two.empty:
centrality_list_full = ['degree_centrality','eigenvector_centrality','closeness_centrality','betweenness_centrality','load_centrality','subgraph_centrality','harmonic_centrality']
for itm in centrality_list_full:
kwargs = {('weighted_%s' % itm) : lambda x: 0.0}
token_list_two = token_list_two.assign(**kwargs)
for i, row in token_list_two.iterrows():
vb = a.item(((int(a.size/a[0].size))-1),(vectorizer.vocabulary_terms.get(row['node'])))
for it in centrality_list_full:
# print (vec.vocabulary_terms)
# print (row['node'])
va = row[it]
# print ('vb')
# print (vb)
c = va*vb
# print ('c')
# print (c)
#print(va)
#print (vb)
token_list_two.set_value(i, ('weighted_%s' % it), c)
#print (('weighted_%s' % it))
#print (row[('weighted_%s' % it)])
#print ("token_list")
#print (token_list)
# print (token_list_two['weighted_degree_centrality'])
tk_sum = pd.DataFrame()
if r['status'] == 'like':
tk_sum = tk_sum.assign(node = [r['origin_id']])
else:
tk_sum = tk_sum.assign(node = [r['id_str']])
tk_sum = tk_sum.assign(screen_name = [r['screen_name']])
tk_sum = tk_sum.assign(created_at = [r['created_at']])
tk_sum = tk_sum.assign(full_text = [r['full_text']])
tk_sum = tk_sum.assign(truncated = [r['truncated']])
tk_sum = tk_sum.assign(user_description = [r['len(user.description)']])
tk_sum = tk_sum.assign(user_followers_count = [r['user.followers_count']])
tk_sum = tk_sum.assign(user_friends_count= [r['user.friends_count']])
tk_sum = tk_sum.assign(user_listed_count = [r['user.listed_count']])
tk_sum = tk_sum.assign(user_statuses_count = [r['user.statuses_count']])
tk_sum = tk_sum.assign(user_favourites_count = [r['user.favourites_count']])
tk_sum = tk_sum.assign(user_created_at = [r['user.created_at']])
tk_sum = tk_sum.assign(entities_hashtags = [r['entities.hashtags']])
tk_sum = tk_sum.assign(entities_user_mentions = [r['entities.user_mentions']])
tk_sum = tk_sum.assign(entities_symbols = [r['entities.symbols']])
tk_sum = tk_sum.assign(entities_polls = [r['bool(entities.polls)']])
tk_sum = tk_sum.assign(entities_media_type = [r['entities.media_type']])
tk_sum = tk_sum.assign(retweet_coun = [r['retweet_count']])
tk_sum = tk_sum.assign(favorite_count = [r['favorite_count']])
tk_sum = tk_sum.assign(origin_id = [r['origin_id']])
tk_sum = tk_sum.assign(origin_created_at = [r['origin_created_at']])
tk_sum = tk_sum.assign(origin_full_text = [r['origin_full_text']])
tk_sum = tk_sum.assign(origin_user = [r['origin_user']])
tk_sum = tk_sum.assign(origin_user_len_description = [r['origin_user_len_description']])
tk_sum = tk_sum.assign(origin_user_followers_count = [r['origin_user_followers_count']])
tk_sum = tk_sum.assign(origin_user_friends_count = [r['origin_user_friends_count']])
tk_sum = tk_sum.assign(origin_user_listed_count = [r['origin_user_listed_count']])
tk_sum = tk_sum.assign(origin_user_statuses_count = [r['origin_user_statuses_count']])
tk_sum = tk_sum.assign(origin_user_favourites_count = [r['origin_user_favourites_count']])
tk_sum = tk_sum.assign(origin_user_created_at = [r['origin_user_created_at']])
tk_sum = tk_sum.assign(origin_entities_hashtags = [r['origin_entities_hashtags']])
tk_sum = tk_sum.assign(origin_entities_user_mentions = [r['origin_entities_user_mentions']])
tk_sum = tk_sum.assign(origin_entities_symbols = [r['origin_entities_symbols']])
tk_sum = tk_sum.assign(origin_entities_polls = [r['origin_entities_polls']])
tk_sum = tk_sum.assign(origin_entities_media_type = [r['origin_entities_media_type']])
tk_sum = tk_sum.assign(origin_retweet_count = [r['origin_retweet_count']])
tk_sum = tk_sum.assign(origin_favorite_count = [r['origin_favorite_count']])
tk_sum = tk_sum.assign(origin_status = [r['origin_status']])
try:
tk_sum = tk_sum.assign(min_interval = duration)
except:
tk_sum = tk_sum.assign(min_interval = 100000000000000000)
for itm in centrality_list_full:
# print (token_list[('weighted_%s' % itm)])
kwargs = {itm : lambda x: [token_list_two[('%s' % itm)].sum()]}
try:
tk_sum = tk_sum.assign(**kwargs)
except:
kwargs = {itm : lambda x: 0.0}
tk_sum = tk_sum.assign(**kwargs)
for itm in centrality_list_full:
# print (token_list[('weighted_%s' % itm)])
# print (token_list_two[('weighted_%s' % itm)].sum())
kwargs = {('weighted_%s' % itm) : lambda x: [token_list_two[('weighted_%s' % itm)].sum()]}
try:
tk_sum = tk_sum.assign(**kwargs)
except:
kwargs = {('weighted_%s' % itm) : lambda x: 0.0}
tk_sum = tk_sum.assign(**kwargs)
try:
tk_sum = tk_sum.assign(status = [r['status']])
except:
tk_sum = tk_sum.assign(status = ['unknown'])
# if ind == 1:
# try:
# tk_sum = tk_sum.assign(status = ['no action'])
# except:
# tk_sum = tk_sum.assign(status = ['unknown'])
#print (tk_sum)
# .reset_index()
# token_list.sort_values(by=list(token_list.columns),axis=0)
# token_list.reset_index()
#tk = token_list.iloc[[0]]
# tk = tk.assign(status = r['status'])
cont = cont + 1
print ("%d interactions collected" % cont)
with open(os.path.join(tweetnetworks,'%s_interaction.csv' % temp_user), 'a') as f:
writer = csv.writer(f)
#tk.to_csv(f, header=False,index=False)
tk_sum.to_csv(f, header=False,index=False)
else:
cont_e = cont_e + 1
print ("%dth interaction with zero centralities" % cont_e)
part_data = pd.DataFrame()
if __name__ == '__main__':
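	# Write the CSV header, load and time-sort the user's own and followee tweet histories,
	# initialise spaCy, the shared co-occurrence graph and the textacy corpus, then run both passes.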
tweetnetworks = 'collection'
if not os.path.exists(tweetnetworks):
os.makedirs(tweetnetworks)
with open(os.path.join(tweetnetworks,'%s_interaction.csv' % temp_user), 'a') as f:
writer = csv.writer(f)
#writer.writerow(['node','degree_centrality','eigenvector_centrality','status'])
writer.writerow([
'node',
'screen_name',
'created_at',
'full_text',
'truncated',
'len(user.description)',
'user.followers_count',
'user.friends_count',
'user.listed_count',
'user.statuses_count',
'user.favourites_count',
'user.created_at',
'entities.hashtags',
'entities.user_mentions',
'entities.symbols',
'bool(entities.polls)',
'entities.media_type',
'retweet_count',
'favorite_count',
'origin_id',
'origin_created_at',
'origin_full_text',
'origin_user',
'origin_user_len_description',
'origin_user_followers_count',
'origin_user_friends_count',
'origin_user_listed_count',
'origin_user_statuses_count',
'origin_user_favourites_count',
'origin_user_created_at',
'origin_entities_hashtags',
'origin_entities_user_mentions',
'origin_entities_symbols',
'origin_entities_polls',
'origin_entities_media_type',
'origin_retweet_count',
'origin_favorite_count',
'origin_status',
'min_interval',
'degree_centrality',
'eigenvector_centrality',
'closeness_centrality',
'betweenness_centrality',
'load_centrality',
'subgraph_centrality',
'harmonic_centrality',
'weighted_degree_centrality',
'weighted_eigenvector_centrality',
'weighted_closeness_centrality',
'weighted_betweenness_centrality',
'weighted_load_centrality',
'weighted_subgraph_centrality',
'weighted_harmonic_centrality',
'status'])
data_one = pd.read_csv(os.path.join(tweetnetworks,'%s_tweets.csv' % temp_user))
data_one = data_one.replace(np.nan, 'null', regex=True)
# data_one['created_at'] = data_one.apply(lambda row: row['origin_created_at'] if row['created_at'] == 'null' else row['created_at'],axis=1)
data_one['created_at'] = pd.to_datetime(data_one['created_at'])
data_one = data_one.sort_values('created_at', ascending=True)
data_one = data_one.reset_index()
# print (data_one['created_at'])
data_two = pd.read_csv(os.path.join(tweetnetworks,'%s_followee_tweets.csv' % temp_user))
data_two = data_two.replace(np.nan, 'null', regex=True)
# data_two['created_at'] = data_two.apply(lambda row: row['origin_created_at'] if row['created_at'] == 'null' else row['created_at'],axis=1)
	data_two['created_at'] = pd.to_datetime(data_two['created_at'])
data_two = data_two.sort_values('created_at', ascending=True)
data_two = data_two.reset_index()
spacy.prefer_gpu()
nlp = spacy.load("en_core_web_sm")
G = nx.Graph()
corpus = textacy.Corpus(nlp)
calculate_centrality_original(data_one)
calculate_centrality_followee(data_two)
| 34.942715 | 185 | 0.620443 | 3,916 | 28,059 | 4.170838 | 0.075843 | 0.061838 | 0.042001 | 0.060001 | 0.819813 | 0.804567 | 0.801567 | 0.790669 | 0.778057 | 0.771261 | 0 | 0.010609 | 0.220607 | 28,059 | 803 | 186 | 34.942715 | 0.736248 | 0.131651 | 0 | 0.675725 | 0 | 0 | 0.184252 | 0.065996 | 0 | 0 | 0 | 0 | 0 | 1 | 0.005435 | false | 0.01087 | 0.028986 | 0 | 0.036232 | 0.041667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
cede8ca123aeee28a8eda308b7ab0a1cc67d6e7a | 454 | py | Python | othello/stone.py | tetsuyan153/Othello_Python | 2a5d803d217f1e5df20d5d66128fd15f3f19a405 | [
"Apache-2.0"
] | null | null | null | othello/stone.py | tetsuyan153/Othello_Python | 2a5d803d217f1e5df20d5d66128fd15f3f19a405 | [
"Apache-2.0"
] | null | null | null | othello/stone.py | tetsuyan153/Othello_Python | 2a5d803d217f1e5df20d5d66128fd15f3f19a405 | [
"Apache-2.0"
] | null | null | null | from enum import Enum
class Color(Enum):
WHITE = "White"
BLACK = "Black"
class Stone:
def __init__(self, c):
self._color = c
@property
def color(self):
return self._color
def reverse(self):
if self._color == Color.BLACK:
self._color = Color.WHITE
else:
self._color = Color.BLACK
def __str__(self):
return self._color.value
| 18.16 | 39 | 0.528634 | 51 | 454 | 4.431373 | 0.372549 | 0.238938 | 0.185841 | 0.168142 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.381057 | 454 | 24 | 40 | 18.916667 | 0.80427 | 0 | 0 | 0 | 0 | 0 | 0.02331 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.235294 | false | 0 | 0.058824 | 0.117647 | 0.647059 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
0c6a3b79c14879680da0d89883df62c87db7b6e2 | 860 | py | Python | notebook/pillow_image_resize_all.py | puyopop/python-snippets | 9d70aa3b2a867dd22f5a5e6178a5c0c5081add73 | [
"MIT"
] | 1 | 2020-07-18T17:58:43.000Z | 2020-07-18T17:58:43.000Z | notebook/pillow_image_resize_all.py | puyopop/python-snippets | 9d70aa3b2a867dd22f5a5e6178a5c0c5081add73 | [
"MIT"
] | null | null | null | notebook/pillow_image_resize_all.py | puyopop/python-snippets | 9d70aa3b2a867dd22f5a5e6178a5c0c5081add73 | [
"MIT"
] | null | null | null | import os
import glob
from PIL import Image
files = glob.glob('./data/temp/images/*.jpg')
for f in files:
img = Image.open(f)
img_resize = img.resize((int(img.width / 2), int(img.height / 2)))
title, ext = os.path.splitext(f)
img_resize.save(title + '_half' + ext)
files = glob.glob('./data/temp/images/*')
for f in files:
title, ext = os.path.splitext(f)
if ext in ['.jpg', '.png']:
img = Image.open(f)
img_resize = img.resize((int(img.width / 2), int(img.height / 2)))
img_resize.save(title + '_half' + ext)
files = glob.glob('./data/temp/images/*')
for f in files:
try:
img = Image.open(f)
img_resize = img.resize((int(img.width / 2), int(img.height / 2)))
title, ext = os.path.splitext(f)
img_resize.save(title + '_half' + ext)
except OSError as e:
pass
| 26.875 | 74 | 0.593023 | 134 | 860 | 3.738806 | 0.261194 | 0.161677 | 0.0998 | 0.101796 | 0.828343 | 0.828343 | 0.728543 | 0.728543 | 0.728543 | 0.728543 | 0 | 0.009119 | 0.234884 | 860 | 31 | 75 | 27.741935 | 0.75228 | 0 | 0 | 0.68 | 0 | 0 | 0.101163 | 0.027907 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.04 | 0.12 | 0 | 0.12 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
0c880440b56060fbe1bb5a8df651f1d59aed2062 | 159 | py | Python | fable/long.py | dbrattli/fable-library | 5b344c5a8446ce2782931231b2334984610c593e | [
"MIT"
] | 2 | 2021-01-18T20:54:16.000Z | 2021-01-18T22:06:50.000Z | fable/long.py | dbrattli/fablelib | 5b344c5a8446ce2782931231b2334984610c593e | [
"MIT"
] | null | null | null | fable/long.py | dbrattli/fablelib | 5b344c5a8446ce2782931231b2334984610c593e | [
"MIT"
] | null | null | null | def fromBits(lowBits: int, highBits: int, unsigned: bool):
return lowBits + (highBits << 32)
def op_LeftShift(self, numBits):
return self << numBits
| 22.714286 | 58 | 0.691824 | 20 | 159 | 5.45 | 0.65 | 0.201835 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015504 | 0.188679 | 159 | 6 | 59 | 26.5 | 0.829457 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 6 |
0cf83889190e5fc002fa409f3e86ec829a8fe0d9 | 2,911 | py | Python | laia/utils/checks.py | basbeu/PyLaia | d14458484b56622204b1730a7d53220c5d0f1bc1 | [
"MIT"
] | 2 | 2020-09-10T13:31:17.000Z | 2021-07-31T09:44:17.000Z | laia/utils/checks.py | basbeu/PyLaia | d14458484b56622204b1730a7d53220c5d0f1bc1 | [
"MIT"
] | 1 | 2020-12-06T18:11:52.000Z | 2020-12-06T18:19:38.000Z | laia/utils/checks.py | basbeu/PyLaia | d14458484b56622204b1730a7d53220c5d0f1bc1 | [
"MIT"
] | 2 | 2020-04-20T13:40:56.000Z | 2020-10-17T11:59:55.000Z | from __future__ import absolute_import
from __future__ import division
import torch
import laia.common.logging as log
_TENSOR_REAL = (torch.float, torch.double, torch.half)
def check_inf(tensor, msg=None, name=None, raise_exception=False, **kwargs):
r"""Check whether a tensor contains a +/- infinite value.
Arguments:
tensor (torch.Tensor): tensor to check.
msg (str): message format string. The message format can use the keys
``abs_num`` and ``rel_num`` to print the absolute number and the
percentage of infinite elements. (Default: None)
name (str): Name of the logger used to log the event (Default: None)
raise_exception (bool): raise an exception instead of logging the event
(Default: False)
kwargs: additional named arguments passed to format the message.
Return:
`True` if the tensor contains any +/- infinite element, or `False`
otherwise.
"""
logger = log.get_logger(name)
if logger.isEnabledFor(log.DEBUG) and tensor.dtype in _TENSOR_REAL:
num_inf = torch.isinf(tensor).sum().item()
if num_inf > 0:
per_inf = num_inf / tensor.numel()
msg = (
"{:d} ({:.2%}) INF values found".format(num_inf, per_inf)
if msg is None
else msg.format(abs_num=num_inf, rel_num=per_inf, **kwargs)
)
if raise_exception:
raise ValueError(msg)
else:
logger.debug(msg)
return True
return False
def check_nan(tensor, msg=None, name=None, raise_exception=False, **kwargs):
r"""Check whether a tensor contains a NaN value.
Arguments:
tensor (torch.Tensor): tensor to check.
msg (str): message format string. The message format can use the keys
``abs_num`` and ``rel_num`` to print the absolute number and the
percentage of NaN elements. (Default: None)
name (str): Name of the logger used to log the event (Default: None)
raise_exception (bool): raise an exception instead of logging the event
(Default: False)
kwargs: additional named arguments passed to format the message.
Return:
`True` if the tensor contains any NaN element, or `False` otherwise.
"""
logger = log.get_logger(name)
if logger.isEnabledFor(log.DEBUG) and tensor.dtype in _TENSOR_REAL:
num_nan = torch.isnan(tensor).sum().item()
if num_nan > 0:
per_nan = num_nan / tensor.numel()
msg = (
"{:d} ({:.2%}) NaN values found".format(num_nan, per_nan)
if msg is None
else msg.format(abs_num=num_nan, rel_num=per_nan, **kwargs)
)
if raise_exception:
raise ValueError(msg)
else:
logger.debug(msg)
return True
return False
| 37.320513 | 77 | 0.616627 | 383 | 2,911 | 4.558747 | 0.229765 | 0.04811 | 0.041237 | 0.019473 | 0.805269 | 0.766323 | 0.766323 | 0.766323 | 0.766323 | 0.766323 | 0 | 0.001943 | 0.292683 | 2,911 | 77 | 78 | 37.805195 | 0.846042 | 0.42597 | 0 | 0.487805 | 0 | 0 | 0.038511 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04878 | false | 0 | 0.097561 | 0 | 0.243902 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
0b317463154a6f253d4f8001fb39790bd3647064 | 112 | py | Python | goscripts/__init__.py | pmoris/go-tools | 1c7fffeefa4abf4a2188fc1c72aaede59e610723 | [
"MIT"
] | 3 | 2020-07-16T22:41:35.000Z | 2022-01-27T20:23:47.000Z | goscripts/__init__.py | pmoris/go-tools | 1c7fffeefa4abf4a2188fc1c72aaede59e610723 | [
"MIT"
] | null | null | null | goscripts/__init__.py | pmoris/go-tools | 1c7fffeefa4abf4a2188fc1c72aaede59e610723 | [
"MIT"
] | 3 | 2019-02-14T13:38:25.000Z | 2020-07-16T23:50:04.000Z | from . import enrichment_stats
from . import gaf_parser
from . import genelist_importer
from . import obo_tools
| 22.4 | 31 | 0.821429 | 16 | 112 | 5.5 | 0.625 | 0.454545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 112 | 4 | 32 | 28 | 0.916667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
0b32f0ab9984f690199b2f953402b05068e6e8c1 | 42 | py | Python | squawk/data/preprocessing/__init__.py | daemon/squawk | df6443a200f8bfef7d5338d8577fc30eac4f49b9 | [
"MIT"
] | null | null | null | squawk/data/preprocessing/__init__.py | daemon/squawk | df6443a200f8bfef7d5338d8577fc30eac4f49b9 | [
"MIT"
] | null | null | null | squawk/data/preprocessing/__init__.py | daemon/squawk | df6443a200f8bfef7d5338d8577fc30eac4f49b9 | [
"MIT"
] | null | null | null | from .augment import *
from .base import * | 21 | 22 | 0.738095 | 6 | 42 | 5.166667 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 42 | 2 | 23 | 21 | 0.885714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
e7e59a53c99ef933d72e96aad4caf7daea29ba0b | 133 | py | Python | app/accounts/admin.py | gladunvv/url-shorteners-api | ecb01fc0b825f8972140bc99ac331735432ab966 | [
"MIT"
] | null | null | null | app/accounts/admin.py | gladunvv/url-shorteners-api | ecb01fc0b825f8972140bc99ac331735432ab966 | [
"MIT"
] | 1 | 2020-06-05T20:26:01.000Z | 2020-06-05T20:26:01.000Z | app/accounts/admin.py | gladunvv/app-quiz-django | ecb01fc0b825f8972140bc99ac331735432ab966 | [
"MIT"
] | null | null | null | from django.contrib import admin
from accounts.models import User
@admin.register(User)
class QuizAdmin(admin.ModelAdmin):
pass | 19 | 34 | 0.796992 | 18 | 133 | 5.888889 | 0.722222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12782 | 133 | 7 | 35 | 19 | 0.913793 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.2 | 0.4 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 6 |
e7e856b43797be3feaec62183365505d459f10c4 | 48 | py | Python | circle_ci_test_dev/unit_test/screen/screen_test.py | JE-Chen/Python_JEAutoControl | 477bf9612e28e9ab6d0a8e269db2f699e50a3744 | [
"MIT"
] | 9 | 2020-10-12T06:33:36.000Z | 2021-09-13T07:07:36.000Z | circle_ci_test_stable/unit_test/screen/screen_test.py | JE-Chen/Python_JEAutoControl | 477bf9612e28e9ab6d0a8e269db2f699e50a3744 | [
"MIT"
] | null | null | null | circle_ci_test_stable/unit_test/screen/screen_test.py | JE-Chen/Python_JEAutoControl | 477bf9612e28e9ab6d0a8e269db2f699e50a3744 | [
"MIT"
] | null | null | null | from je_auto_control import size
print(size())
| 12 | 32 | 0.791667 | 8 | 48 | 4.5 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125 | 48 | 3 | 33 | 16 | 0.857143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0.5 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 6 |
f00f21062ed07640f45e25656e045fdfc3cdb0ea | 48 | py | Python | luminoth/utils/hooks/__init__.py | PiterPentester/luminoth | da0186515586291fbb9544c98240979480355f7a | [
"BSD-3-Clause"
] | 2 | 2018-01-25T10:05:10.000Z | 2020-05-16T13:01:24.000Z | luminoth/utils/hooks/__init__.py | macressler/luminoth | da0186515586291fbb9544c98240979480355f7a | [
"BSD-3-Clause"
] | null | null | null | luminoth/utils/hooks/__init__.py | macressler/luminoth | da0186515586291fbb9544c98240979480355f7a | [
"BSD-3-Clause"
] | null | null | null | from .image_vis_hook import ImageVisHook # noqa | 48 | 48 | 0.833333 | 7 | 48 | 5.428571 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125 | 48 | 1 | 48 | 48 | 0.904762 | 0.083333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
f033c648c4fbc044e886a6be4d140ca0d3a738ce | 39 | py | Python | hello_universe/start.py | jayanthvarma134/hello-universe | ab5453731471c172f41ce63c99487cb05faab998 | [
"MIT"
] | null | null | null | hello_universe/start.py | jayanthvarma134/hello-universe | ab5453731471c172f41ce63c99487cb05faab998 | [
"MIT"
] | null | null | null | hello_universe/start.py | jayanthvarma134/hello-universe | ab5453731471c172f41ce63c99487cb05faab998 | [
"MIT"
] | null | null | null | def call():
print("Hello Universe") | 19.5 | 27 | 0.641026 | 5 | 39 | 5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.179487 | 39 | 2 | 27 | 19.5 | 0.78125 | 0 | 0 | 0 | 0 | 0 | 0.35 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0 | 0.5 | 0.5 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
f07a09551a6125d8ce1715cffeff80e1757afb36 | 262 | py | Python | cogs/utils/db.py | j-nguyen/FractalBot | 03e562cf1a697a4b3106f8565083b85c7c5ed76b | [
"Apache-2.0"
] | 8 | 2016-11-06T04:24:39.000Z | 2021-07-18T11:32:05.000Z | cogs/utils/db.py | j-nguyen/FractalBot | 03e562cf1a697a4b3106f8565083b85c7c5ed76b | [
"Apache-2.0"
] | 44 | 2016-10-11T14:18:27.000Z | 2017-06-03T05:26:52.000Z | cogs/utils/db.py | j-nguyen/FractalBot | 03e562cf1a697a4b3106f8565083b85c7c5ed76b | [
"Apache-2.0"
] | 1 | 2021-07-18T13:59:07.000Z | 2021-07-18T13:59:07.000Z | from sqlalchemy import create_engine
import json
# TODO: Fix usage of global
engine = None
def loadDB(user, password, hostname, dbname):
global engine
engine = create_engine('postgresql+psycopg2://{}:{}@{}/{}'.format(user, password, hostname, dbname))
| 26.2 | 104 | 0.721374 | 32 | 262 | 5.84375 | 0.65625 | 0.128342 | 0.213904 | 0.278075 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004464 | 0.145038 | 262 | 9 | 105 | 29.111111 | 0.830357 | 0.09542 | 0 | 0 | 0 | 0 | 0.140426 | 0.140426 | 0 | 0 | 0 | 0.111111 | 0 | 1 | 0.166667 | false | 0.333333 | 0.333333 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 6 |
b2cd34dea5bb75ab33de8e9ec6bd852d9f9c244c | 74 | py | Python | build/lib/AqOrg/__init__.py | worm-portal/AqOrg | 48f10fb7a05c03e0ed3815eed8dc9ea82fcd150b | [
"MIT"
] | null | null | null | build/lib/AqOrg/__init__.py | worm-portal/AqOrg | 48f10fb7a05c03e0ed3815eed8dc9ea82fcd150b | [
"MIT"
] | null | null | null | build/lib/AqOrg/__init__.py | worm-portal/AqOrg | 48f10fb7a05c03e0ed3815eed8dc9ea82fcd150b | [
"MIT"
] | null | null | null | from .AqOrg import Estimate, find_sigfigs, find_HKF, find_HKF_test, Joback | 74 | 74 | 0.837838 | 12 | 74 | 4.833333 | 0.75 | 0.241379 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094595 | 74 | 1 | 74 | 74 | 0.865672 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
650097963aed35be00531eddd17d11ec5020624b | 42 | py | Python | cloudbutton_geospatial/datafetch_utils/__init__.py | berkevaroll/geospatial-usecase | d3db18607be0976badde073b3ee7c8b9613372e1 | [
"Apache-2.0"
] | null | null | null | cloudbutton_geospatial/datafetch_utils/__init__.py | berkevaroll/geospatial-usecase | d3db18607be0976badde073b3ee7c8b9613372e1 | [
"Apache-2.0"
] | null | null | null | cloudbutton_geospatial/datafetch_utils/__init__.py | berkevaroll/geospatial-usecase | d3db18607be0976badde073b3ee7c8b9613372e1 | [
"Apache-2.0"
] | 4 | 2021-03-29T09:03:52.000Z | 2021-09-21T18:27:01.000Z | """
AUTHOR: Juanjo
DATE: 12/02/2019
""" | 6 | 16 | 0.571429 | 6 | 42 | 4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.235294 | 0.190476 | 42 | 7 | 17 | 6 | 0.470588 | 0.761905 | 0 | null | 0 | null | 0 | 0 | null | 0 | 0 | 0 | null | 1 | null | true | 0 | 0 | null | null | null | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
652458aa0c4a4066c3c833664f2ff7d9592eed17 | 99 | py | Python | psyspy/exceptions/__init__.py | stantontcady/psyspy | 72bc0aaacea4191899a971ef46314f09d97b269f | [
"MIT"
] | 1 | 2016-06-02T16:58:36.000Z | 2016-06-02T16:58:36.000Z | psyspy/exceptions/__init__.py | stantontcady/psyspy | 72bc0aaacea4191899a971ef46314f09d97b269f | [
"MIT"
] | null | null | null | psyspy/exceptions/__init__.py | stantontcady/psyspy | 72bc0aaacea4191899a971ef46314f09d97b269f | [
"MIT"
] | null | null | null | from exceptions import BusError, GeneratorModelError, ModelError, PowerLineError, PowerNetworkError | 99 | 99 | 0.888889 | 8 | 99 | 11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.070707 | 99 | 1 | 99 | 99 | 0.956522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
e8e226efd2d119c273b7379625bd0493036bcf17 | 20 | py | Python | pyNiDAQ/__init__.py | gregmoille/InstrumentControl | 4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133 | [
"MIT"
] | 3 | 2018-05-02T20:14:15.000Z | 2020-10-18T03:57:09.000Z | pyNiDAQ/__init__.py | gregmoille/InstrumentControl | 4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133 | [
"MIT"
] | 1 | 2019-05-23T15:21:08.000Z | 2019-05-23T15:21:08.000Z | pyNiDAQ/__init__.py | gregmoille/InstrumentControl | 4cc8477e36f7c4ad4bf4f54036fdd8dd985b4133 | [
"MIT"
] | 2 | 2019-05-16T20:36:25.000Z | 2020-09-22T18:26:49.000Z | from .daq import DAQ | 20 | 20 | 0.8 | 4 | 20 | 4 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.15 | 20 | 1 | 20 | 20 | 0.941176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
e8e3e795ae581be9cc7062ad7a07011a338e924f | 24 | py | Python | pyvoqc/__init__.py | akshajgaur/pyvoqc | 6352d64542be5fed72e7cae941d4a2a7db012a4f | [
"MIT"
] | 1 | 2021-11-17T10:50:50.000Z | 2021-11-17T10:50:50.000Z | pyvoqc/__init__.py | akshajgaur/pyvoqc | 6352d64542be5fed72e7cae941d4a2a7db012a4f | [
"MIT"
] | 5 | 2021-03-14T20:13:25.000Z | 2021-04-10T01:15:05.000Z | pyvoqc/__init__.py | akshajgaur/pyvoqc | 6352d64542be5fed72e7cae941d4a2a7db012a4f | [
"MIT"
] | 2 | 2021-03-13T16:16:34.000Z | 2022-01-27T19:28:15.000Z | from .voqc import VOQC
| 8 | 22 | 0.75 | 4 | 24 | 4.5 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.208333 | 24 | 2 | 23 | 12 | 0.947368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
e8e5d4085f2c1b231a4eae9c96027de4e23bc1f2 | 2,847 | py | Python | functional_tests/translate/test_papago.py | Rested/multi-translate | 565ef2ac7e8b5f94595cecc78b4076a3bc9be45e | [
"MIT"
] | 1 | 2021-08-22T14:43:11.000Z | 2021-08-22T14:43:11.000Z | functional_tests/translate/test_papago.py | Rested/multi-translate | 565ef2ac7e8b5f94595cecc78b4076a3bc9be45e | [
"MIT"
] | null | null | null | functional_tests/translate/test_papago.py | Rested/multi-translate | 565ef2ac7e8b5f94595cecc78b4076a3bc9be45e | [
"MIT"
] | null | null | null | import httpx
from functional_tests.translate.test_translate import translate_url
def test_translate_papago_basic():
request_data = {
"from_language": "en",
"to_language": "ko",
"source_text": "hello",
"preferred_engine": "papago",
"with_alignment": False,
}
resp = httpx.get(translate_url(), params=request_data)
assert resp.status_code == 200
result = resp.json()
assert result == {
"translated_text": "안녕하십니까",
"engine": "papago",
"engine_version": "1",
"from_language": "en",
"to_language": "ko",
"source_text": "hello",
"detected_language_confidence": None,
"alignment": None,
}
def test_translate_papago_detection():
request_data = {
"to_language": "es",
"source_text": "hello",
"preferred_engine": "papago",
"with_alignment": False,
}
resp = httpx.get(translate_url(), params=request_data)
assert resp.status_code == 400
result = resp.json()
assert result == {
"detail": "papago (1) engine does not support detection, please specify from_language"
}
def test_translate_papago_alignment():
request_data = {
"from_language": "en",
"to_language": "ko",
"source_text": "hello",
"preferred_engine": "papago",
"with_alignment": True,
}
resp = httpx.get(translate_url(), params=request_data)
assert resp.status_code == 400
result = resp.json()
assert result == {
"detail": "papago (1) does not support alignment",
}
def test_is_best_for_korean():
request_data = {
"from_language": "en",
"to_language": "ko",
"source_text": "hello",
"with_alignment": False,
}
resp = httpx.get(translate_url(), params=request_data)
assert resp.status_code == 200
result = resp.json()
assert result == {
"translated_text": "안녕하십니까",
"engine": "papago",
"engine_version": "1",
"from_language": "en",
"to_language": "ko",
"source_text": "hello",
"detected_language_confidence": None,
"alignment": None,
}
def test_will_fallback_when_papago_does_not_support_language_pair():
request_data = {
"from_language": "en",
"to_language": "es",
"source_text": "hello",
"with_alignment": False,
"fallback": True,
}
resp = httpx.get(translate_url(), params=request_data)
assert resp.status_code == 200
result = resp.json()
assert result == {
"translated_text": "hola",
"engine": "deepl",
"engine_version": "2",
"from_language": "en",
"to_language": "es",
"source_text": "hello",
"detected_language_confidence": None,
"alignment": None,
}
| 27.640777 | 94 | 0.588339 | 299 | 2,847 | 5.304348 | 0.207358 | 0.069357 | 0.075662 | 0.070618 | 0.781211 | 0.781211 | 0.762295 | 0.755359 | 0.755359 | 0.676545 | 0 | 0.009634 | 0.270811 | 2,847 | 102 | 95 | 27.911765 | 0.754335 | 0 | 0 | 0.736264 | 0 | 0 | 0.300667 | 0.029505 | 0 | 0 | 0 | 0 | 0.10989 | 1 | 0.054945 | false | 0 | 0.021978 | 0 | 0.076923 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
68b3f28a085461bf2ba636e11202a00ad24d3a5e | 103 | py | Python | sandbox/hello.py | jleihe/pythonTP | 7f4495e7514983a29adf1f333178908653cd4de1 | [
"MIT"
] | 1 | 2015-01-06T11:48:42.000Z | 2015-01-06T11:48:42.000Z | sandbox/hello.py | jleihe/pythonTP | 7f4495e7514983a29adf1f333178908653cd4de1 | [
"MIT"
] | null | null | null | sandbox/hello.py | jleihe/pythonTP | 7f4495e7514983a29adf1f333178908653cd4de1 | [
"MIT"
] | null | null | null | # Author: Joshua Leihe
#
# Notes: Short helloworld program
# Print hello world!
print("hello world!")
| 14.714286 | 33 | 0.718447 | 13 | 103 | 5.692308 | 0.769231 | 0.27027 | 0.405405 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.165049 | 103 | 6 | 34 | 17.166667 | 0.860465 | 0.68932 | 0 | 0 | 0 | 0 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 6 |
d7f4a9696d561370e2067e38e75cf4c55d464fff | 202 | py | Python | simulation_camera/lg_write_new.py | kolbt/whingdingdilly | 4c17b594ebc583750fe7565d6414f08678ea7882 | [
"BSD-3-Clause"
] | 4 | 2017-09-04T14:36:57.000Z | 2022-03-28T23:24:58.000Z | simulation_camera/lg_write_new.py | kolbt/whingdingdilly | 4c17b594ebc583750fe7565d6414f08678ea7882 | [
"BSD-3-Clause"
] | null | null | null | simulation_camera/lg_write_new.py | kolbt/whingdingdilly | 4c17b594ebc583750fe7565d6414f08678ea7882 | [
"BSD-3-Clause"
] | null | null | null | import sys
from PIL import Image
composite = str(sys.argv[1])
img = Image.new("RGBA",(28800,19800),(255, 255, 255, 255))
#img = Image.new("RGBA",(37800,1800),(255, 255, 255, 255))
img.save(composite)
| 22.444444 | 58 | 0.673267 | 34 | 202 | 4 | 0.529412 | 0.264706 | 0.264706 | 0.220588 | 0.220588 | 0 | 0 | 0 | 0 | 0 | 0 | 0.247191 | 0.118812 | 202 | 8 | 59 | 25.25 | 0.516854 | 0.282178 | 0 | 0 | 0 | 0 | 0.027778 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 6 |
04265c401700730a097bb747923de4c4587e7250 | 123 | py | Python | gpcraptor/__init__.py | uibcdf/GPCRaptor | 8cb28793f5bfe01fb42656edf72f925c2f6fdf59 | [
"MIT"
] | 2 | 2018-08-07T17:05:51.000Z | 2018-08-09T16:37:17.000Z | gpcraptor/__init__.py | uibcdf/GPCRaptor | 8cb28793f5bfe01fb42656edf72f925c2f6fdf59 | [
"MIT"
] | 2 | 2018-08-09T18:58:21.000Z | 2018-08-14T16:18:22.000Z | gpcraptor/__init__.py | uibcdf/GPCRaptor | 8cb28793f5bfe01fb42656edf72f925c2f6fdf59 | [
"MIT"
] | 2 | 2018-08-07T18:37:35.000Z | 2018-08-09T18:56:43.000Z | from .dining_table import Dining_Table
from .receptors import *
from . import receptor_observables as receptor_observables
| 30.75 | 58 | 0.853659 | 16 | 123 | 6.3125 | 0.5 | 0.217822 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.113821 | 123 | 3 | 59 | 41 | 0.926606 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
043d8faa6145e5a96347f39f715f73b46e7c0c13 | 157 | py | Python | ssc/api.py | StationA/sscpy | bfef056ecb165f463a464848b1830f78fd4e383e | [
"MIT"
] | 3 | 2018-08-22T22:02:50.000Z | 2020-10-16T22:10:28.000Z | ssc/api.py | StationA/sscpy | bfef056ecb165f463a464848b1830f78fd4e383e | [
"MIT"
] | 2 | 2020-10-01T20:25:14.000Z | 2021-11-08T06:06:36.000Z | ssc/api.py | StationA/sscpy | bfef056ecb165f463a464848b1830f78fd4e383e | [
"MIT"
] | null | null | null | from ssc.core import Data, enable_logging, disable_logging, list_modules
from ssc.modules.battwatts import BattWatts
from ssc.modules.pvwatts import PVWatts
| 39.25 | 72 | 0.853503 | 23 | 157 | 5.695652 | 0.521739 | 0.160305 | 0.21374 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.095541 | 157 | 3 | 73 | 52.333333 | 0.922535 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
044939f6f2ee2d6008b605704af2f2fb405cbaa8 | 35 | py | Python | utils/__init__.py | danlitovitz/RandWire_tensorflow | 16530dcbf90b47275016db1a4222bedcd41a7859 | [
"MIT"
] | 22 | 2019-04-17T05:41:25.000Z | 2020-04-10T06:46:50.000Z | utils/__init__.py | plemeri/RandWire_tensorflow | 053731d321ad739133d1052a64c00af8efe1265a | [
"MIT"
] | 3 | 2019-04-18T11:05:51.000Z | 2019-09-01T12:08:08.000Z | utils/__init__.py | plemeri/RandWire_tensorflow | 053731d321ad739133d1052a64c00af8efe1265a | [
"MIT"
] | 8 | 2019-04-29T14:02:00.000Z | 2020-05-19T15:23:43.000Z | from utils import graph_generator
| 17.5 | 34 | 0.857143 | 5 | 35 | 5.8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 35 | 1 | 35 | 35 | 0.966667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6 |
f0ec81a4fcfacf39d14968458a78693f0461f229 | 186,962 | py | Python | image/controllers.py | utsab/WeVoteServer | 0734da7e1dcf8093599d8c03638ed0311c9f988b | [
"MIT"
] | null | null | null | image/controllers.py | utsab/WeVoteServer | 0734da7e1dcf8093599d8c03638ed0311c9f988b | [
"MIT"
] | null | null | null | image/controllers.py | utsab/WeVoteServer | 0734da7e1dcf8093599d8c03638ed0311c9f988b | [
"MIT"
] | null | null | null | # image/controllers.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
import requests
import wevote_functions.admin
from .functions import analyze_remote_url, analyze_image_file, analyze_image_in_memory
from .models import WeVoteImageManager, WeVoteImage, \
CHOSEN_FAVICON_NAME, CHOSEN_LOGO_NAME, CHOSEN_SOCIAL_SHARE_IMAGE_NAME, \
FACEBOOK_PROFILE_IMAGE_NAME, FACEBOOK_BACKGROUND_IMAGE_NAME, \
TWITTER_PROFILE_IMAGE_NAME, TWITTER_BACKGROUND_IMAGE_NAME, TWITTER_BANNER_IMAGE_NAME, MAPLIGHT_IMAGE_NAME, \
VOTE_SMART_IMAGE_NAME, MASTER_IMAGE, ISSUE_IMAGE_NAME, BALLOTPEDIA_IMAGE_NAME, CAMPAIGNX_PHOTO_IMAGE_NAME, \
LINKEDIN_IMAGE_NAME, WIKIPEDIA_IMAGE_NAME
from candidate.models import CandidateManager
from config.base import get_environment_variable
from django.db.models import Q
from import_export_facebook.models import FacebookManager
from issue.models import IssueManager
from organization.models import OrganizationManager
from politician.models import PoliticianManager
from position.controllers import reset_all_position_image_details_from_candidate, \
reset_position_for_friends_image_details_from_voter, reset_position_entered_image_details_from_organization, \
update_all_position_details_from_candidate
from twitter.functions import retrieve_twitter_user_info
from twitter.models import TwitterUserManager
from voter.models import VoterManager, VoterDeviceLink, VoterDeviceLinkManager, VoterAddressManager, VoterAddress, Voter
from voter_guide.models import VoterGuideManager
from wevote_functions.functions import positive_value_exists, convert_to_int
logger = wevote_functions.admin.get_logger(__name__)
HTTP_OK = 200
# These constants are used for "image_source" which is not a WeVoteImage table value, but gets used in the controller
# code to set the table values like: kind_of_image_twitter_profile and kind_of_image_facebook_profile.
# "other_source" is a database table value that is not given its own "kind_of_image..." table boolean.
TWITTER = "twitter"
FACEBOOK = "facebook"
MAPLIGHT = "maplight"
VOTE_SMART = "vote_smart"
BALLOTPEDIA_IMAGE_SOURCE = "ballotpedia"
LINKEDIN = "linkedin"
WIKIPEDIA = "wikipedia"
OTHER_SOURCE = "other_source" # Set "kind_of_image_other_source" to true
MAPLIGHT_URL_NOT_FOUND = "maplight url not found"
VOTE_SMART_URL_NOT_FOUND = "votesmart url not found"
BALLOTPEDIA_URL_NOT_FOUND = "ballotpedia url not found"
CAMPAIGNX_PHOTO_URL_NOT_FOUND = "campaignx photo url not found"
LINKEDIN_URL_NOT_FOUND = "linkedin url not found"
WIKIPEDIA_URL_NOT_FOUND = "wikipedia url not found"
OTHER_SOURCE_URL_NOT_FOUND = "other source url not found"
FACEBOOK_USER_DOES_NOT_EXIST = "facebook user does not exist"
FACEBOOK_URL_NOT_FOUND = "facebook url not found"
TWITTER_USER_DOES_NOT_EXIST = "twitter user does not exist"
TWITTER_URL_NOT_FOUND = "twitter url not found"
IMAGE_ALREADY_CACHED = "image already cached"
ALL_KIND_OF_IMAGE = ['kind_of_image_twitter_profile', 'kind_of_image_twitter_background',
'kind_of_image_twitter_banner', 'kind_of_image_facebook_profile',
'kind_of_image_facebook_background', 'kind_of_image_maplight', 'kind_of_image_vote_smart']
# Search for in campaign/controllers.py as well
# Facebook shared image: 1200 x 630
# Facebook shared link: 1200 x 628
# Tweet with image in shared link: 1200 x 628
# Tweet with single image: 1200 x 675 (Twitter recommended aspect ratio is 16:9)
CAMPAIGN_PHOTO_ORIGINAL_MAX_WIDTH = 1200
CAMPAIGN_PHOTO_ORIGINAL_MAX_HEIGHT = 628
CAMPAIGN_PHOTO_LARGE_MAX_WIDTH = 575
CAMPAIGN_PHOTO_LARGE_MAX_HEIGHT = 301
CAMPAIGN_PHOTO_MEDIUM_MAX_WIDTH = 224
CAMPAIGN_PHOTO_MEDIUM_MAX_HEIGHT = 117
CAMPAIGN_PHOTO_SMALL_MAX_WIDTH = 140
CAMPAIGN_PHOTO_SMALL_MAX_HEIGHT = 73
PROFILE_IMAGE_LARGE_WIDTH = convert_to_int(get_environment_variable("PROFILE_IMAGE_LARGE_WIDTH"))
PROFILE_IMAGE_LARGE_HEIGHT = convert_to_int(get_environment_variable("PROFILE_IMAGE_LARGE_HEIGHT"))
PROFILE_IMAGE_MEDIUM_WIDTH = convert_to_int(get_environment_variable("PROFILE_IMAGE_MEDIUM_WIDTH"))
PROFILE_IMAGE_MEDIUM_HEIGHT = convert_to_int(get_environment_variable("PROFILE_IMAGE_MEDIUM_HEIGHT"))
PROFILE_IMAGE_TINY_WIDTH = convert_to_int(get_environment_variable("PROFILE_IMAGE_TINY_WIDTH"))
PROFILE_IMAGE_TINY_HEIGHT = convert_to_int(get_environment_variable("PROFILE_IMAGE_TINY_HEIGHT"))
ISSUES_IMAGE_LARGE_WIDTH = convert_to_int(get_environment_variable("ISSUES_IMAGE_LARGE_WIDTH"))
ISSUES_IMAGE_LARGE_HEIGHT = convert_to_int(get_environment_variable("ISSUES_IMAGE_LARGE_HEIGHT"))
ISSUES_IMAGE_MEDIUM_WIDTH = convert_to_int(get_environment_variable("ISSUES_IMAGE_MEDIUM_WIDTH"))
ISSUES_IMAGE_MEDIUM_HEIGHT = convert_to_int(get_environment_variable("ISSUES_IMAGE_MEDIUM_HEIGHT"))
ISSUES_IMAGE_TINY_WIDTH = convert_to_int(get_environment_variable("ISSUES_IMAGE_TINY_WIDTH"))
ISSUES_IMAGE_TINY_HEIGHT = convert_to_int(get_environment_variable("ISSUES_IMAGE_TINY_HEIGHT"))
AWS_STORAGE_BUCKET_NAME = get_environment_variable("AWS_STORAGE_BUCKET_NAME")
try:
SOCIAL_BACKGROUND_IMAGE_WIDTH = convert_to_int(get_environment_variable("SOCIAL_BACKGROUND_IMAGE_WIDTH"))
SOCIAL_BACKGROUND_IMAGE_HEIGHT = convert_to_int(get_environment_variable("SOCIAL_BACKGROUND_IMAGE_HEIGHT"))
except Exception:
# In case not defined in a dev environment, use the default values which come from the Sept 2017 size of the react
# image class="organization-banner-image-img"
logger.error(
"SOCIAL_BACKGROUND_IMAGE_WIDTH and/or SOCIAL_BACKGROUND_IMAGE_HEIGHT not defined in environment_variables.")
    SOCIAL_BACKGROUND_IMAGE_HEIGHT = 200  # HTML y
    SOCIAL_BACKGROUND_IMAGE_WIDTH = 900  # HTML x
def cache_all_kind_of_images_locally_for_all_organizations():
"""
    Cache all kinds of images locally for all organizations
:return:
"""
cache_images_locally_for_all_organizations_results = []
# TODO Update this for organizations
# voter_list = Voter.objects.all()
#
# # If there is a value in twitter_id OR facebook_id, return the voter
# image_filters = []
# new_filter = Q(twitter_id__isnull=False)
# image_filters.append(new_filter)
# new_filter = Q(facebook_id__isnull=False)
# image_filters.append(new_filter)
#
# # Add the first query
# final_filters = image_filters.pop()
#
# # ...and "OR" the remaining items in the list
# for item in image_filters:
# final_filters |= item
#
# # voter_list = voter_list.filter(final_filters)
# voter_list = voter_list.order_by('-is_admin', '-is_verified_volunteer', 'facebook_email', 'twitter_screen_name',
# 'last_name', 'first_name')
# voter_list = voter_list[:200] # Limit to 200 for now
#
# for voter in voter_list:
# cache_images_for_one_organization_results = migrate_remote_voter_image_urls_to_local_cache(voter.id)
# cache_images_locally_for_all_organizations_results.append(cache_images_for_one_organization_results)
return cache_images_locally_for_all_organizations_results
def cache_all_kind_of_images_locally_for_all_voters():
"""
    Cache all kinds of images locally for all voters
:return:
"""
cache_images_locally_for_all_voters_results = []
voter_list = Voter.objects.all()
# If there is a value in twitter_id OR facebook_id, return the voter
image_filters = []
new_filter = Q(twitter_id__isnull=False)
image_filters.append(new_filter)
new_filter = Q(facebook_id__isnull=False)
image_filters.append(new_filter)
# Add the first query
final_filters = image_filters.pop()
# ...and "OR" the remaining items in the list
for item in image_filters:
final_filters |= item
# voter_list = voter_list.filter(final_filters)
voter_list = voter_list.order_by('-is_admin', '-is_verified_volunteer', 'facebook_email', 'twitter_screen_name',
'last_name', 'first_name')
voter_list = voter_list[:200] # Limit to 200 for now
for voter in voter_list:
cache_images_for_a_voter_results = cache_voter_master_images(voter.id)
cache_images_locally_for_all_voters_results.append(cache_images_for_a_voter_results)
return cache_images_locally_for_all_voters_results
def cache_image_if_not_cached(
google_civic_election_id=0,
image_url_https='',
voter_we_vote_id=None,
candidate_we_vote_id=None,
organization_we_vote_id=None,
issue_we_vote_id=None,
twitter_id=None,
twitter_screen_name=None,
facebook_user_id=None,
maplight_id=None,
vote_smart_id=None,
is_active_version=False,
kind_of_image_twitter_profile=False,
kind_of_image_twitter_background=False,
kind_of_image_twitter_banner=False,
kind_of_image_facebook_profile=False,
kind_of_image_facebook_background=False,
kind_of_image_maplight=False,
kind_of_image_vote_smart=False,
kind_of_image_issue=False,
kind_of_image_ballotpedia_profile=False,
kind_of_image_linkedin_profile=False,
kind_of_image_wikipedia_profile=False,
kind_of_image_other_source=False,
kind_of_image_original=False,
facebook_background_image_offset_x=None,
facebook_background_image_offset_y=None,
other_source=None):
"""
Check if image is already cached or not. If not then cached it.
:param google_civic_election_id:
:param image_url_https:
:param voter_we_vote_id:
:param candidate_we_vote_id:
:param organization_we_vote_id:
:param issue_we_vote_id:
:param twitter_id:
:param twitter_screen_name:
:param facebook_user_id:
:param maplight_id:
:param vote_smart_id:
:param is_active_version:
:param kind_of_image_twitter_profile:
:param kind_of_image_twitter_background:
:param kind_of_image_twitter_banner:
:param kind_of_image_facebook_profile:
:param kind_of_image_facebook_background:
:param kind_of_image_maplight:
:param kind_of_image_vote_smart:
:param kind_of_image_issue:
:param kind_of_image_ballotpedia_profile:
:param kind_of_image_linkedin_profile:
:param kind_of_image_wikipedia_profile:
:param kind_of_image_other_source:
:param kind_of_image_original:
:param facebook_background_image_offset_x:
:param facebook_background_image_offset_y:
:param other_source:
:return:
"""
we_vote_image_manager = WeVoteImageManager()
cached_we_vote_image_results = we_vote_image_manager.retrieve_recent_cached_we_vote_image(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
issue_we_vote_id=issue_we_vote_id,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_issue=kind_of_image_issue,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,
kind_of_image_original=kind_of_image_original,
is_active_version=True)
    # If the most recently cached image matches the current one, the image is already cached
cached_we_vote_image = cached_we_vote_image_results['we_vote_image']
    if cached_we_vote_image_results['we_vote_image_found'] and \
            (image_url_https == cached_we_vote_image.twitter_profile_image_url_https or
             image_url_https == cached_we_vote_image.twitter_profile_background_image_url_https or
             image_url_https == cached_we_vote_image.twitter_profile_banner_url_https or
             image_url_https == cached_we_vote_image.facebook_profile_image_url_https or
             image_url_https == cached_we_vote_image.facebook_background_image_url_https or
             image_url_https == cached_we_vote_image.maplight_image_url_https or
             image_url_https == cached_we_vote_image.vote_smart_image_url_https or
             image_url_https == cached_we_vote_image.issue_image_url_https or
             image_url_https == cached_we_vote_image.ballotpedia_profile_image_url or
             image_url_https == cached_we_vote_image.linkedin_profile_image_url or
             image_url_https == cached_we_vote_image.wikipedia_profile_image_url or
             image_url_https == cached_we_vote_image.other_source_image_url):
cache_image_results = IMAGE_ALREADY_CACHED
else:
# Image is not cached so caching it
cache_image_locally_results = cache_image_locally(
google_civic_election_id=google_civic_election_id,
image_url_https=image_url_https,
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
issue_we_vote_id=issue_we_vote_id,
twitter_id=twitter_id,
facebook_user_id=facebook_user_id,
maplight_id=maplight_id,
vote_smart_id=vote_smart_id,
twitter_screen_name=twitter_screen_name,
is_active_version=is_active_version,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_issue=kind_of_image_issue,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,
kind_of_image_original=kind_of_image_original,
facebook_background_image_offset_x=facebook_background_image_offset_x,
facebook_background_image_offset_y=facebook_background_image_offset_y,
other_source=other_source,
)
cache_image_results = cache_image_locally_results['success']
if cache_image_results:
set_active_version_false_results = we_vote_image_manager.set_active_version_false_for_other_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
issue_we_vote_id=issue_we_vote_id,
image_url_https=image_url_https,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_issue=kind_of_image_issue,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,)
return cache_image_results
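# Illustrative sketch (not part of the original module): a minimal example of how a caller might
# invoke cache_image_if_not_cached() for a Twitter profile image. The helper name and its
# arguments are hypothetical placeholders; only the flags relevant to an original Twitter
# profile image are switched on, everything else keeps its default.
def _example_cache_twitter_profile_image_for_voter(
        voter_we_vote_id, twitter_id, twitter_screen_name, twitter_profile_image_url_https):
    return cache_image_if_not_cached(
        google_civic_election_id=0,
        image_url_https=twitter_profile_image_url_https,
        voter_we_vote_id=voter_we_vote_id,
        twitter_id=twitter_id,
        twitter_screen_name=twitter_screen_name,
        is_active_version=True,
        kind_of_image_twitter_profile=True,
        kind_of_image_original=True)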
def cache_organization_master_images(organization_we_vote_id):
"""
    Cache all kinds of master images for an organization, such as profile and background
:param organization_we_vote_id:
:return:
"""
cache_all_kind_of_images_results = {
'organization_we_vote_id': "",
'cached_twitter_profile_image': False,
'cached_twitter_background_image': False,
'cached_twitter_banner_image': False,
'cached_facebook_profile_image': False,
'cached_facebook_background_image': False
}
google_civic_election_id = 0
twitter_id = None
organization_manager = OrganizationManager()
organization_results = organization_manager.retrieve_organization_from_we_vote_id(organization_we_vote_id)
if not organization_results['organization_found']:
return cache_all_kind_of_images_results
organization = organization_results['organization']
organization_we_vote_id = organization.we_vote_id
if positive_value_exists(organization_we_vote_id):
cache_all_kind_of_images_results['organization_we_vote_id'] = organization_we_vote_id
else:
return cache_all_kind_of_images_results
twitter_user_manager = TwitterUserManager()
twitter_screen_name = ''
twitter_link_to_organization_results = \
twitter_user_manager.retrieve_twitter_link_to_organization_from_organization_we_vote_id(organization_we_vote_id)
if twitter_link_to_organization_results['twitter_link_to_organization_found']:
twitter_link_to_organization = twitter_link_to_organization_results['twitter_link_to_organization']
twitter_id = twitter_link_to_organization.twitter_id
twitter_screen_name = twitter_link_to_organization.fetch_twitter_handle_locally_or_remotely()
if not positive_value_exists(twitter_id):
cache_all_kind_of_images_results = {
'organization_we_vote_id': organization_we_vote_id,
'organization': organization,
'cached_twitter_profile_image': TWITTER_USER_DOES_NOT_EXIST,
'cached_twitter_background_image': TWITTER_USER_DOES_NOT_EXIST,
'cached_twitter_banner_image': TWITTER_USER_DOES_NOT_EXIST,
}
return cache_all_kind_of_images_results
# Retrieve latest twitter image urls from Twitter
latest_image_urls_results = retrieve_image_urls_from_twitter(twitter_id)
twitter_profile_image_url_https = latest_image_urls_results['latest_twitter_profile_image_url']
twitter_profile_background_image_url_https = latest_image_urls_results['latest_twitter_background_image_url']
twitter_profile_banner_url_https = latest_image_urls_results['latest_twitter_banner_image_url']
# Cache all images if not already cached
if not twitter_profile_image_url_https:
cache_all_kind_of_images_results['cached_twitter_profile_image'] = TWITTER_URL_NOT_FOUND
else:
cache_all_kind_of_images_results['cached_twitter_profile_image'] = cache_image_if_not_cached(
google_civic_election_id, twitter_profile_image_url_https, organization_we_vote_id=organization_we_vote_id,
twitter_id=twitter_id, twitter_screen_name=twitter_screen_name, is_active_version=True,
kind_of_image_twitter_profile=True, kind_of_image_original=True)
if not twitter_profile_background_image_url_https:
cache_all_kind_of_images_results['cached_twitter_background_image'] = TWITTER_URL_NOT_FOUND
else:
cache_all_kind_of_images_results['cached_twitter_background_image'] = cache_image_if_not_cached(
google_civic_election_id, twitter_profile_background_image_url_https,
organization_we_vote_id=organization_we_vote_id, twitter_id=twitter_id,
twitter_screen_name=twitter_screen_name, is_active_version=True,
kind_of_image_twitter_background=True, kind_of_image_original=True)
if not twitter_profile_banner_url_https:
cache_all_kind_of_images_results['cached_twitter_banner_image'] = TWITTER_URL_NOT_FOUND
else:
cache_all_kind_of_images_results['cached_twitter_banner_image'] = cache_image_if_not_cached(
google_civic_election_id, twitter_profile_banner_url_https,
organization_we_vote_id=organization_we_vote_id, twitter_id=twitter_id,
twitter_screen_name=twitter_screen_name, is_active_version=True,
kind_of_image_twitter_banner=True, kind_of_image_original=True)
return cache_all_kind_of_images_results
def cache_voter_master_images(voter_id):
"""
    Cache all kinds of images locally for a voter, such as profile and background
:param voter_id:
:return:
"""
cache_all_kind_of_images_results = {
'voter_id': voter_id,
'voter_we_vote_id': "",
'cached_twitter_profile_image': False,
'cached_twitter_background_image': False,
'cached_twitter_banner_image': False,
'cached_facebook_profile_image': False,
'cached_facebook_background_image': False
}
google_civic_election_id = 0
twitter_id = None
facebook_id = None
voter_address_manager = VoterAddressManager()
voter_manager = VoterManager()
voter_device_link_manager = VoterDeviceLinkManager()
voter_results = voter_manager.retrieve_voter_by_id(voter_id)
if not voter_results['voter_found']:
return cache_all_kind_of_images_results
voter = voter_results['voter']
if positive_value_exists(voter.we_vote_id):
cache_all_kind_of_images_results['voter_we_vote_id'] = voter.we_vote_id
# DALE 2018-06-19 I don't see why we need a google_civic_election_id for storing a voter's photos
voter_device_link_results = voter_device_link_manager.retrieve_voter_device_link(0, voter_id=voter_id)
if voter_device_link_results['success']:
voter_device_link = voter_device_link_results['voter_device_link']
else:
voter_device_link = VoterDeviceLink()
voter_address_results = voter_address_manager.retrieve_address(0, voter_id)
if voter_address_results['voter_address_found']:
voter_address = voter_address_results['voter_address']
else:
voter_address = VoterAddress()
from ballot.controllers import choose_election_from_existing_data
results = choose_election_from_existing_data(voter_device_link, 0, voter_address)
google_civic_election_id = results['google_civic_election_id']
else:
return cache_all_kind_of_images_results
# DALE NOTE 2017-04-23 I don't think we want to use the twitter_id stored in the voter table
# if positive_value_exists(voter.twitter_id):
# twitter_id = voter.twitter_id
# else:
twitter_user_manager = TwitterUserManager()
twitter_screen_name = ''
twitter_link_to_voter_results = twitter_user_manager.retrieve_twitter_link_to_voter_from_voter_we_vote_id(
voter.we_vote_id, read_only=True)
if twitter_link_to_voter_results['twitter_link_to_voter_found']:
twitter_link_to_voter = twitter_link_to_voter_results['twitter_link_to_voter']
twitter_id = twitter_link_to_voter.twitter_id
twitter_screen_name = twitter_link_to_voter.fetch_twitter_handle_locally_or_remotely()
# DALE NOTE 2017-04-23 I don't think we want to use the facebook_id stored in the voter table
# if positive_value_exists(voter.facebook_id):
# facebook_id = voter.facebook_id
# else:
facebook_manager = FacebookManager()
facebook_link_to_voter_results = facebook_manager.retrieve_facebook_link_to_voter_from_voter_we_vote_id(
voter.we_vote_id)
if facebook_link_to_voter_results['facebook_link_to_voter_found']:
facebook_id = facebook_link_to_voter_results['facebook_link_to_voter'].facebook_user_id
if not positive_value_exists(twitter_id) and not positive_value_exists(facebook_id):
cache_all_kind_of_images_results = {
'voter_id': voter_id,
'voter_we_vote_id': voter.we_vote_id,
'voter_object': voter,
'cached_twitter_profile_image': TWITTER_USER_DOES_NOT_EXIST,
'cached_twitter_background_image': TWITTER_USER_DOES_NOT_EXIST,
'cached_twitter_banner_image': TWITTER_USER_DOES_NOT_EXIST,
'cached_facebook_profile_image': FACEBOOK_USER_DOES_NOT_EXIST,
'cached_facebook_background_image': FACEBOOK_USER_DOES_NOT_EXIST
}
return cache_all_kind_of_images_results
if not positive_value_exists(twitter_id):
        cache_all_kind_of_images_results['cached_twitter_profile_image'] = TWITTER_USER_DOES_NOT_EXIST
        cache_all_kind_of_images_results['cached_twitter_background_image'] = TWITTER_USER_DOES_NOT_EXIST
        cache_all_kind_of_images_results['cached_twitter_banner_image'] = TWITTER_USER_DOES_NOT_EXIST
else:
# Retrieve latest twitter image urls from Twitter
latest_image_urls_results = retrieve_image_urls_from_twitter(twitter_id)
twitter_profile_image_url_https = latest_image_urls_results['latest_twitter_profile_image_url']
twitter_profile_background_image_url_https = latest_image_urls_results['latest_twitter_background_image_url']
twitter_profile_banner_url_https = latest_image_urls_results['latest_twitter_banner_image_url']
# Cache all images if not already cached
if not twitter_profile_image_url_https:
cache_all_kind_of_images_results['cached_twitter_profile_image'] = TWITTER_URL_NOT_FOUND
else:
cache_all_kind_of_images_results['cached_twitter_profile_image'] = cache_image_if_not_cached(
google_civic_election_id, twitter_profile_image_url_https,
voter_we_vote_id=voter.we_vote_id,
twitter_id=twitter_id, twitter_screen_name=twitter_screen_name, is_active_version=True,
kind_of_image_twitter_profile=True, kind_of_image_original=True)
if not twitter_profile_background_image_url_https:
cache_all_kind_of_images_results['cached_twitter_background_image'] = TWITTER_URL_NOT_FOUND
else:
cache_all_kind_of_images_results['cached_twitter_background_image'] = cache_image_if_not_cached(
google_civic_election_id, twitter_profile_background_image_url_https,
voter_we_vote_id=voter.we_vote_id, twitter_id=twitter_id,
twitter_screen_name=twitter_screen_name, is_active_version=True,
kind_of_image_twitter_background=True, kind_of_image_original=True)
if not twitter_profile_banner_url_https:
cache_all_kind_of_images_results['cached_twitter_banner_image'] = TWITTER_URL_NOT_FOUND
else:
cache_all_kind_of_images_results['cached_twitter_banner_image'] = cache_image_if_not_cached(
google_civic_election_id, twitter_profile_banner_url_https,
voter_we_vote_id=voter.we_vote_id, twitter_id=twitter_id,
twitter_screen_name=twitter_screen_name, is_active_version=True,
kind_of_image_twitter_banner=True, kind_of_image_original=True)
if not positive_value_exists(facebook_id):
        cache_all_kind_of_images_results['cached_facebook_profile_image'] = FACEBOOK_USER_DOES_NOT_EXIST
        cache_all_kind_of_images_results['cached_facebook_background_image'] = FACEBOOK_USER_DOES_NOT_EXIST
else:
# Retrieve latest facebook image urls from Facebook
latest_image_urls_results = retrieve_facebook_image_url(facebook_id)
facebook_profile_image_url_https = latest_image_urls_results['facebook_profile_image_url']
facebook_background_image_url_https = latest_image_urls_results['facebook_background_image_url']
# Cache all images if not already cached
if not facebook_profile_image_url_https:
cache_all_kind_of_images_results['cached_facebook_profile_image'] = FACEBOOK_URL_NOT_FOUND
else:
cache_all_kind_of_images_results['cached_facebook_profile_image'] = cache_image_if_not_cached(
google_civic_election_id, facebook_profile_image_url_https,
voter_we_vote_id=voter.we_vote_id,
facebook_user_id=facebook_id, is_active_version=True,
kind_of_image_facebook_profile=True, kind_of_image_original=True)
if not facebook_background_image_url_https:
cache_all_kind_of_images_results['cached_facebook_background_image'] = FACEBOOK_URL_NOT_FOUND
else:
cache_all_kind_of_images_results['cached_facebook_background_image'] = cache_image_if_not_cached(
google_civic_election_id, facebook_background_image_url_https,
voter_we_vote_id=voter.we_vote_id, facebook_user_id=facebook_id,
is_active_version=True, kind_of_image_facebook_background=True, kind_of_image_original=True)
return cache_all_kind_of_images_results
def cache_image_locally(
google_civic_election_id,
image_url_https,
voter_we_vote_id=None,
candidate_we_vote_id=None,
organization_we_vote_id=None,
issue_we_vote_id=None,
twitter_id=None,
twitter_screen_name=None,
facebook_user_id=None,
other_source=None,
maplight_id=None,
vote_smart_id=None,
is_active_version=False,
kind_of_image_twitter_profile=False,
kind_of_image_twitter_background=False,
kind_of_image_twitter_banner=False,
kind_of_image_facebook_profile=False,
kind_of_image_facebook_background=False,
kind_of_image_maplight=False,
kind_of_image_vote_smart=False,
kind_of_image_issue=False,
kind_of_image_ballotpedia_profile=False,
kind_of_image_linkedin_profile=False,
kind_of_image_wikipedia_profile=False,
kind_of_image_other_source=False,
kind_of_image_original=False,
kind_of_image_large=False,
kind_of_image_medium=False,
kind_of_image_tiny=False,
facebook_background_image_offset_x=False,
facebook_background_image_offset_y=False):
"""
Cache one type of image
:param google_civic_election_id:
:param image_url_https:
:param voter_we_vote_id:
:param candidate_we_vote_id:
:param organization_we_vote_id:
:param issue_we_vote_id:
:param twitter_id:
:param twitter_screen_name:
:param facebook_user_id:
:param other_source: # can be MapLight or VoteSmart
:param maplight_id:
:param vote_smart_id:
:param other_source_profile_image_url: # TODO need to find a way to get this
:param is_active_version:
:param kind_of_image_twitter_profile:
:param kind_of_image_twitter_background:
:param kind_of_image_twitter_banner:
:param kind_of_image_facebook_profile:
:param kind_of_image_facebook_background:
:param kind_of_image_maplight:
:param kind_of_image_vote_smart:
:param kind_of_image_issue:
:param kind_of_image_ballotpedia_profile:
:param kind_of_image_linkedin_profile:
:param kind_of_image_wikipedia_profile:
:param kind_of_image_other_source:
:param kind_of_image_original:
:param kind_of_image_large:
:param kind_of_image_medium:
:param kind_of_image_tiny:
:param facebook_background_image_offset_x:
:param facebook_background_image_offset_y:
:return:
"""
we_vote_parent_image_id = None
success = False
status = ''
we_vote_image_created = False
image_url_valid = False
image_stored_from_source = False
image_stored_locally = False
image_stored_to_aws = False
image_versions = []
we_vote_image_manager = WeVoteImageManager()
# create we_vote_image entry with voter_we_vote_id and google_civic_election_id and kind_of_image
create_we_vote_image_results = we_vote_image_manager.create_we_vote_image(
google_civic_election_id=google_civic_election_id,
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
issue_we_vote_id=issue_we_vote_id,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_issue=kind_of_image_issue,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,
kind_of_image_original=kind_of_image_original,
kind_of_image_large=kind_of_image_large,
kind_of_image_medium=kind_of_image_medium,
kind_of_image_tiny=kind_of_image_tiny,
facebook_background_image_offset_x=facebook_background_image_offset_x,
facebook_background_image_offset_y=facebook_background_image_offset_y)
status += create_we_vote_image_results['status']
if not create_we_vote_image_results['we_vote_image_saved']:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'image_stored_to_aws': image_stored_to_aws,
}
return error_results
we_vote_image_created = True
we_vote_image = create_we_vote_image_results['we_vote_image']
    # Validate the image url and get the source image properties
analyze_source_images_results = analyze_source_images(
twitter_id=twitter_id,
twitter_screen_name=twitter_screen_name,
facebook_user_id=facebook_user_id,
maplight_id=maplight_id,
vote_smart_id=vote_smart_id,
image_url_https=image_url_https,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,
other_source=other_source)
if 'analyze_image_url_results' not in analyze_source_images_results or \
'image_url_valid' not in analyze_source_images_results['analyze_image_url_results'] or not \
analyze_source_images_results['analyze_image_url_results']['image_url_valid']:
error_results = {
'success': success,
'status': status + " IMAGE_URL_NOT_VALID",
'we_vote_image_created': True,
'image_url_valid': False,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
image_url_valid = True
status += " IMAGE_URL_VALID"
# Get today's cached images and their versions so that image version can be calculated
cached_todays_we_vote_image_list_results = we_vote_image_manager.retrieve_todays_cached_we_vote_image_list(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
issue_we_vote_id=issue_we_vote_id,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_issue=kind_of_image_issue,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,
kind_of_image_original=kind_of_image_original)
for cached_we_vote_image in cached_todays_we_vote_image_list_results['we_vote_image_list']:
if cached_we_vote_image.same_day_image_version:
image_versions.append(cached_we_vote_image.same_day_image_version)
if image_versions:
same_day_image_version = max(image_versions) + 1
else:
same_day_image_version = 1
if kind_of_image_facebook_profile or kind_of_image_facebook_background:
# image url is valid so store source image of facebook to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_facebook_info(
we_vote_image, facebook_user_id, analyze_source_images_results['analyze_image_url_results']['image_width'],
analyze_source_images_results['analyze_image_url_results']['image_height'],
image_url_https, same_day_image_version, kind_of_image_facebook_profile,
kind_of_image_facebook_background, image_url_valid)
elif kind_of_image_twitter_profile or kind_of_image_twitter_background or kind_of_image_twitter_banner:
# image url is valid so store source image of twitter to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_twitter_info(
we_vote_image, twitter_id, analyze_source_images_results['analyze_image_url_results']['image_width'],
analyze_source_images_results['analyze_image_url_results']['image_height'],
image_url_https, same_day_image_version, kind_of_image_twitter_profile,
kind_of_image_twitter_background, kind_of_image_twitter_banner, image_url_valid)
elif kind_of_image_maplight:
save_source_info_results = we_vote_image_manager.save_we_vote_image_maplight_info(
we_vote_image, maplight_id, analyze_source_images_results['analyze_image_url_results']['image_width'],
analyze_source_images_results['analyze_image_url_results']['image_height'],
image_url_https, same_day_image_version, kind_of_image_maplight, image_url_valid)
elif kind_of_image_vote_smart:
save_source_info_results = we_vote_image_manager.save_we_vote_image_vote_smart_info(
we_vote_image, vote_smart_id, analyze_source_images_results['analyze_image_url_results']['image_width'],
analyze_source_images_results['analyze_image_url_results']['image_height'],
image_url_https, same_day_image_version, kind_of_image_vote_smart, image_url_valid)
elif kind_of_image_ballotpedia_profile:
save_source_info_results = we_vote_image_manager.save_we_vote_image_ballotpedia_info(
we_vote_image, analyze_source_images_results['analyze_image_url_results']['image_width'],
analyze_source_images_results['analyze_image_url_results']['image_height'],
image_url_https, same_day_image_version, kind_of_image_ballotpedia_profile, image_url_valid)
elif kind_of_image_linkedin_profile:
save_source_info_results = we_vote_image_manager.save_we_vote_image_linkedin_info(
we_vote_image, analyze_source_images_results['analyze_image_url_results']['image_width'],
analyze_source_images_results['analyze_image_url_results']['image_height'],
image_url_https, same_day_image_version, kind_of_image_linkedin_profile, image_url_valid)
elif kind_of_image_wikipedia_profile:
save_source_info_results = we_vote_image_manager.save_we_vote_image_wikipedia_info(
we_vote_image, analyze_source_images_results['analyze_image_url_results']['image_width'],
analyze_source_images_results['analyze_image_url_results']['image_height'],
image_url_https, same_day_image_version, kind_of_image_wikipedia_profile, image_url_valid)
elif kind_of_image_other_source:
save_source_info_results = we_vote_image_manager.save_we_vote_image_other_source_info(
we_vote_image, analyze_source_images_results['analyze_image_url_results']['image_width'],
analyze_source_images_results['analyze_image_url_results']['image_height'], other_source,
image_url_https, same_day_image_version, kind_of_image_other_source, image_url_valid)
status += " " + save_source_info_results['status']
if save_source_info_results['success']:
image_stored_from_source = True
date_image_saved = "{year}{:02d}{:02d}".format(we_vote_image.date_image_saved.month,
we_vote_image.date_image_saved.day,
year=we_vote_image.date_image_saved.year)
# ex twitter_profile_image_master-2017210_1_48x48.png
analyze_image_url_results = analyze_source_images_results['analyze_image_url_results']
image_width = analyze_image_url_results['image_width'] if 'image_width' in analyze_image_url_results else 0
image_height = analyze_image_url_results['image_height'] if 'image_height' in analyze_image_url_results else 0
image_format = analyze_image_url_results['image_format'] if 'image_format' in analyze_image_url_results else ''
we_vote_image_file_name = \
"{image_type}_{master_image}-{date_image_saved}_{counter}_" \
"{image_width}x{image_height}.{image_format}" \
"".format(
image_type=analyze_source_images_results['image_type'],
master_image=MASTER_IMAGE,
date_image_saved=date_image_saved,
counter=str(same_day_image_version),
image_width=str(image_width),
image_height=str(image_height),
image_format=str(image_format))
if voter_we_vote_id:
we_vote_image_file_location = voter_we_vote_id + "/" + we_vote_image_file_name
elif candidate_we_vote_id:
we_vote_image_file_location = candidate_we_vote_id + "/" + we_vote_image_file_name
elif organization_we_vote_id:
we_vote_image_file_location = organization_we_vote_id + "/" + we_vote_image_file_name
else:
we_vote_image_file_location = we_vote_image_file_name
image_stored_locally = we_vote_image_manager.store_image_locally(
analyze_source_images_results['image_url_https'], we_vote_image_file_name)
if not image_stored_locally:
error_results = {
'success': success,
'status': status + " IMAGE_NOT_STORED_LOCALLY ",
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': False,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
status += " IMAGE_STORED_LOCALLY "
image_stored_to_aws = we_vote_image_manager.store_image_to_aws(
we_vote_image_file_name, we_vote_image_file_location,
analyze_source_images_results['analyze_image_url_results']['image_format'])
if not image_stored_to_aws:
error_results = {
'success': success,
'status': status + " IMAGE_NOT_STORED_TO_AWS ",
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'image_stored_to_aws': False,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
we_vote_image_url = "https://{bucket_name}.s3.amazonaws.com/{we_vote_image_file_location}" \
"".format(bucket_name=AWS_STORAGE_BUCKET_NAME,
we_vote_image_file_location=we_vote_image_file_location)
save_aws_info = we_vote_image_manager.save_we_vote_image_aws_info(we_vote_image, we_vote_image_url,
we_vote_image_file_location,
we_vote_parent_image_id, is_active_version)
status += " IMAGE_STORED_TO_AWS " + save_aws_info['status'] + " "
success = save_aws_info['success']
if not success:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
else:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': False,
'image_stored_locally': image_stored_locally,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'image_stored_to_aws': image_stored_to_aws,
}
return results
def retrieve_facebook_image_url(facebook_user_id):
"""
    Retrieve the Facebook profile image url from the Facebook Graph API and the background image url
    from the FacebookUser table.
:param facebook_user_id:
:return:
"""
results = {
'facebook_profile_image_url': None,
'facebook_background_image_url': None
}
facebook_manager = FacebookManager()
get_url = "https://graph.facebook.com/v3.1/{facebook_user_id}/picture?width=200&height=200"\
.format(facebook_user_id=facebook_user_id)
response = requests.get(get_url)
if response.status_code == HTTP_OK:
# new facebook profile image url found
results['facebook_profile_image_url'] = response.url
facebook_user_results = facebook_manager.retrieve_facebook_user_by_facebook_user_id(facebook_user_id)
if facebook_user_results['facebook_user_found']:
results['facebook_background_image_url'] = \
facebook_user_results['facebook_user'].facebook_background_image_url_https
return results
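# Illustrative sketch (not part of the original module): combining retrieve_facebook_image_url()
# with cache_image_if_not_cached() for a voter's Facebook profile image. The helper name and its
# arguments are hypothetical placeholders.
def _example_cache_facebook_profile_image_for_voter(voter_we_vote_id, facebook_user_id):
    facebook_urls = retrieve_facebook_image_url(facebook_user_id)
    if not facebook_urls['facebook_profile_image_url']:
        return FACEBOOK_URL_NOT_FOUND
    return cache_image_if_not_cached(
        google_civic_election_id=0,
        image_url_https=facebook_urls['facebook_profile_image_url'],
        voter_we_vote_id=voter_we_vote_id,
        facebook_user_id=facebook_user_id,
        is_active_version=True,
        kind_of_image_facebook_profile=True,
        kind_of_image_original=True)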
def retrieve_and_save_ballotpedia_candidate_images(candidate):
from import_export_ballotpedia.controllers import retrieve_ballotpedia_candidate_image_from_api
status = ""
candidate_manager = CandidateManager()
politician_manager = PoliticianManager()
if not candidate:
status += "BALLOTPEDIA_CANDIDATE_IMAGE_NOT_RETRIEVED-CANDIDATE_MISSING "
results = {
'success': False,
'status': status,
'candidate': None,
}
return results
if positive_value_exists(candidate.ballotpedia_image_id):
status += "BALLOTPEDIA_CANDIDATE_IMAGE-REACHING_OUT_TO_BALLOTPEDIA "
results = retrieve_ballotpedia_candidate_image_from_api(
candidate.ballotpedia_image_id, candidate.google_civic_election_id)
if results['success']:
status += "BALLOTPEDIA_CANDIDATE_IMAGE_RETRIEVED "
            # Get the original image url so the original-size image can be cached
ballotpedia_profile_image_url_https = results['profile_image_url_https']
cache_results = cache_master_and_resized_image(
candidate_id=candidate.id,
candidate_we_vote_id=candidate.we_vote_id,
ballotpedia_profile_image_url=ballotpedia_profile_image_url_https,
image_source=BALLOTPEDIA_IMAGE_SOURCE)
cached_ballotpedia_image_url_https = cache_results['cached_ballotpedia_image_url_https']
we_vote_hosted_profile_image_url_large = cache_results['we_vote_hosted_profile_image_url_large']
we_vote_hosted_profile_image_url_medium = cache_results['we_vote_hosted_profile_image_url_medium']
we_vote_hosted_profile_image_url_tiny = cache_results['we_vote_hosted_profile_image_url_tiny']
save_candidate_results = candidate_manager.update_candidate_ballotpedia_image_details(
candidate,
cached_ballotpedia_image_url_https,
we_vote_hosted_profile_image_url_large,
we_vote_hosted_profile_image_url_medium,
we_vote_hosted_profile_image_url_tiny)
candidate = save_candidate_results['candidate']
# Need to update voter ballotpedia details for the candidate in future
save_politician_details_results = politician_manager.update_politician_details_from_candidate(
candidate)
save_position_from_candidate_results = update_all_position_details_from_candidate(candidate)
else:
status += "BALLOTPEDIA_CANDIDATE_IMAGE-CLEARING_DETAILS "
# save_candidate_results = candidate_manager.clear_candidate_twitter_details(
# candidate)
results = {
'success': True,
'status': status,
'candidate': candidate,
}
return results
def retrieve_twitter_image_url(twitter_id, kind_of_image_twitter_profile=False,
kind_of_image_twitter_background=False,
kind_of_image_twitter_banner=False):
"""
Retrieve twitter image urls from TwitterUser table.
:param twitter_id:
:param kind_of_image_twitter_profile:
:param kind_of_image_twitter_background:
:param kind_of_image_twitter_banner:
:return:
"""
twitter_image_url = None
twitter_user_manager = TwitterUserManager()
twitter_user_results = twitter_user_manager.retrieve_twitter_user(twitter_id)
if twitter_user_results['twitter_user_found']:
if kind_of_image_twitter_profile:
twitter_image_url = twitter_user_results['twitter_user'].twitter_profile_image_url_https
elif kind_of_image_twitter_background:
twitter_image_url = twitter_user_results['twitter_user'].twitter_profile_background_image_url_https
elif kind_of_image_twitter_banner:
twitter_image_url = twitter_user_results['twitter_user'].twitter_profile_banner_url_https
return twitter_user_results['twitter_user'], twitter_image_url
def retrieve_image_urls_from_twitter(twitter_id):
"""
Retrieve latest twitter profile, background and banner image url from twitter API call
:param twitter_id:
:return:
"""
latest_twitter_profile_image_url = None
latest_twitter_background_image_url = None
latest_twitter_banner_image_url = None
twitter_user_info_results = retrieve_twitter_user_info(twitter_id, twitter_handle='')
if 'profile_image_url_https' in twitter_user_info_results['twitter_json'] \
and twitter_user_info_results['twitter_json']['profile_image_url_https']:
# new twitter image url found
latest_twitter_profile_image_url = twitter_user_info_results['twitter_json'][
'profile_image_url_https']
if 'profile_background_image_url_https' in twitter_user_info_results['twitter_json'] \
and twitter_user_info_results['twitter_json']['profile_background_image_url_https']:
# new twitter image url found
latest_twitter_background_image_url = twitter_user_info_results['twitter_json'][
'profile_background_image_url_https']
if 'profile_banner_url' in twitter_user_info_results['twitter_json'] \
and twitter_user_info_results['twitter_json']['profile_banner_url']:
# new twitter image url found
latest_twitter_banner_image_url = twitter_user_info_results['twitter_json'][
'profile_banner_url']
results = {
'latest_twitter_profile_image_url': latest_twitter_profile_image_url,
'latest_twitter_background_image_url': latest_twitter_background_image_url,
'latest_twitter_banner_image_url': latest_twitter_banner_image_url,
}
return results
def analyze_source_images(
twitter_id=0,
twitter_screen_name='',
facebook_user_id=0,
maplight_id=0,
vote_smart_id=0,
image_url_https="",
kind_of_image_twitter_profile=False,
kind_of_image_twitter_background=False,
kind_of_image_twitter_banner=False,
kind_of_image_facebook_profile=False,
kind_of_image_facebook_background=False,
kind_of_image_maplight=False,
kind_of_image_vote_smart=False,
kind_of_image_ballotpedia_profile=False,
kind_of_image_campaignx_photo=False,
kind_of_image_linkedin_profile=False,
kind_of_image_wikipedia_profile=False,
kind_of_image_other_source=False,
other_source=False):
"""
:param twitter_id:
:param twitter_screen_name:
:param facebook_user_id:
:param maplight_id:
:param vote_smart_id:
:param image_url_https:
:param kind_of_image_twitter_profile:
:param kind_of_image_twitter_background:
:param kind_of_image_twitter_banner:
:param kind_of_image_facebook_profile:
:param kind_of_image_facebook_background:
:param kind_of_image_maplight:
:param kind_of_image_vote_smart:
:param kind_of_image_ballotpedia_profile:
:param kind_of_image_campaignx_photo:
:param kind_of_image_linkedin_profile:
:param kind_of_image_wikipedia_profile:
:param kind_of_image_other_source:
:param other_source:
:return:
"""
image_type = None
if kind_of_image_twitter_profile:
image_type = TWITTER_PROFILE_IMAGE_NAME
elif kind_of_image_twitter_background:
image_type = TWITTER_BACKGROUND_IMAGE_NAME
elif kind_of_image_twitter_banner:
image_type = TWITTER_BANNER_IMAGE_NAME
elif kind_of_image_facebook_profile:
image_type = FACEBOOK_PROFILE_IMAGE_NAME
elif kind_of_image_facebook_background:
image_type = FACEBOOK_BACKGROUND_IMAGE_NAME
elif kind_of_image_maplight:
image_type = MAPLIGHT_IMAGE_NAME
elif kind_of_image_vote_smart:
image_type = VOTE_SMART_IMAGE_NAME
elif kind_of_image_ballotpedia_profile:
image_type = BALLOTPEDIA_IMAGE_NAME
elif kind_of_image_campaignx_photo:
image_type = CAMPAIGNX_PHOTO_IMAGE_NAME
elif kind_of_image_linkedin_profile:
image_type = LINKEDIN_IMAGE_NAME
elif kind_of_image_wikipedia_profile:
image_type = WIKIPEDIA_IMAGE_NAME
elif kind_of_image_other_source:
image_type = other_source
analyze_image_url_results = analyze_remote_url(image_url_https)
results = {
'twitter_id': twitter_id,
'twitter_screen_name': twitter_screen_name,
'facebook_user_id': facebook_user_id,
'maplight_id': maplight_id,
'vote_smart_id': vote_smart_id,
'image_url_https': image_url_https,
'image_type': image_type,
'analyze_image_url_results': analyze_image_url_results
}
return results
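# Illustrative sketch: analyze_source_images() maps the kind_of_image_* flag to an image_type
# constant and analyzes the remote url, without downloading or caching the image itself.
def _example_analyze_twitter_profile_source_image(twitter_id, image_url_https):
    analysis = analyze_source_images(
        twitter_id=twitter_id,
        image_url_https=image_url_https,
        kind_of_image_twitter_profile=True)
    # analysis['image_type'] is TWITTER_PROFILE_IMAGE_NAME here;
    # analysis['analyze_image_url_results'] holds what analyze_remote_url() reported about the url
    return analysis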
def create_resized_images_for_all_organizations():
"""
Create resized images for all organizations
:return:
"""
create_all_resized_images_results = []
we_vote_image_list = WeVoteImage.objects.all()
# TODO Limit this to organizations only
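# One possible filter (sketch only, not verified against the model's query helpers):
#   we_vote_image_list = WeVoteImage.objects.exclude(organization_we_vote_id=None)\
#       .exclude(organization_we_vote_id='')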
for we_vote_image in we_vote_image_list:
# Iterate through all cached images
create_resized_images_results = create_resized_image_if_not_created(we_vote_image)
create_all_resized_images_results.append(create_resized_images_results)
return create_all_resized_images_results
def create_resized_images_for_all_voters():
"""
Create resized images for all voters
:return:
"""
create_all_resized_images_results = []
we_vote_image_list = WeVoteImage.objects.all()
# TODO Limit this to voters only
for we_vote_image in we_vote_image_list:
# Iterate through all cached images
create_resized_images_results = create_resized_image_if_not_created(we_vote_image)
create_all_resized_images_results.append(create_resized_images_results)
return create_all_resized_images_results
def delete_cached_images_for_candidate(candidate):
original_twitter_profile_image_url_https = None
original_twitter_profile_background_image_url_https = None
original_twitter_profile_banner_url_https = None
delete_image_count = 0
not_deleted_image_count = 0
we_vote_image_list = retrieve_all_images_for_one_candidate(candidate.we_vote_id)
if len(we_vote_image_list) > 0:
we_vote_image_manager = WeVoteImageManager()
for we_vote_image in we_vote_image_list:
if we_vote_image.kind_of_image_twitter_profile and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_twitter_profile_image_url_https = we_vote_image.twitter_profile_image_url_https
if we_vote_image.kind_of_image_twitter_background and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_twitter_profile_background_image_url_https = \
we_vote_image.twitter_profile_background_image_url_https
if we_vote_image.kind_of_image_twitter_banner and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_twitter_profile_banner_url_https = we_vote_image.twitter_profile_banner_url_https
# Reset CandidateCampaign with original image details
candidate_manager = CandidateManager()
reset_candidate_image_results = candidate_manager.reset_candidate_image_details(
candidate, original_twitter_profile_image_url_https, original_twitter_profile_background_image_url_https,
original_twitter_profile_banner_url_https)
# Reset Twitter User Table with original image details
twitter_user_manager = TwitterUserManager()
reset_twitter_user_image_results = twitter_user_manager.reset_twitter_user_image_details(
candidate.twitter_user_id, original_twitter_profile_image_url_https,
original_twitter_profile_background_image_url_https, original_twitter_profile_banner_url_https)
# Reset Position Table with original image details
reset_position_image_results = reset_all_position_image_details_from_candidate(
candidate, original_twitter_profile_image_url_https)
# Reset Politician Table with original image details
politician_manager = PoliticianManager()
reset_politician_image_results = politician_manager.reset_politician_image_details_from_candidate(
candidate, original_twitter_profile_image_url_https, original_twitter_profile_background_image_url_https,
original_twitter_profile_banner_url_https)
if reset_candidate_image_results['success']:
for we_vote_image in we_vote_image_list:
# Delete image from AWS
image_deleted_from_aws = we_vote_image_manager.delete_image_from_aws(
we_vote_image.we_vote_image_file_location)
delete_result = we_vote_image_manager.delete_we_vote_image(we_vote_image)
if delete_result['success']:
delete_image_count += 1
else:
not_deleted_image_count += 1
success = True
status = "DELETED_CACHED_IMAGES_FOR_CANDIDATE"
else:
success = False
status = "NO_IMAGE_FOUND_FOR_CANDIDATE"
results = {
'success': success,
'status': status,
'delete_image_count': delete_image_count,
'not_deleted_image_count': not_deleted_image_count,
}
return results
def delete_cached_images_for_issue(issue):
delete_image_count = 0
not_deleted_image_count = 0
we_vote_image_list = retrieve_all_images_for_one_issue(issue.we_vote_id)
if len(we_vote_image_list) > 0:
we_vote_image_manager = WeVoteImageManager()
# Reset Issue with original image details
issue_manager = IssueManager()
reset_issue_image_results = issue_manager.reset_issue_image_details(
issue, issue_image_url='')
if reset_issue_image_results['success']:
for we_vote_image in we_vote_image_list:
# Delete image from AWS
image_deleted_from_aws = we_vote_image_manager.delete_image_from_aws(
we_vote_image.we_vote_image_file_location)
delete_result = we_vote_image_manager.delete_we_vote_image(we_vote_image)
if delete_result['success']:
delete_image_count += 1
else:
not_deleted_image_count += 1
success = True
status = "DELETED_CACHED_IMAGES_FOR_ISSUE"
else:
success = False
status = "NO_IMAGE_FOUND_FOR_ISSUE"
results = {
'success': success,
'status': status,
'delete_image_count': delete_image_count,
'not_deleted_image_count': not_deleted_image_count,
}
return results
def delete_cached_images_for_organization(organization):
original_twitter_profile_image_url_https = None
original_twitter_profile_background_image_url_https = None
original_twitter_profile_banner_url_https = None
original_facebook_profile_image_url_https = None
original_facebook_background_image_url_https = None
delete_image_count = 0
not_deleted_image_count = 0
we_vote_image_list = retrieve_all_images_for_one_organization(organization.we_vote_id)
if len(we_vote_image_list) > 0:
we_vote_image_manager = WeVoteImageManager()
for we_vote_image in we_vote_image_list:
if we_vote_image.kind_of_image_twitter_profile and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_twitter_profile_image_url_https = we_vote_image.twitter_profile_image_url_https
if we_vote_image.kind_of_image_twitter_background and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_twitter_profile_background_image_url_https = \
we_vote_image.twitter_profile_background_image_url_https
if we_vote_image.kind_of_image_twitter_banner and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_twitter_profile_banner_url_https = we_vote_image.twitter_profile_banner_url_https
if we_vote_image.kind_of_image_facebook_profile and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_facebook_profile_image_url_https = we_vote_image.facebook_profile_image_url_https
if we_vote_image.kind_of_image_facebook_background and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_facebook_background_image_url_https = we_vote_image.facebook_background_image_url_https
# Reset Organization with original image details
organization_manager = OrganizationManager()
reset_organization_image_results = organization_manager.reset_organization_image_details(
organization, original_twitter_profile_image_url_https, original_twitter_profile_background_image_url_https,
original_twitter_profile_banner_url_https, original_facebook_profile_image_url_https)
# Reset Twitter User Table with original image details
twitter_user_manager = TwitterUserManager()
reset_twitter_user_image_results = twitter_user_manager.reset_twitter_user_image_details(
organization.twitter_user_id, original_twitter_profile_image_url_https,
original_twitter_profile_background_image_url_https, original_twitter_profile_banner_url_https)
# Reset Position Table with original image details
reset_position_image_results = reset_position_entered_image_details_from_organization(
organization, original_twitter_profile_image_url_https, original_facebook_profile_image_url_https)
# Reset Voter Guide table with original image details
voter_guide_manager = VoterGuideManager()
reset_voter_guide_image_results = voter_guide_manager.reset_voter_guide_image_details(
organization, original_twitter_profile_image_url_https, original_facebook_profile_image_url_https)
# Reset Voter with original image details
voter_manager = VoterManager()
voter_results = voter_manager.retrieve_voter_by_organization_we_vote_id(organization.we_vote_id)
voter = voter_results['voter']
if voter_results['voter_found']:
reset_voter_image_results = voter_manager.reset_voter_image_details(
voter, original_twitter_profile_image_url_https, original_facebook_profile_image_url_https)
# Reset Facebook User Table with original image details
facebook_manager = FacebookManager()
reset_facebook_user_image_results = facebook_manager.reset_facebook_user_image_details(
organization.facebook_id, original_facebook_profile_image_url_https,
original_facebook_background_image_url_https)
if reset_organization_image_results['success']:
for we_vote_image in we_vote_image_list:
# Delete image from AWS
image_deleted_from_aws = we_vote_image_manager.delete_image_from_aws(
we_vote_image.we_vote_image_file_location)
delete_result = we_vote_image_manager.delete_we_vote_image(we_vote_image)
if delete_result['success']:
delete_image_count += 1
else:
not_deleted_image_count += 1
success = True
status = "DELETED_CACHED_IMAGES_FOR_CANDIDATE"
else:
success = False
status = "NO_IMAGE_FOUND_FOR_CANDIDATE"
results = {
'success': success,
'status': status,
'delete_image_count': delete_image_count,
'not_deleted_image_count': not_deleted_image_count,
}
return results
def delete_cached_images_for_voter(voter):
original_twitter_profile_image_url_https = None
original_twitter_profile_background_image_url_https = None
original_twitter_profile_banner_url_https = None
original_facebook_profile_image_url_https = None
original_facebook_background_image_url_https = None
delete_image_count = 0
not_deleted_image_count = 0
we_vote_image_list = retrieve_all_images_for_one_voter(voter.id)
if len(we_vote_image_list) > 0:
we_vote_image_manager = WeVoteImageManager()
for we_vote_image in we_vote_image_list:
if we_vote_image.kind_of_image_twitter_profile and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_twitter_profile_image_url_https = we_vote_image.twitter_profile_image_url_https
if we_vote_image.kind_of_image_twitter_background and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_twitter_profile_background_image_url_https = \
we_vote_image.twitter_profile_background_image_url_https
if we_vote_image.kind_of_image_twitter_banner and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_twitter_profile_banner_url_https = we_vote_image.twitter_profile_banner_url_https
if we_vote_image.kind_of_image_facebook_profile and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_facebook_profile_image_url_https = we_vote_image.facebook_profile_image_url_https
if we_vote_image.kind_of_image_facebook_background and we_vote_image.kind_of_image_original and \
we_vote_image.is_active_version:
original_facebook_background_image_url_https = we_vote_image.facebook_background_image_url_https
# Reset Voter with original image details
voter_manager = VoterManager()
reset_voter_image_results = voter_manager.reset_voter_image_details(
voter, original_twitter_profile_image_url_https, original_facebook_profile_image_url_https)
# Reset Twitter User Table with original image details
twitter_user_manager = TwitterUserManager()
reset_twitter_user_image_results = twitter_user_manager.reset_twitter_user_image_details(
voter.twitter_id, original_twitter_profile_image_url_https,
original_twitter_profile_background_image_url_https, original_twitter_profile_banner_url_https)
# Reset Organization with original image details
organization_manager = OrganizationManager()
organization_results = organization_manager.retrieve_organization(0, '', '', voter.twitter_id)
organization = organization_results['organization']
if organization_results['organization_found']:
reset_organization_image_results = organization_manager.reset_organization_image_details(
organization, original_twitter_profile_image_url_https,
original_twitter_profile_background_image_url_https, original_twitter_profile_banner_url_https,
original_facebook_profile_image_url_https)
# Reset Position Table with original image details
reset_position_image_results = reset_position_for_friends_image_details_from_voter(
voter, original_twitter_profile_image_url_https, original_facebook_profile_image_url_https)
# Reset Facebook User Table with original image details
facebook_manager = FacebookManager()
reset_facebook_user_image_results = facebook_manager.reset_facebook_user_image_details(
voter.facebook_id, original_facebook_profile_image_url_https, original_facebook_background_image_url_https)
if reset_voter_image_results['success']:
for we_vote_image in we_vote_image_list:
# Delete image from AWS
image_deleted_from_aws = we_vote_image_manager.delete_image_from_aws(
we_vote_image.we_vote_image_file_location)
delete_result = we_vote_image_manager.delete_we_vote_image(we_vote_image)
if delete_result['success']:
delete_image_count += 1
else:
not_deleted_image_count += 1
success = True
status = "DELETED_CACHED_IMAGES_FOR_VOTER"
else:
success = False
status = "NO_IMAGE_FOUND_FOR_VOTER"
results = {
'success': success,
'status': status,
'delete_image_count': delete_image_count,
'not_deleted_image_count': not_deleted_image_count,
}
return results
def delete_stored_images_for_voter(voter):
"""
This method removes all image data from the Voter, Facebook, and Twitter tables for this voter.
Call delete_cached_images_for_voter() before calling this one to remove all the cached images from AWS;
otherwise the cached images will remain in AWS as unreferenced, wasted storage.
"""
success = False
# Delete Voter images
voter_manager = VoterManager()
voter_results = voter_manager.retrieve_voter_by_we_vote_id(voter.we_vote_id)
voter = voter_results['voter']
if voter_results['voter_found']:
voter.twitter_profile_image_url_https = ''
voter.we_vote_hosted_profile_image_url_large = ''
voter.we_vote_hosted_profile_image_url_medium = ''
voter.we_vote_hosted_profile_image_url_tiny = ''
voter.facebook_profile_image_url_https = ''
voter.save()
success = True
# Delete Twitter User Table images
if positive_value_exists(voter.twitter_id):
twitter_user_manager = TwitterUserManager()
twitter_results = twitter_user_manager.retrieve_twitter_user(voter.twitter_id)
twitter_user_found = twitter_results['twitter_user_found']
twitter_user = twitter_results['twitter_user']
if twitter_user_found:
twitter_user.twitter_profile_image_url_https = ''
twitter_user.twitter_profile_background_image_url_https = ''
twitter_user.twitter_profile_banner_url_https = ''
twitter_user.we_vote_hosted_profile_image_url_large = ''
twitter_user.we_vote_hosted_profile_image_url_medium = ''
twitter_user.we_vote_hosted_profile_image_url_tiny = ''
twitter_user.save()
success = True
# Delete Organization images, Dec 2019, not for now, don't want to cause exceptions
# Delete Position Table images, Dec 2019, not for now, don't want to cause exceptions
# Delete Facebook User Table images
if positive_value_exists(voter.facebook_id):
facebook_manager = FacebookManager()
facebook_user_results = facebook_manager.retrieve_facebook_user_by_facebook_user_id(voter.facebook_id)
facebook_user = facebook_user_results['facebook_user']
if facebook_user_results['facebook_user_found']:
facebook_user.facebook_profile_image_url_https = ''
facebook_user.facebook_background_image_url_https = ''
facebook_user.we_vote_hosted_profile_image_url_large = ''
facebook_user.we_vote_hosted_profile_image_url_medium = ''
facebook_user.we_vote_hosted_profile_image_url_tiny = ''
facebook_user.save()
success = True
# Delete FacebookAuthResponse Table images, Dec 2019, not for now, as a result image will display when voter signs in
# Delete TwitterAuthResponse Table images, Dec 2019, not for now, as a result image will display when voter signs in
if success:
status = "DELETED_STORED_IMAGES_FOR_VOTER"
else:
status = "NO_IMAGE_FOUND_FOR_VOTER"
results = {
'success': success,
'status': status,
}
return results
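# Illustrative sketch of the ordering the docstring above asks for: clear the AWS-cached
# copies first, then blank out the stored image fields, so no unreferenced files are left behind.
def _example_remove_all_images_for_voter(voter):
    cached_results = delete_cached_images_for_voter(voter)    # deletes WeVoteImage rows + AWS files
    stored_results = delete_stored_images_for_voter(voter)    # blanks Voter/Twitter/Facebook image fields
    return cached_results['success'] and stored_results['success']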
def retrieve_all_images_for_one_candidate(candidate_we_vote_id):
"""
Retrieve all cached images for one candidate
:param candidate_we_vote_id:
:return:
"""
we_vote_image_list = []
candidate_manager = CandidateManager()
we_vote_image_manager = WeVoteImageManager()
if positive_value_exists(candidate_we_vote_id):
# if candidate_we_vote_id is defined then retrieve cached images for that candidate only
candidate_results = candidate_manager.retrieve_candidate_from_we_vote_id(candidate_we_vote_id)
if candidate_results['candidate_found']:
we_vote_image_list_results = we_vote_image_manager.\
retrieve_we_vote_image_list_from_we_vote_id(None, candidate_we_vote_id)
we_vote_image_list_query = we_vote_image_list_results['we_vote_image_list']
we_vote_image_list = list(we_vote_image_list_query)
return we_vote_image_list
def retrieve_all_images_for_one_organization(organization_we_vote_id):
"""
Retrieve all cached images for one organization
:param organization_we_vote_id:
:return:
"""
we_vote_image_list = []
organization_manager = OrganizationManager()
we_vote_image_manager = WeVoteImageManager()
if positive_value_exists(organization_we_vote_id):
# if organization_we_vote_id is defined then retrieve cached images for that organization only
organization_results = organization_manager.retrieve_organization_from_we_vote_id(organization_we_vote_id)
if organization_results['organization_found']:
we_vote_image_list_results = we_vote_image_manager.\
retrieve_we_vote_image_list_from_we_vote_id(None, None, organization_we_vote_id)
we_vote_image_list_query = we_vote_image_list_results['we_vote_image_list']
we_vote_image_list = list(we_vote_image_list_query)
return we_vote_image_list
def cache_and_create_resized_images_for_organization(organization_we_vote_id):
"""
Create resized images for a specific organization
:param organization_we_vote_id:
:return:
"""
create_all_resized_images_results = []
organization_manager = OrganizationManager()
we_vote_image_manager = WeVoteImageManager()
# cache original image
cache_images_for_one_organization_results = cache_organization_master_images(
organization_we_vote_id)
# create resized images for that organization only
organization_results = organization_manager.retrieve_organization_from_we_vote_id(organization_we_vote_id)
if organization_results['success']:
organization_we_vote_id = organization_results['organization'].we_vote_id
we_vote_image_list_results = we_vote_image_manager.\
retrieve_we_vote_image_list_from_we_vote_id(None, None, organization_we_vote_id)
for we_vote_image in we_vote_image_list_results['we_vote_image_list']:
# Iterate through all cached images
create_resized_images_results = create_resized_image_if_not_created(we_vote_image)
create_resized_images_results.update(cache_images_for_one_organization_results)
create_all_resized_images_results.append(create_resized_images_results)
return create_all_resized_images_results
def cache_and_create_resized_images_for_voter(voter_id):
"""
Create resized images for a specific voter_id
:param voter_id:
:return:
"""
create_all_resized_images_results = []
voter_manager = VoterManager()
we_vote_image_manager = WeVoteImageManager()
# cache original image
cache_images_for_a_voter_results = cache_voter_master_images(voter_id)
# create resized images for that voter only
voter_results = voter_manager.retrieve_voter_by_id(voter_id)
if voter_results['success']:
voter_we_vote_id = voter_results['voter'].we_vote_id
we_vote_image_list_results = we_vote_image_manager.\
retrieve_we_vote_image_list_from_we_vote_id(voter_we_vote_id)
for we_vote_image in we_vote_image_list_results['we_vote_image_list']:
# Iterate through all cached images
create_resized_images_results = create_resized_image_if_not_created(we_vote_image)
create_resized_images_results.update(cache_images_for_a_voter_results)
create_all_resized_images_results.append(create_resized_images_results)
return create_all_resized_images_results
def cache_campaignx_image(
python_image_library_image=None,
campaignx_we_vote_id=None,
kind_of_image_original=False,
kind_of_image_campaignx_photo=False):
"""
Cache master campaignx images to AWS.
:param python_image_library_image:
:param campaignx_we_vote_id:
:param kind_of_image_original:
:param kind_of_image_campaignx_photo:
:return:
"""
we_vote_parent_image_id = None
success = False
status = ''
is_active_version = True
we_vote_image_created = False
image_url_valid = False
image_stored_from_source = False
image_stored_to_aws = False
image_versions = []
we_vote_image_manager = WeVoteImageManager()
create_we_vote_image_results = we_vote_image_manager.create_we_vote_image(
campaignx_we_vote_id=campaignx_we_vote_id,
kind_of_image_campaignx_photo=kind_of_image_campaignx_photo,
kind_of_image_original=kind_of_image_original)
status += create_we_vote_image_results['status']
if not create_we_vote_image_results['we_vote_image_saved']:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
return error_results
we_vote_image_created = True
we_vote_image = create_we_vote_image_results['we_vote_image']
# Validate the image file and get the source image properties
analyze_source_images_results = analyze_image_in_memory(python_image_library_image)
if not analyze_source_images_results['image_url_valid']:
error_results = {
'success': success,
'status': status + " IMAGE_URL_NOT_VALID ",
'we_vote_image_created': True,
'image_url_valid': False,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
image_url_valid = True
status += " IMAGE_URL_VALID "
image_width = analyze_source_images_results['image_width']
image_height = analyze_source_images_results['image_height']
image_format = analyze_source_images_results['image_format']
# Get today's cached images and their versions so that image version can be calculated
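# For example: the first image cached today gets same_day_image_version 1; if versions [1, 2]
# already exist for today, the next image is saved as version 3 (max + 1).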
cached_todays_we_vote_image_list_results = we_vote_image_manager.retrieve_todays_cached_we_vote_image_list(
campaignx_we_vote_id=campaignx_we_vote_id,
kind_of_image_campaignx_photo=kind_of_image_campaignx_photo,
kind_of_image_original=kind_of_image_original)
for cached_we_vote_image in cached_todays_we_vote_image_list_results['we_vote_image_list']:
if cached_we_vote_image.same_day_image_version:
image_versions.append(cached_we_vote_image.same_day_image_version)
if image_versions:
same_day_image_version = max(image_versions) + 1
else:
same_day_image_version = 1
image_stored_from_source = True
date_image_saved = "{year}{:02d}{:02d}".format(we_vote_image.date_image_saved.month,
we_vote_image.date_image_saved.day,
year=we_vote_image.date_image_saved.year)
if kind_of_image_campaignx_photo:
image_type = CAMPAIGNX_PHOTO_IMAGE_NAME
else:
image_type = 'campaignx_photo_image'
if kind_of_image_original:
master_image = MASTER_IMAGE
else:
master_image = 'calculated'
# ex {image_type}_{master_image}-20170210_1_48x48.png
we_vote_image_file_name = "{image_type}_{master_image}-{date_image_saved}_{counter}_" \
"{image_width}x{image_height}.{image_format}" \
"".format(image_type=image_type,
master_image=master_image,
date_image_saved=date_image_saved,
counter=str(same_day_image_version),
image_width=str(image_width),
image_height=str(image_height),
image_format=str(image_format))
we_vote_image_file_location = campaignx_we_vote_id + "/" + we_vote_image_file_name
image_stored_locally = we_vote_image_manager.store_python_image_locally(
python_image_library_image, we_vote_image_file_name)
if not image_stored_locally:
error_results = {
'success': success,
'status': status + " IMAGE_NOT_STORED_LOCALLY ",
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': False,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
image_stored_to_aws = we_vote_image_manager.store_image_to_aws(
we_vote_image_file_name, we_vote_image_file_location, image_format)
if not image_stored_to_aws:
error_results = {
'success': success,
'status': status + " IMAGE_NOT_STORED_TO_AWS ",
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': False,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
we_vote_image_url = "https://{bucket_name}.s3.amazonaws.com/{we_vote_image_file_location}" \
"".format(bucket_name=AWS_STORAGE_BUCKET_NAME,
we_vote_image_file_location=we_vote_image_file_location)
save_aws_info = we_vote_image_manager.save_we_vote_image_aws_info(
we_vote_image, we_vote_image_url,
we_vote_image_file_location,
we_vote_parent_image_id, is_active_version)
status += " IMAGE_STORED_TO_AWS " + save_aws_info['status'] + " "
success = save_aws_info['success']
if not success:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
kind_of_image_large = not kind_of_image_original
save_source_info_results = we_vote_image_manager.save_we_vote_image_campaignx_info(
we_vote_image=we_vote_image,
image_width=analyze_source_images_results['image_width'],
image_height=analyze_source_images_results['image_height'],
image_url_https=we_vote_image.we_vote_image_url,
same_day_image_version=same_day_image_version,
image_url_valid=image_url_valid)
status += " " + save_source_info_results['status']
if not save_source_info_results['success']:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': False,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
# Set is_active_version to False on other master images for the same campaignx
set_active_version_false_results = we_vote_image_manager.set_active_version_false_for_other_images(
campaignx_we_vote_id=campaignx_we_vote_id,
image_url_https=we_vote_image.we_vote_image_url,
kind_of_image_campaignx_photo=kind_of_image_campaignx_photo)
status += set_active_version_false_results['status']
results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': we_vote_image
}
return results
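# Illustrative sketch: cache_campaignx_image() expects an already-opened Pillow image.
# The local file path here is hypothetical; Pillow is imported locally to keep the sketch self-contained.
def _example_cache_campaignx_master_photo(campaignx_we_vote_id, local_photo_path):
    from PIL import Image  # Pillow, the same library behind python_image_library_image
    python_image_library_image = Image.open(local_photo_path)
    results = cache_campaignx_image(
        python_image_library_image=python_image_library_image,
        campaignx_we_vote_id=campaignx_we_vote_id,
        kind_of_image_original=True,
        kind_of_image_campaignx_photo=True)
    # results['we_vote_image'] carries the stored AWS url when results['success'] is True
    return results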
def retrieve_all_images_for_one_issue(issue_we_vote_id):
"""
Retrieve all cached images for one issue
:param issue_we_vote_id:
:return:
"""
we_vote_image_list = []
we_vote_image_manager = WeVoteImageManager()
if issue_we_vote_id:
# if issue_we_vote_id is defined then retrieve cached images for that issue only
we_vote_image_list_results = we_vote_image_manager.\
retrieve_we_vote_image_list_from_we_vote_id(issue_we_vote_id=issue_we_vote_id)
we_vote_image_list_query = we_vote_image_list_results['we_vote_image_list']
we_vote_image_list = list(we_vote_image_list_query)
return we_vote_image_list
def retrieve_all_images_for_one_voter(voter_id):
"""
Retrieve all cached images for one voter
:param voter_id:
:return:
"""
we_vote_image_list = []
voter_manager = VoterManager()
we_vote_image_manager = WeVoteImageManager()
if voter_id:
# if voter_id is defined then retrieve cached images for that voter only
voter_results = voter_manager.retrieve_voter_by_id(voter_id)
if voter_results['success']:
voter_we_vote_id = voter_results['voter'].we_vote_id
we_vote_image_list_results = we_vote_image_manager.\
retrieve_we_vote_image_list_from_we_vote_id(voter_we_vote_id)
we_vote_image_list_query = we_vote_image_list_results['we_vote_image_list']
we_vote_image_list = list(we_vote_image_list_query)
return we_vote_image_list
def create_resized_image_if_not_created(we_vote_image):
"""
Create resized versions for this we_vote_image object only if they have not been created yet
:param we_vote_image:
:return:
"""
voter_we_vote_id = we_vote_image.voter_we_vote_id
campaignx_we_vote_id = we_vote_image.campaignx_we_vote_id
candidate_we_vote_id = we_vote_image.candidate_we_vote_id
organization_we_vote_id = we_vote_image.organization_we_vote_id
google_civic_election_id = we_vote_image.google_civic_election_id
we_vote_parent_image_id = we_vote_image.id
twitter_id = we_vote_image.twitter_id
facebook_user_id = we_vote_image.facebook_user_id
maplight_id = we_vote_image.maplight_id
vote_smart_id = we_vote_image.vote_smart_id
other_source = we_vote_image.other_source
kind_of_image_twitter_profile = we_vote_image.kind_of_image_twitter_profile
kind_of_image_twitter_background = we_vote_image.kind_of_image_twitter_background
kind_of_image_twitter_banner = we_vote_image.kind_of_image_twitter_banner
kind_of_image_facebook_profile = we_vote_image.kind_of_image_facebook_profile
kind_of_image_facebook_background = we_vote_image.kind_of_image_facebook_background
kind_of_image_maplight = we_vote_image.kind_of_image_maplight
kind_of_image_vote_smart = we_vote_image.kind_of_image_vote_smart
kind_of_image_ballotpedia_profile = we_vote_image.kind_of_image_ballotpedia_profile
kind_of_image_campaignx_photo = we_vote_image.kind_of_image_campaignx_photo
kind_of_image_linkedin_profile = we_vote_image.kind_of_image_linkedin_profile
kind_of_image_wikipedia_profile = we_vote_image.kind_of_image_wikipedia_profile
kind_of_image_other_source = we_vote_image.kind_of_image_other_source
image_offset_x = we_vote_image.facebook_background_image_offset_x
image_offset_y = we_vote_image.facebook_background_image_offset_y
if positive_value_exists(we_vote_image.we_vote_image_file_location):
image_format = we_vote_image.we_vote_image_file_location.split(".")[-1]
else:
image_format = ""
create_resized_image_results = {
'voter_we_vote_id': voter_we_vote_id,
'campaignx_we_vote_id': campaignx_we_vote_id,
'candidate_we_vote_id': candidate_we_vote_id,
'organization_we_vote_id': organization_we_vote_id,
'cached_large_image': False,
'cached_medium_image': False,
'cached_tiny_image': False,
}
if we_vote_image.kind_of_image_twitter_profile:
image_url_https = we_vote_image.twitter_profile_image_url_https
elif we_vote_image.kind_of_image_twitter_background:
image_url_https = we_vote_image.twitter_profile_background_image_url_https
elif we_vote_image.kind_of_image_twitter_banner:
image_url_https = we_vote_image.twitter_profile_banner_url_https
elif we_vote_image.kind_of_image_facebook_profile:
image_url_https = we_vote_image.facebook_profile_image_url_https
elif we_vote_image.kind_of_image_facebook_background:
image_url_https = we_vote_image.facebook_background_image_url_https
elif we_vote_image.kind_of_image_maplight:
image_url_https = we_vote_image.maplight_image_url_https
elif we_vote_image.kind_of_image_vote_smart:
image_url_https = we_vote_image.vote_smart_image_url_https
elif we_vote_image.kind_of_image_ballotpedia_profile:
image_url_https = we_vote_image.ballotpedia_profile_image_url
elif we_vote_image.kind_of_image_campaignx_photo:
image_url_https = we_vote_image.campaignx_photo_url_https
elif we_vote_image.kind_of_image_linkedin_profile:
image_url_https = we_vote_image.linkedin_profile_image_url
elif we_vote_image.kind_of_image_wikipedia_profile:
image_url_https = we_vote_image.wikipedia_profile_image_url
elif we_vote_image.kind_of_image_other_source:
image_url_https = we_vote_image.other_source_image_url
else:
image_url_https = ''
# Check if resized image versions exist or not
resized_version_exists_results = check_resized_version_exists(
voter_we_vote_id=voter_we_vote_id,
campaignx_we_vote_id=campaignx_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
image_url_https=image_url_https,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_campaignx_photo=kind_of_image_campaignx_photo,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source
)
if not resized_version_exists_results['large_image_version_exists']:
# Large version does not exist, so create the resized image and cache it
cache_resized_image_locally_results = cache_resized_image_locally(
google_civic_election_id,
image_url_https,
we_vote_parent_image_id,
voter_we_vote_id=voter_we_vote_id,
campaignx_we_vote_id=campaignx_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
twitter_id=twitter_id,
facebook_user_id=facebook_user_id,
maplight_id=maplight_id,
vote_smart_id=vote_smart_id,
image_format=image_format,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_campaignx_photo=kind_of_image_campaignx_photo,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,
kind_of_image_large=True,
image_offset_x=image_offset_x,
image_offset_y=image_offset_y,
other_source=other_source)
create_resized_image_results['cached_large_image'] = cache_resized_image_locally_results['success']
else:
create_resized_image_results['cached_large_image'] = IMAGE_ALREADY_CACHED
# Only some of our kinds of images have medium or tiny sizes
if we_vote_image.kind_of_image_campaignx_photo or \
we_vote_image.kind_of_image_twitter_profile or \
we_vote_image.kind_of_image_facebook_profile or \
we_vote_image.kind_of_image_maplight or \
we_vote_image.kind_of_image_vote_smart or \
we_vote_image.kind_of_image_ballotpedia_profile or \
we_vote_image.kind_of_image_linkedin_profile or \
we_vote_image.kind_of_image_wikipedia_profile or \
we_vote_image.kind_of_image_other_source:
if not resized_version_exists_results['medium_image_version_exists']:
# Medium version does not exist, so create the resized image and cache it
cache_resized_image_locally_results = cache_resized_image_locally(
google_civic_election_id, image_url_https, we_vote_parent_image_id,
voter_we_vote_id=voter_we_vote_id,
campaignx_we_vote_id=campaignx_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
twitter_id=twitter_id,
facebook_user_id=facebook_user_id,
maplight_id=maplight_id,
vote_smart_id=vote_smart_id,
image_format=image_format,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_campaignx_photo=kind_of_image_campaignx_photo,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,
kind_of_image_medium=True,
image_offset_x=image_offset_x,
image_offset_y=image_offset_y,
other_source=other_source)
create_resized_image_results['cached_medium_image'] = cache_resized_image_locally_results['success']
else:
create_resized_image_results['cached_medium_image'] = IMAGE_ALREADY_CACHED
if not resized_version_exists_results['tiny_image_version_exists']:
# Tiny version does not exist, so create the resized image and cache it
cache_resized_image_locally_results = cache_resized_image_locally(
google_civic_election_id, image_url_https, we_vote_parent_image_id,
voter_we_vote_id=voter_we_vote_id,
campaignx_we_vote_id=campaignx_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
twitter_id=twitter_id,
facebook_user_id=facebook_user_id,
maplight_id=maplight_id,
vote_smart_id=vote_smart_id,
image_format=image_format,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_campaignx_photo=kind_of_image_campaignx_photo,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,
kind_of_image_tiny=True,
image_offset_x=image_offset_x,
image_offset_y=image_offset_y,
other_source=other_source)
create_resized_image_results['cached_tiny_image'] = cache_resized_image_locally_results['success']
else:
create_resized_image_results['cached_tiny_image'] = IMAGE_ALREADY_CACHED
return create_resized_image_results
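# Illustrative sketch: the usual pattern is to loop over cached WeVoteImage rows and call
# create_resized_image_if_not_created() on each, as the bulk helpers above do for voters
# and organizations. Each cached_* value is True/False or IMAGE_ALREADY_CACHED.
def _example_resize_one_cached_image(we_vote_image):
    resize_results = create_resized_image_if_not_created(we_vote_image)
    return resize_results['cached_large_image']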
def check_resized_version_exists(
voter_we_vote_id=None,
campaignx_we_vote_id=None,
candidate_we_vote_id=None,
organization_we_vote_id=None,
image_url_https=None,
kind_of_image_twitter_profile=False,
kind_of_image_twitter_background=False,
kind_of_image_twitter_banner=False,
kind_of_image_facebook_profile=False,
kind_of_image_facebook_background=False,
kind_of_image_maplight=False,
kind_of_image_vote_smart=False,
kind_of_image_campaignx_photo=False,
kind_of_image_ballotpedia_profile=False,
kind_of_image_linkedin_profile=False,
kind_of_image_wikipedia_profile=False,
kind_of_image_other_source=False):
"""
Check whether large, medium, or tiny image versions already exist
:param voter_we_vote_id:
:param campaignx_we_vote_id:
:param candidate_we_vote_id:
:param organization_we_vote_id:
:param image_url_https:
:param kind_of_image_twitter_profile:
:param kind_of_image_twitter_background:
:param kind_of_image_twitter_banner:
:param kind_of_image_facebook_profile:
:param kind_of_image_facebook_background:
:param kind_of_image_maplight:
:param kind_of_image_vote_smart:
:param kind_of_image_ballotpedia_profile:
:param kind_of_image_campaignx_photo:
:param kind_of_image_linkedin_profile:
:param kind_of_image_wikipedia_profile:
:param kind_of_image_other_source:
:return:
"""
results = {
'medium_image_version_exists': False,
'tiny_image_version_exists': False,
'large_image_version_exists': False
}
we_vote_image_list_results = {
'we_vote_image_list': [],
}
we_vote_image_manager = WeVoteImageManager()
if kind_of_image_twitter_profile:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
twitter_profile_image_url_https=image_url_https)
elif kind_of_image_twitter_background:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
twitter_profile_background_image_url_https=image_url_https)
elif kind_of_image_twitter_banner:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
twitter_profile_banner_url_https=image_url_https)
elif kind_of_image_facebook_profile:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
facebook_profile_image_url_https=image_url_https)
elif kind_of_image_facebook_background:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
facebook_background_image_url_https=image_url_https)
elif kind_of_image_maplight:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
maplight_image_url_https=image_url_https)
elif kind_of_image_vote_smart:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
vote_smart_image_url_https=image_url_https)
elif kind_of_image_ballotpedia_profile:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
ballotpedia_profile_image_url=image_url_https)
elif kind_of_image_campaignx_photo:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
campaignx_we_vote_id,
campaignx_photo_url_https=image_url_https)
elif kind_of_image_linkedin_profile:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
linkedin_profile_image_url=image_url_https)
elif kind_of_image_wikipedia_profile:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
wikipedia_profile_image_url=image_url_https)
elif kind_of_image_other_source:
we_vote_image_list_results = we_vote_image_manager.retrieve_we_vote_image_list_from_url(
voter_we_vote_id, candidate_we_vote_id, organization_we_vote_id,
other_source_image_url=image_url_https)
we_vote_image_list = we_vote_image_list_results['we_vote_image_list']
for we_vote_image in we_vote_image_list:
if we_vote_image.we_vote_image_url is None or we_vote_image.we_vote_image_url == "":
# if we_vote_image_url is empty then delete that entry
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
elif we_vote_image.kind_of_image_medium:
results['medium_image_version_exists'] = True
elif we_vote_image.kind_of_image_tiny:
results['tiny_image_version_exists'] = True
elif we_vote_image.kind_of_image_large:
results['large_image_version_exists'] = True
return results
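# Illustrative sketch: check_resized_version_exists() is keyed on the original source url, so the
# caller passes the same image_url_https that was stored on the master WeVoteImage row.
def _example_twitter_profile_resized_versions_exist(voter_we_vote_id, image_url_https):
    exists_results = check_resized_version_exists(
        voter_we_vote_id=voter_we_vote_id,
        image_url_https=image_url_https,
        kind_of_image_twitter_profile=True)
    return (exists_results['large_image_version_exists'],
            exists_results['medium_image_version_exists'],
            exists_results['tiny_image_version_exists'])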
def cache_resized_image_locally(
google_civic_election_id,
image_url_https,
we_vote_parent_image_id,
voter_we_vote_id=None,
candidate_we_vote_id=None,
campaignx_we_vote_id=None,
organization_we_vote_id=None,
issue_we_vote_id=None,
twitter_id=None,
image_format=None,
facebook_user_id=None,
other_source=None,
maplight_id=None,
vote_smart_id=None,
is_active_version=True,
kind_of_image_twitter_profile=False,
kind_of_image_twitter_background=False,
kind_of_image_twitter_banner=False,
kind_of_image_facebook_profile=False,
kind_of_image_facebook_background=False,
kind_of_image_maplight=False,
kind_of_image_vote_smart=False,
kind_of_image_issue=False,
kind_of_image_ballotpedia_profile=False,
kind_of_image_campaignx_photo=False,
kind_of_image_linkedin_profile=False,
kind_of_image_wikipedia_profile=False,
kind_of_image_other_source=False,
kind_of_image_original=False,
kind_of_image_large=False,
kind_of_image_medium=False,
kind_of_image_tiny=False,
image_offset_x=0,
image_offset_y=0):
"""
Resize the image for the requested image version (large, medium, or tiny) and cache it
:param google_civic_election_id:
:param image_url_https:
:param we_vote_parent_image_id:
:param voter_we_vote_id:
:param campaignx_we_vote_id:
:param candidate_we_vote_id:
:param organization_we_vote_id:
:param issue_we_vote_id:
:param twitter_id:
:param image_format:
:param facebook_user_id:
:param other_source: # can be MapLight or VoteSmart
:param maplight_id:
:param vote_smart_id:
:param is_active_version:
:param kind_of_image_twitter_profile:
:param kind_of_image_twitter_background:
:param kind_of_image_twitter_banner:
:param kind_of_image_facebook_profile:
:param kind_of_image_facebook_background:
:param kind_of_image_maplight:
:param kind_of_image_vote_smart:
:param kind_of_image_issue:
:param kind_of_image_ballotpedia_profile:
:param kind_of_image_campaignx_photo:
:param kind_of_image_linkedin_profile:
:param kind_of_image_wikipedia_profile:
:param kind_of_image_other_source:
:param kind_of_image_original:
:param kind_of_image_large:
:param kind_of_image_medium:
:param kind_of_image_tiny:
:param image_offset_x: # For Facebook background
:param image_offset_y: # For Facebook background
:return:
"""
success = False
status = ''
we_vote_image_created = False
resized_image_created = False
image_stored_from_source = False
image_stored_locally = False
image_stored_to_aws = False
image_versions = []
we_vote_image_file_location = None
we_vote_image_manager = WeVoteImageManager()
# Set up image we will use for large, medium or tiny
create_we_vote_image_results = we_vote_image_manager.create_we_vote_image(
google_civic_election_id=google_civic_election_id,
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
campaignx_we_vote_id=campaignx_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
issue_we_vote_id=issue_we_vote_id,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_issue=kind_of_image_issue,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_campaignx_photo=kind_of_image_campaignx_photo,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,
kind_of_image_original=kind_of_image_original,
kind_of_image_large=kind_of_image_large,
kind_of_image_medium=kind_of_image_medium,
kind_of_image_tiny=kind_of_image_tiny)
status += create_we_vote_image_results['status']
if not create_we_vote_image_results['we_vote_image_saved']:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'resized_image_created': resized_image_created,
'image_stored_to_aws': image_stored_to_aws,
}
return error_results
we_vote_image_created = True
we_vote_image = create_we_vote_image_results['we_vote_image']
image_width = ''
image_height = ''
if kind_of_image_issue:
if kind_of_image_large:
image_width = ISSUES_IMAGE_LARGE_WIDTH
image_height = ISSUES_IMAGE_LARGE_HEIGHT
elif kind_of_image_medium:
image_width = ISSUES_IMAGE_MEDIUM_WIDTH
image_height = ISSUES_IMAGE_MEDIUM_HEIGHT
elif kind_of_image_tiny:
image_width = ISSUES_IMAGE_TINY_WIDTH
image_height = ISSUES_IMAGE_TINY_HEIGHT
elif kind_of_image_campaignx_photo:
if kind_of_image_large:
image_width = CAMPAIGN_PHOTO_LARGE_MAX_WIDTH
image_height = CAMPAIGN_PHOTO_LARGE_MAX_HEIGHT
elif kind_of_image_medium:
image_width = CAMPAIGN_PHOTO_MEDIUM_MAX_WIDTH
image_height = CAMPAIGN_PHOTO_MEDIUM_MAX_HEIGHT
elif kind_of_image_tiny:
image_width = CAMPAIGN_PHOTO_SMALL_MAX_WIDTH
image_height = CAMPAIGN_PHOTO_SMALL_MAX_HEIGHT
else:
if kind_of_image_large:
image_width = PROFILE_IMAGE_LARGE_WIDTH
image_height = PROFILE_IMAGE_LARGE_HEIGHT
elif kind_of_image_medium:
image_width = PROFILE_IMAGE_MEDIUM_WIDTH
image_height = PROFILE_IMAGE_MEDIUM_HEIGHT
elif kind_of_image_tiny:
image_width = PROFILE_IMAGE_TINY_WIDTH
image_height = PROFILE_IMAGE_TINY_HEIGHT
if kind_of_image_twitter_profile:
image_type = TWITTER_PROFILE_IMAGE_NAME
elif kind_of_image_twitter_background:
image_type = TWITTER_BACKGROUND_IMAGE_NAME
elif kind_of_image_twitter_banner:
image_type = TWITTER_BANNER_IMAGE_NAME
elif kind_of_image_facebook_profile:
image_type = FACEBOOK_PROFILE_IMAGE_NAME
elif kind_of_image_facebook_background:
image_type = FACEBOOK_BACKGROUND_IMAGE_NAME
image_height = SOCIAL_BACKGROUND_IMAGE_HEIGHT
image_width = SOCIAL_BACKGROUND_IMAGE_WIDTH
elif kind_of_image_maplight:
image_type = MAPLIGHT_IMAGE_NAME
elif kind_of_image_vote_smart:
image_type = VOTE_SMART_IMAGE_NAME
elif kind_of_image_issue:
image_type = ISSUE_IMAGE_NAME
elif kind_of_image_ballotpedia_profile:
image_type = BALLOTPEDIA_IMAGE_NAME
elif kind_of_image_campaignx_photo:
image_type = CAMPAIGNX_PHOTO_IMAGE_NAME
elif kind_of_image_linkedin_profile:
image_type = LINKEDIN_IMAGE_NAME
elif kind_of_image_wikipedia_profile:
image_type = WIKIPEDIA_IMAGE_NAME
elif kind_of_image_other_source:
image_type = other_source
else:
image_type = ''
# Get today's cached images and their versions so that image version can be calculated
cached_todays_we_vote_image_list_results = we_vote_image_manager.retrieve_todays_cached_we_vote_image_list(
voter_we_vote_id=voter_we_vote_id,
campaignx_we_vote_id=campaignx_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
issue_we_vote_id=issue_we_vote_id,
kind_of_image_twitter_profile=kind_of_image_twitter_profile,
kind_of_image_twitter_background=kind_of_image_twitter_background,
kind_of_image_twitter_banner=kind_of_image_twitter_banner,
kind_of_image_facebook_profile=kind_of_image_facebook_profile,
kind_of_image_facebook_background=kind_of_image_facebook_background,
kind_of_image_maplight=kind_of_image_maplight,
kind_of_image_vote_smart=kind_of_image_vote_smart,
kind_of_image_issue=kind_of_image_issue,
kind_of_image_ballotpedia_profile=kind_of_image_ballotpedia_profile,
kind_of_image_campaignx_photo=kind_of_image_campaignx_photo,
kind_of_image_linkedin_profile=kind_of_image_linkedin_profile,
kind_of_image_wikipedia_profile=kind_of_image_wikipedia_profile,
kind_of_image_other_source=kind_of_image_other_source,
kind_of_image_original=kind_of_image_original,
kind_of_image_large=kind_of_image_large,
kind_of_image_medium=kind_of_image_medium,
kind_of_image_tiny=kind_of_image_tiny)
for cached_we_vote_image in cached_todays_we_vote_image_list_results['we_vote_image_list']:
if cached_we_vote_image.same_day_image_version:
image_versions.append(cached_we_vote_image.same_day_image_version)
if image_versions:
same_day_image_version = max(image_versions) + 1
else:
same_day_image_version = 1
if kind_of_image_facebook_profile or kind_of_image_facebook_background:
# image url is valid so store source image of facebook to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_facebook_info(
we_vote_image, facebook_user_id, image_width, image_height,
image_url_https, same_day_image_version, kind_of_image_facebook_profile,
kind_of_image_facebook_background)
elif kind_of_image_twitter_profile or kind_of_image_twitter_background or kind_of_image_twitter_banner:
# image url is valid so store source image of twitter to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_twitter_info(
we_vote_image, twitter_id, image_width, image_height, image_url_https, same_day_image_version,
kind_of_image_twitter_profile, kind_of_image_twitter_background, kind_of_image_twitter_banner)
elif kind_of_image_maplight:
# image url is valid so store source image of maplight to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_maplight_info(
we_vote_image, maplight_id, image_width, image_height, image_url_https, same_day_image_version,
kind_of_image_maplight)
elif kind_of_image_vote_smart:
# image url is valid so store source image of vote smart to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_vote_smart_info(
we_vote_image, vote_smart_id, image_width, image_height, image_url_https, same_day_image_version,
kind_of_image_vote_smart)
elif kind_of_image_issue:
# image url is valid so store source image of issue to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_issue_info(
we_vote_image, image_width, image_height, image_url_https, same_day_image_version)
elif kind_of_image_ballotpedia_profile:
# image url is valid so store source image of ballotpedia to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_ballotpedia_info(
we_vote_image, image_width, image_height, image_url_https, same_day_image_version,
kind_of_image_ballotpedia_profile)
elif kind_of_image_campaignx_photo:
# Update this new image with width, height, original url and version number
save_source_info_results = we_vote_image_manager.save_we_vote_image_campaignx_info(
we_vote_image=we_vote_image,
image_width=image_width,
image_height=image_height,
image_url_https=image_url_https,
same_day_image_version=same_day_image_version,
)
elif kind_of_image_linkedin_profile:
# image url is valid so store source image of linkedin to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_linkedin_info(
we_vote_image, image_width, image_height, image_url_https, same_day_image_version,
kind_of_image_linkedin_profile)
elif kind_of_image_wikipedia_profile:
# image url is valid so store source image of wikipedia to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_wikipedia_info(
we_vote_image, image_width, image_height, image_url_https, same_day_image_version,
kind_of_image_wikipedia_profile)
elif kind_of_image_other_source:
# image url is valid so store source image from other source to WeVoteImage
save_source_info_results = we_vote_image_manager.save_we_vote_image_other_source_info(
we_vote_image, image_width, image_height, other_source, image_url_https, same_day_image_version,
kind_of_image_other_source)
else:
save_source_info_results = {
'status': "KIND_OF_IMAGE_INVALID ",
'success': False,
'we_vote_image': None,
}
status += " " + save_source_info_results['status']
if save_source_info_results['success']:
image_stored_from_source = True
date_image_saved = "{year}{:02d}{:02d}".format(we_vote_image.date_image_saved.month,
we_vote_image.date_image_saved.day,
year=we_vote_image.date_image_saved.year)
# ex twitter_profile_image_master-2017210_1_48x48.png
we_vote_image_file_name = "{image_type}-{date_image_saved}_{counter}_" \
"{image_width}x{image_height}.{image_format}" \
"".format(image_type=image_type,
date_image_saved=date_image_saved,
counter=str(same_day_image_version),
image_width=str(image_width),
image_height=str(image_height),
image_format=str(image_format))
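# The file is stored under a folder named after the owning entity (voter, campaign, candidate, organization
# or issue we_vote_id)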
if voter_we_vote_id:
we_vote_image_file_location = voter_we_vote_id + "/" + we_vote_image_file_name
elif campaignx_we_vote_id:
we_vote_image_file_location = campaignx_we_vote_id + "/" + we_vote_image_file_name
elif candidate_we_vote_id:
we_vote_image_file_location = candidate_we_vote_id + "/" + we_vote_image_file_name
elif organization_we_vote_id:
we_vote_image_file_location = organization_we_vote_id + "/" + we_vote_image_file_name
elif issue_we_vote_id:
we_vote_image_file_location = issue_we_vote_id + "/" + we_vote_image_file_name
image_stored_locally = we_vote_image_manager.store_image_locally(
image_url_https, we_vote_image_file_name)
if not image_stored_locally:
error_results = {
'success': success,
'status': status + " IMAGE_NOT_STORED_LOCALLY ",
'we_vote_image_created': we_vote_image_created,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': False,
'resized_image_created': resized_image_created,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
status += " IMAGE_STORED_LOCALLY "
resized_image_created = we_vote_image_manager.resize_we_vote_master_image(
image_local_path=we_vote_image_file_name,
image_width=image_width,
image_height=image_height,
image_type=image_type,
image_offset_x=image_offset_x,
image_offset_y=image_offset_y)
if not resized_image_created:
error_results = {
'success': success,
'status': status + " IMAGE_NOT_STORED_LOCALLY ",
'we_vote_image_created': we_vote_image_created,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'resized_image_created': False,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
status += " RESIZED_IMAGE_CREATED "
image_stored_to_aws = we_vote_image_manager.store_image_to_aws(
we_vote_image_file_name, we_vote_image_file_location, image_format)
if not image_stored_to_aws:
error_results = {
'success': success,
'status': status + " IMAGE_NOT_STORED_TO_AWS",
'we_vote_image_created': we_vote_image_created,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'resized_image_created': resized_image_created,
'image_stored_to_aws': False,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
we_vote_image_url = "https://{bucket_name}.s3.amazonaws.com/{we_vote_image_file_location}" \
"".format(bucket_name=AWS_STORAGE_BUCKET_NAME,
we_vote_image_file_location=we_vote_image_file_location)
# if we_vote_image_url is not empty then save we_vote_image_aws_info else delete the we_vote_image entry
if we_vote_image_url is not None and we_vote_image_url != "":
save_aws_info = we_vote_image_manager.save_we_vote_image_aws_info(
we_vote_image=we_vote_image,
we_vote_image_url=we_vote_image_url,
we_vote_image_file_location=we_vote_image_file_location,
we_vote_parent_image_id=we_vote_parent_image_id,
is_active_version=is_active_version)
else:
status += " WE_VOTE_IMAGE_URL_IS_EMPTY"
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'resized_image_created': resized_image_created,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
status += " IMAGE_STORED_TO_AWS " + save_aws_info['status']
success = save_aws_info['success']
if not success:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'resized_image_created': resized_image_created,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
else:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_stored_from_source': False,
'image_stored_locally': image_stored_locally,
'resized_image_created': resized_image_created,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': image_stored_locally,
'resized_image_created': resized_image_created,
'image_stored_to_aws': image_stored_to_aws,
}
return results
def create_resized_images(
voter_we_vote_id=None,
campaignx_we_vote_id=None,
candidate_we_vote_id=None,
organization_we_vote_id=None,
twitter_profile_image_url_https=None,
twitter_profile_background_image_url_https=None,
twitter_profile_banner_url_https=None,
facebook_profile_image_url_https=None,
facebook_background_image_url_https=None,
maplight_image_url_https=None,
vote_smart_image_url_https=None,
ballotpedia_profile_image_url=None,
campaignx_photo_url_https=None,
linkedin_profile_image_url=None,
wikipedia_profile_image_url=None,
other_source_image_url=None):
"""
Create resized (large, medium and tiny) versions of a previously cached master image and return the cached image URLs
:param voter_we_vote_id:
:param campaignx_we_vote_id:
:param candidate_we_vote_id:
:param organization_we_vote_id:
:param twitter_profile_image_url_https:
:param twitter_profile_background_image_url_https:
:param twitter_profile_banner_url_https:
:param facebook_profile_image_url_https:
:param facebook_background_image_url_https:
:param maplight_image_url_https:
:param vote_smart_image_url_https:
:param ballotpedia_profile_image_url:
:param campaignx_photo_url_https:
:param linkedin_profile_image_url:
:param wikipedia_profile_image_url:
:param other_source_image_url:
:return:
"""
cached_master_image_url = None
cached_resized_image_url_large = None
cached_resized_image_url_medium = None
cached_resized_image_url_tiny = None
we_vote_image_manager = WeVoteImageManager()
# Retrieve cached master image url from WeVoteImage table
cached_we_vote_image_results = we_vote_image_manager.retrieve_we_vote_image_from_url(
voter_we_vote_id=voter_we_vote_id,
campaignx_we_vote_id=campaignx_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
twitter_profile_image_url_https=twitter_profile_image_url_https,
twitter_profile_background_image_url_https=twitter_profile_background_image_url_https,
twitter_profile_banner_url_https=twitter_profile_banner_url_https,
facebook_profile_image_url_https=facebook_profile_image_url_https,
facebook_background_image_url_https=facebook_background_image_url_https,
maplight_image_url_https=maplight_image_url_https,
vote_smart_image_url_https=vote_smart_image_url_https,
ballotpedia_profile_image_url=ballotpedia_profile_image_url,
campaignx_photo_url_https=campaignx_photo_url_https,
linkedin_profile_image_url=linkedin_profile_image_url,
wikipedia_profile_image_url=wikipedia_profile_image_url,
other_source_image_url=other_source_image_url,
kind_of_image_original=True)
if cached_we_vote_image_results['success']:
cached_we_vote_image = cached_we_vote_image_results['we_vote_image']
cached_master_image_url = cached_we_vote_image.we_vote_image_url
# Create resized image if not created before
create_resized_image_results = create_resized_image_if_not_created(cached_we_vote_image)
# Retrieve resized large version image url
if create_resized_image_results['cached_large_image']:
cached_resized_we_vote_image_results = we_vote_image_manager.retrieve_we_vote_image_from_url(
voter_we_vote_id=voter_we_vote_id,
campaignx_we_vote_id=campaignx_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
twitter_profile_image_url_https=twitter_profile_image_url_https,
twitter_profile_background_image_url_https=twitter_profile_background_image_url_https,
twitter_profile_banner_url_https=twitter_profile_banner_url_https,
facebook_profile_image_url_https=facebook_profile_image_url_https,
facebook_background_image_url_https=facebook_background_image_url_https,
maplight_image_url_https=maplight_image_url_https,
vote_smart_image_url_https=vote_smart_image_url_https,
ballotpedia_profile_image_url=ballotpedia_profile_image_url,
campaignx_photo_url_https=campaignx_photo_url_https,
linkedin_profile_image_url=linkedin_profile_image_url,
wikipedia_profile_image_url=wikipedia_profile_image_url,
other_source_image_url=other_source_image_url,
kind_of_image_large=True)
if cached_resized_we_vote_image_results['success']:
cached_resized_we_vote_image = cached_resized_we_vote_image_results['we_vote_image']
cached_resized_image_url_large = cached_resized_we_vote_image.we_vote_image_url
if create_resized_image_results['cached_medium_image']:
# Retrieve resized medium version image url
cached_resized_we_vote_image_results = we_vote_image_manager.retrieve_we_vote_image_from_url(
voter_we_vote_id=voter_we_vote_id,
campaignx_we_vote_id=campaignx_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
twitter_profile_image_url_https=twitter_profile_image_url_https,
twitter_profile_background_image_url_https=twitter_profile_background_image_url_https,
twitter_profile_banner_url_https=twitter_profile_banner_url_https,
facebook_profile_image_url_https=facebook_profile_image_url_https,
facebook_background_image_url_https=facebook_background_image_url_https,
maplight_image_url_https=maplight_image_url_https,
vote_smart_image_url_https=vote_smart_image_url_https,
ballotpedia_profile_image_url=ballotpedia_profile_image_url,
campaignx_photo_url_https=campaignx_photo_url_https,
linkedin_profile_image_url=linkedin_profile_image_url,
wikipedia_profile_image_url=wikipedia_profile_image_url,
other_source_image_url=other_source_image_url,
kind_of_image_medium=True)
if cached_resized_we_vote_image_results['success']:
cached_resized_we_vote_image = cached_resized_we_vote_image_results['we_vote_image']
cached_resized_image_url_medium = cached_resized_we_vote_image.we_vote_image_url
if create_resized_image_results['cached_tiny_image']:
# Retrieve resized tiny version image url
cached_resized_we_vote_image_results = we_vote_image_manager.retrieve_we_vote_image_from_url(
voter_we_vote_id=voter_we_vote_id,
campaignx_we_vote_id=campaignx_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
twitter_profile_image_url_https=twitter_profile_image_url_https,
twitter_profile_background_image_url_https=twitter_profile_background_image_url_https,
twitter_profile_banner_url_https=twitter_profile_banner_url_https,
facebook_profile_image_url_https=facebook_profile_image_url_https,
facebook_background_image_url_https=facebook_background_image_url_https,
maplight_image_url_https=maplight_image_url_https,
vote_smart_image_url_https=vote_smart_image_url_https,
ballotpedia_profile_image_url=ballotpedia_profile_image_url,
campaignx_photo_url_https=campaignx_photo_url_https,
linkedin_profile_image_url=linkedin_profile_image_url,
wikipedia_profile_image_url=wikipedia_profile_image_url, other_source_image_url=other_source_image_url,
kind_of_image_tiny=True)
if cached_resized_we_vote_image_results['success']:
cached_resized_we_vote_image = cached_resized_we_vote_image_results['we_vote_image']
cached_resized_image_url_tiny = cached_resized_we_vote_image.we_vote_image_url
results = {
'cached_master_image_url': cached_master_image_url,
'cached_resized_image_url_large': cached_resized_image_url_large,
'cached_resized_image_url_medium': cached_resized_image_url_medium,
'cached_resized_image_url_tiny': cached_resized_image_url_tiny
}
return results
def cache_master_and_resized_image(
twitter_id=None,
twitter_screen_name=None,
twitter_profile_image_url_https=None,
twitter_profile_background_image_url_https=None,
twitter_profile_banner_url_https=None,
voter_id=None,
voter_we_vote_id=None,
candidate_id=None,
candidate_we_vote_id=None,
organization_id=None,
organization_we_vote_id=None,
image_source=None,
facebook_user_id=None,
facebook_profile_image_url_https=None,
facebook_background_image_url_https=None,
facebook_background_image_offset_x=None,
facebook_background_image_offset_y=None,
maplight_id=None,
vote_smart_id=None,
maplight_image_url_https=None,
vote_smart_image_url_https=None,
ballotpedia_profile_image_url=None,
linkedin_profile_image_url=None,
wikipedia_profile_image_url=None,
other_source_image_url=None,
other_source=None):
"""
Start with the URL of an image hosted on another server, cache the master image on the We Vote network
along with re-sized versions, and return the cached URLs.
:param twitter_id:
:param twitter_screen_name:
:param twitter_profile_image_url_https:
:param twitter_profile_background_image_url_https:
:param twitter_profile_banner_url_https:
:param voter_id:
:param voter_we_vote_id:
:param candidate_id:
:param candidate_we_vote_id:
:param organization_id:
:param organization_we_vote_id:
:param image_source: 2017-12-12 Currently not used within WeVoteServer
:param facebook_user_id:
:param facebook_profile_image_url_https:
:param facebook_background_image_url_https:
:param facebook_background_image_offset_x:
:param facebook_background_image_offset_y:
:param maplight_id:
:param vote_smart_id:
:param maplight_image_url_https:
:param vote_smart_image_url_https:
:param ballotpedia_profile_image_url:
:param linkedin_profile_image_url:
:param wikipedia_profile_image_url:
:param other_source_image_url:
:param other_source:
:return:
"""
cached_twitter_profile_image_url_https = None
cached_twitter_profile_background_image_url_https = None
cached_twitter_profile_background_image_url_large = None
cached_twitter_profile_banner_url_https = None
cached_twitter_profile_banner_url_large = None
cached_facebook_profile_image_url_https = None
cached_facebook_background_image_url_https = None
cached_facebook_background_image_url_large = None
cached_maplight_image_url_https = None
cached_vote_smart_image_url_https = None
cached_ballotpedia_image_url_https = None
cached_linkedin_image_url_https = None
cached_wikipedia_image_url_https = None
cached_other_source_image_url_https = None
we_vote_hosted_profile_image_url_large = None
we_vote_hosted_profile_image_url_medium = None
we_vote_hosted_profile_image_url_tiny = None
# Cache the refreshed master images to AWS S3
cache_master_images_results = cache_master_images(
voter_id=voter_id,
voter_we_vote_id=voter_we_vote_id,
candidate_id=candidate_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_id=organization_id,
organization_we_vote_id=organization_we_vote_id,
twitter_id=twitter_id,
twitter_screen_name=twitter_screen_name,
twitter_profile_image_url_https=twitter_profile_image_url_https,
twitter_profile_background_image_url_https=twitter_profile_background_image_url_https,
twitter_profile_banner_url_https=twitter_profile_banner_url_https,
facebook_user_id=facebook_user_id,
facebook_profile_image_url_https=facebook_profile_image_url_https,
facebook_background_image_url_https=facebook_background_image_url_https,
facebook_background_image_offset_x=facebook_background_image_offset_x,
facebook_background_image_offset_y=facebook_background_image_offset_y,
image_source=image_source,
maplight_id=maplight_id,
maplight_image_url_https=maplight_image_url_https,
vote_smart_id=vote_smart_id,
vote_smart_image_url_https=vote_smart_image_url_https,
ballotpedia_profile_image_url=ballotpedia_profile_image_url,
linkedin_profile_image_url=linkedin_profile_image_url,
wikipedia_profile_image_url=wikipedia_profile_image_url,
other_source_image_url=other_source_image_url,
other_source=other_source)
# If the master image was just cached, or was already cached, create all of its resized versions
if cache_master_images_results['cached_twitter_profile_image'] is True or \
cache_master_images_results['cached_twitter_profile_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
twitter_profile_image_url_https=twitter_profile_image_url_https)
cached_twitter_profile_image_url_https = create_resized_image_results['cached_master_image_url']
we_vote_hosted_profile_image_url_large = create_resized_image_results['cached_resized_image_url_large']
we_vote_hosted_profile_image_url_medium = create_resized_image_results['cached_resized_image_url_medium']
we_vote_hosted_profile_image_url_tiny = create_resized_image_results['cached_resized_image_url_tiny']
if cache_master_images_results['cached_twitter_background_image'] is True or \
cache_master_images_results['cached_twitter_background_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
twitter_profile_background_image_url_https=twitter_profile_background_image_url_https)
cached_twitter_profile_background_image_url_https = create_resized_image_results['cached_master_image_url']
cached_twitter_profile_background_image_url_large = \
create_resized_image_results['cached_resized_image_url_large']
if cache_master_images_results['cached_twitter_banner_image'] is True or \
cache_master_images_results['cached_twitter_banner_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
twitter_profile_banner_url_https=twitter_profile_banner_url_https)
cached_twitter_profile_banner_url_https = create_resized_image_results['cached_master_image_url']
cached_twitter_profile_banner_url_large = create_resized_image_results['cached_resized_image_url_large']
if cache_master_images_results['cached_facebook_profile_image'] is True or \
cache_master_images_results['cached_facebook_profile_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
facebook_profile_image_url_https=facebook_profile_image_url_https)
cached_facebook_profile_image_url_https = create_resized_image_results['cached_master_image_url']
we_vote_hosted_profile_image_url_large = create_resized_image_results['cached_resized_image_url_large']
we_vote_hosted_profile_image_url_medium = create_resized_image_results['cached_resized_image_url_medium']
we_vote_hosted_profile_image_url_tiny = create_resized_image_results['cached_resized_image_url_tiny']
if cache_master_images_results['cached_facebook_background_image'] is True or \
cache_master_images_results['cached_facebook_background_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
facebook_background_image_url_https=facebook_background_image_url_https)
cached_facebook_background_image_url_https = create_resized_image_results['cached_master_image_url']
cached_facebook_background_image_url_large = create_resized_image_results['cached_resized_image_url_large']
if cache_master_images_results['cached_maplight_image'] is True or \
cache_master_images_results['cached_maplight_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
maplight_image_url_https=maplight_image_url_https)
cached_maplight_image_url_https = create_resized_image_results['cached_master_image_url']
we_vote_hosted_profile_image_url_large = create_resized_image_results['cached_resized_image_url_large']
we_vote_hosted_profile_image_url_medium = create_resized_image_results['cached_resized_image_url_medium']
we_vote_hosted_profile_image_url_tiny = create_resized_image_results['cached_resized_image_url_tiny']
if cache_master_images_results['cached_vote_smart_image'] is True or \
cache_master_images_results['cached_vote_smart_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
vote_smart_image_url_https=vote_smart_image_url_https)
cached_vote_smart_image_url_https = create_resized_image_results['cached_master_image_url']
we_vote_hosted_profile_image_url_large = create_resized_image_results['cached_resized_image_url_large']
we_vote_hosted_profile_image_url_medium = create_resized_image_results['cached_resized_image_url_medium']
we_vote_hosted_profile_image_url_tiny = create_resized_image_results['cached_resized_image_url_tiny']
if cache_master_images_results['cached_ballotpedia_image'] is True or \
cache_master_images_results['cached_ballotpedia_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
ballotpedia_profile_image_url=ballotpedia_profile_image_url)
cached_ballotpedia_image_url_https = create_resized_image_results['cached_master_image_url']
we_vote_hosted_profile_image_url_large = create_resized_image_results['cached_resized_image_url_large']
we_vote_hosted_profile_image_url_medium = create_resized_image_results['cached_resized_image_url_medium']
we_vote_hosted_profile_image_url_tiny = create_resized_image_results['cached_resized_image_url_tiny']
if cache_master_images_results['cached_linkedin_image'] is True or \
cache_master_images_results['cached_linkedin_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
linkedin_profile_image_url=linkedin_profile_image_url)
cached_linkedin_image_url_https = create_resized_image_results['cached_master_image_url']
we_vote_hosted_profile_image_url_large = create_resized_image_results['cached_resized_image_url_large']
we_vote_hosted_profile_image_url_medium = create_resized_image_results['cached_resized_image_url_medium']
we_vote_hosted_profile_image_url_tiny = create_resized_image_results['cached_resized_image_url_tiny']
if cache_master_images_results['cached_wikipedia_image'] is True or \
cache_master_images_results['cached_wikipedia_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
wikipedia_profile_image_url=wikipedia_profile_image_url)
cached_wikipedia_image_url_https = create_resized_image_results['cached_master_image_url']
we_vote_hosted_profile_image_url_large = create_resized_image_results['cached_resized_image_url_large']
we_vote_hosted_profile_image_url_medium = create_resized_image_results['cached_resized_image_url_medium']
we_vote_hosted_profile_image_url_tiny = create_resized_image_results['cached_resized_image_url_tiny']
if cache_master_images_results['cached_other_source_image'] is True or \
cache_master_images_results['cached_other_source_image'] == IMAGE_ALREADY_CACHED:
create_resized_image_results = create_resized_images(
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
other_source_image_url=other_source_image_url)
cached_other_source_image_url_https = create_resized_image_results['cached_master_image_url']
we_vote_hosted_profile_image_url_large = create_resized_image_results['cached_resized_image_url_large']
we_vote_hosted_profile_image_url_medium = create_resized_image_results['cached_resized_image_url_medium']
we_vote_hosted_profile_image_url_tiny = create_resized_image_results['cached_resized_image_url_tiny']
results = {
'cached_twitter_profile_image_url_https': cached_twitter_profile_image_url_https,
'cached_twitter_profile_background_image_url_https': cached_twitter_profile_background_image_url_https,
'cached_twitter_profile_background_image_url_large': cached_twitter_profile_background_image_url_large,
'cached_twitter_profile_banner_url_https': cached_twitter_profile_banner_url_https,
'cached_twitter_profile_banner_url_large': cached_twitter_profile_banner_url_large,
'cached_facebook_profile_image_url_https': cached_facebook_profile_image_url_https,
'cached_facebook_background_image_url_https': cached_facebook_background_image_url_https,
'cached_facebook_background_image_url_large': cached_facebook_background_image_url_large,
'cached_maplight_image_url_https': cached_maplight_image_url_https,
'cached_vote_smart_image_url_https': cached_vote_smart_image_url_https,
'cached_ballotpedia_image_url_https': cached_ballotpedia_image_url_https,
'cached_linkedin_image_url_https': cached_linkedin_image_url_https,
'cached_wikipedia_image_url_https': cached_wikipedia_image_url_https,
'cached_other_source_image_url_https': cached_other_source_image_url_https,
'we_vote_hosted_profile_image_url_large': we_vote_hosted_profile_image_url_large,
'we_vote_hosted_profile_image_url_medium': we_vote_hosted_profile_image_url_medium,
'we_vote_hosted_profile_image_url_tiny': we_vote_hosted_profile_image_url_tiny
}
return results
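# A minimal usage sketch (hypothetical values, not taken from this module's real call sites): a caller that
# has just fetched fresh Twitter data might cache and resize the profile photo like this:
#
#     results = cache_master_and_resized_image(
#         twitter_id=123456789,
#         twitter_screen_name="example_handle",
#         twitter_profile_image_url_https="https://pbs.twimg.com/profile_images/example_normal.jpg",
#         candidate_we_vote_id="wv02cand9999")
#     profile_image_url_tiny = results['we_vote_hosted_profile_image_url_tiny']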
def cache_master_images(
twitter_id=None,
twitter_screen_name=None,
twitter_profile_image_url_https=None,
twitter_profile_background_image_url_https=None,
twitter_profile_banner_url_https=None,
voter_id=None,
voter_we_vote_id=None,
candidate_id=None,
candidate_we_vote_id=None,
organization_id=None,
organization_we_vote_id=None,
image_source=None, facebook_user_id=None,
facebook_profile_image_url_https=None,
facebook_background_image_url_https=None,
facebook_background_image_offset_x=None,
facebook_background_image_offset_y=None,
maplight_id=None,
vote_smart_id=None,
maplight_image_url_https=None,
vote_smart_image_url_https=None,
ballotpedia_profile_image_url=None,
linkedin_profile_image_url=None,
wikipedia_profile_image_url=None,
other_source_image_url=None,
other_source=None):
"""
Collect all kinds of images (profile, background, banner, etc.) hosted at URLs outside of the We Vote network,
and cache them on the We Vote network for a voter, candidate or organization
:param twitter_id:
:param twitter_screen_name:
:param twitter_profile_image_url_https:
:param twitter_profile_background_image_url_https:
:param twitter_profile_banner_url_https:
:param voter_id:
:param voter_we_vote_id:
:param candidate_id:
:param candidate_we_vote_id:
:param organization_id:
:param organization_we_vote_id:
:param image_source: 2017-12-12 Currently not used within WeVoteServer
:param facebook_user_id:
:param facebook_profile_image_url_https:
:param facebook_background_image_url_https:
:param facebook_background_image_offset_x:
:param facebook_background_image_offset_y:
:param maplight_id:
:param maplight_image_url_https:
:param vote_smart_id:
:param vote_smart_image_url_https:
:param ballotpedia_profile_image_url:
:param linkedin_profile_image_url:
:param wikipedia_profile_image_url:
:param other_source_image_url:
:param other_source:
:return:
"""
cache_all_kind_of_images_results = {
'image_source': image_source,
'voter_id': voter_id,
'voter_we_vote_id': voter_we_vote_id,
'candidate_id': candidate_id,
'candidate_we_vote_id': candidate_we_vote_id,
'organization_id': organization_id,
'organization_we_vote_id': organization_we_vote_id,
'cached_twitter_profile_image': False,
'cached_twitter_background_image': False,
'cached_twitter_banner_image': False,
'cached_facebook_profile_image': False,
'cached_facebook_background_image': False,
'cached_maplight_image': False,
'cached_vote_smart_image': False,
'cached_ballotpedia_image': False,
'cached_linkedin_image': False,
'cached_wikipedia_image': False,
'cached_other_source_image': False,
}
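# Each cached_* flag above starts as False and is updated below: it is set to a *_URL_NOT_FOUND constant when
# no source URL was supplied, otherwise to the value returned by cache_image_if_not_cached()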
google_civic_election_id = 0
we_vote_image_manager = WeVoteImageManager()
if not twitter_profile_image_url_https:
cache_all_kind_of_images_results['cached_twitter_profile_image'] = TWITTER_URL_NOT_FOUND
else:
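# Normalize the Twitter profile image URL so it points at the original (full-size) image before caching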
twitter_profile_image_url_https = we_vote_image_manager.twitter_profile_image_url_https_original(
twitter_profile_image_url_https)
if not twitter_profile_background_image_url_https:
cache_all_kind_of_images_results['cached_twitter_background_image'] = TWITTER_URL_NOT_FOUND
if not twitter_profile_banner_url_https:
cache_all_kind_of_images_results['cached_twitter_banner_image'] = TWITTER_URL_NOT_FOUND
if not facebook_profile_image_url_https:
cache_all_kind_of_images_results['cached_facebook_profile_image'] = FACEBOOK_URL_NOT_FOUND
if not facebook_background_image_url_https:
cache_all_kind_of_images_results['cached_facebook_background_image'] = FACEBOOK_URL_NOT_FOUND
if not maplight_image_url_https:
cache_all_kind_of_images_results['cached_maplight_image'] = MAPLIGHT_URL_NOT_FOUND
if not vote_smart_image_url_https:
cache_all_kind_of_images_results['cached_vote_smart_image'] = VOTE_SMART_URL_NOT_FOUND
if not ballotpedia_profile_image_url:
cache_all_kind_of_images_results['cached_ballotpedia_image'] = BALLOTPEDIA_URL_NOT_FOUND
if not linkedin_profile_image_url:
cache_all_kind_of_images_results['cached_linkedin_image'] = LINKEDIN_URL_NOT_FOUND
if not wikipedia_profile_image_url:
cache_all_kind_of_images_results['cached_wikipedia_image'] = WIKIPEDIA_URL_NOT_FOUND
if not other_source_image_url:
cache_all_kind_of_images_results['cached_other_source_image'] = OTHER_SOURCE_URL_NOT_FOUND
if twitter_profile_image_url_https:
cache_all_kind_of_images_results['cached_twitter_profile_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=twitter_profile_image_url_https, voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id, organization_we_vote_id=organization_we_vote_id,
twitter_id=twitter_id, twitter_screen_name=twitter_screen_name, is_active_version=True,
kind_of_image_twitter_profile=True, kind_of_image_original=True)
if twitter_profile_background_image_url_https:
cache_all_kind_of_images_results['cached_twitter_background_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=twitter_profile_background_image_url_https,
voter_we_vote_id=voter_we_vote_id, candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id, twitter_id=twitter_id,
twitter_screen_name=twitter_screen_name, is_active_version=True,
kind_of_image_twitter_background=True, kind_of_image_original=True)
if twitter_profile_banner_url_https:
cache_all_kind_of_images_results['cached_twitter_banner_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=twitter_profile_banner_url_https,
voter_we_vote_id=voter_we_vote_id, candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id, twitter_id=twitter_id,
twitter_screen_name=twitter_screen_name, is_active_version=True,
kind_of_image_twitter_banner=True, kind_of_image_original=True)
if facebook_profile_image_url_https:
cache_all_kind_of_images_results['cached_facebook_profile_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=facebook_profile_image_url_https,
voter_we_vote_id=voter_we_vote_id, candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id, facebook_user_id=facebook_user_id,
is_active_version=True, kind_of_image_facebook_profile=True, kind_of_image_original=True)
if facebook_background_image_url_https:
cache_all_kind_of_images_results['cached_facebook_background_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=facebook_background_image_url_https,
voter_we_vote_id=voter_we_vote_id, candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id, facebook_user_id=facebook_user_id,
is_active_version=True, kind_of_image_facebook_background=True,
facebook_background_image_offset_x=facebook_background_image_offset_x,
facebook_background_image_offset_y=facebook_background_image_offset_y,
kind_of_image_original=True)
if maplight_image_url_https:
cache_all_kind_of_images_results['cached_maplight_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=maplight_image_url_https,
voter_we_vote_id=voter_we_vote_id, candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id, maplight_id=maplight_id,
is_active_version=True, kind_of_image_maplight=True, kind_of_image_original=True)
if vote_smart_image_url_https:
cache_all_kind_of_images_results['cached_vote_smart_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=vote_smart_image_url_https,
voter_we_vote_id=voter_we_vote_id, candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id, vote_smart_id=vote_smart_id,
is_active_version=True, kind_of_image_vote_smart=True, kind_of_image_original=True)
if ballotpedia_profile_image_url:
cache_all_kind_of_images_results['cached_ballotpedia_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=ballotpedia_profile_image_url,
voter_we_vote_id=voter_we_vote_id,
candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id,
is_active_version=True,
kind_of_image_ballotpedia_profile=True,
kind_of_image_original=True)
if linkedin_profile_image_url:
cache_all_kind_of_images_results['cached_linkedin_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=linkedin_profile_image_url,
voter_we_vote_id=voter_we_vote_id, candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id, is_active_version=True,
kind_of_image_linkedin_profile=True, kind_of_image_original=True)
if wikipedia_profile_image_url:
cache_all_kind_of_images_results['cached_wikipedia_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=wikipedia_profile_image_url,
voter_we_vote_id=voter_we_vote_id, candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id, is_active_version=True,
kind_of_image_wikipedia_profile=True, kind_of_image_original=True)
if other_source_image_url:
cache_all_kind_of_images_results['cached_other_source_image'] = cache_image_if_not_cached(
google_civic_election_id=google_civic_election_id,
image_url_https=other_source_image_url,
voter_we_vote_id=voter_we_vote_id, candidate_we_vote_id=candidate_we_vote_id,
organization_we_vote_id=organization_we_vote_id, is_active_version=True,
kind_of_image_other_source=True, kind_of_image_original=True, other_source=other_source)
return cache_all_kind_of_images_results
def cache_issue_image_master(google_civic_election_id, issue_image_file, issue_we_vote_id=None,
kind_of_image_issue=False, kind_of_image_original=False):
"""
Cache a master issue image to AWS. This function is a more focused version of cache_image_locally (which deals
with all of the standard photos, such as Facebook or Twitter images).
:param google_civic_election_id:
:param issue_image_file:
:param issue_we_vote_id:
:param kind_of_image_issue:
:param kind_of_image_original:
:return:
"""
we_vote_parent_image_id = None
success = False
status = ''
is_active_version = True
we_vote_image_created = False
image_url_valid = False
image_stored_from_source = False
image_stored_to_aws = False
image_versions = []
we_vote_image_manager = WeVoteImageManager()
# create we_vote_image entry with issue_we_vote_id and google_civic_election_id and kind_of_image
create_we_vote_image_results = we_vote_image_manager.create_we_vote_image(
google_civic_election_id=google_civic_election_id,
issue_we_vote_id=issue_we_vote_id,
kind_of_image_issue=kind_of_image_issue,
kind_of_image_original=kind_of_image_original)
status += create_we_vote_image_results['status']
if not create_we_vote_image_results['we_vote_image_saved']:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
return error_results
we_vote_image_created = True
we_vote_image = create_we_vote_image_results['we_vote_image']
# Validate the image file and get the source image properties
analyze_source_images_results = analyze_image_file(issue_image_file)
if not analyze_source_images_results['image_url_valid']:
error_results = {
'success': success,
'status': status + " IMAGE_URL_NOT_VALID",
'we_vote_image_created': True,
'image_url_valid': False,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
image_url_valid = True
status += " IMAGE_URL_VALID"
image_width = analyze_source_images_results['image_width']
image_height = analyze_source_images_results['image_height']
image_format = analyze_source_images_results['image_format']
# Get today's cached images and their versions so that image version can be calculated
cached_todays_we_vote_image_list_results = we_vote_image_manager.retrieve_todays_cached_we_vote_image_list(
issue_we_vote_id=issue_we_vote_id,
kind_of_image_issue=kind_of_image_issue,
kind_of_image_original=kind_of_image_original)
for cached_we_vote_image in cached_todays_we_vote_image_list_results['we_vote_image_list']:
if cached_we_vote_image.same_day_image_version:
image_versions.append(cached_we_vote_image.same_day_image_version)
if image_versions:
same_day_image_version = max(image_versions) + 1
else:
same_day_image_version = 1
image_stored_from_source = True
date_image_saved = "{year}{:02d}{:02d}".format(we_vote_image.date_image_saved.month,
we_vote_image.date_image_saved.day,
year=we_vote_image.date_image_saved.year)
# ex issue_image_master-2017210_1_48x48.png
we_vote_image_file_name = "{image_type}_{master_image}-{date_image_saved}_{counter}_" \
"{image_width}x{image_height}.{image_format}" \
"".format(image_type=ISSUE_IMAGE_NAME,
master_image=MASTER_IMAGE, date_image_saved=date_image_saved,
counter=str(same_day_image_version),
image_width=str(image_width),
image_height=str(image_height),
image_format=str(image_format))
we_vote_image_file_location = issue_we_vote_id + "/" + we_vote_image_file_name
image_stored_to_aws = we_vote_image_manager.store_image_file_to_aws(
issue_image_file, we_vote_image_file_location)
if not image_stored_to_aws:
error_results = {
'success': success,
'status': status + " IMAGE_NOT_STORED_TO_AWS",
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': False,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
we_vote_image_url = "https://{bucket_name}.s3.amazonaws.com/{we_vote_image_file_location}" \
"".format(bucket_name=AWS_STORAGE_BUCKET_NAME,
we_vote_image_file_location=we_vote_image_file_location)
save_aws_info = we_vote_image_manager.save_we_vote_image_aws_info(we_vote_image, we_vote_image_url,
we_vote_image_file_location,
we_vote_parent_image_id, is_active_version)
status += " IMAGE_STORED_TO_AWS " + save_aws_info['status']
success = save_aws_info['success']
if not success:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
save_source_info_results = we_vote_image_manager.save_we_vote_image_issue_info(
we_vote_image, analyze_source_images_results['image_width'],
analyze_source_images_results['image_height'], we_vote_image.we_vote_image_url,
same_day_image_version, image_url_valid)
status += " " + save_source_info_results['status']
if not save_source_info_results['success']:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': False,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
# set is_active_version to False for the other master images of this same issue
set_active_version_false_results = we_vote_image_manager.set_active_version_false_for_other_images(
issue_we_vote_id=issue_we_vote_id,
image_url_https=we_vote_image.we_vote_image_url,
kind_of_image_issue=True)
results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': we_vote_image
}
return results
def cache_organization_sharing_image(
python_image_library_image=None,
organization_we_vote_id=None,
kind_of_image_original=False,
kind_of_image_chosen_favicon=False,
kind_of_image_chosen_logo=False,
kind_of_image_chosen_social_share_master=False):
"""
Cache master "chosen" images to AWS. This function is a more focused version of cache_image_locally
(which deals with all of the standard profile photos like Facebook, or Twitter).
:param python_image_library_image:
:param organization_we_vote_id:
:param kind_of_image_original:
:param kind_of_image_chosen_favicon:
:param kind_of_image_chosen_logo:
:param kind_of_image_chosen_social_share_master:
:return:
"""
we_vote_parent_image_id = None
success = False
status = ''
is_active_version = True
we_vote_image_created = False
image_url_valid = False
image_stored_from_source = False
image_stored_to_aws = False
image_versions = []
we_vote_image_manager = WeVoteImageManager()
create_we_vote_image_results = we_vote_image_manager.create_we_vote_image(
organization_we_vote_id=organization_we_vote_id,
kind_of_image_chosen_favicon=kind_of_image_chosen_favicon,
kind_of_image_chosen_logo=kind_of_image_chosen_logo,
kind_of_image_chosen_social_share_master=kind_of_image_chosen_social_share_master,
kind_of_image_original=kind_of_image_original)
status += create_we_vote_image_results['status']
if not create_we_vote_image_results['we_vote_image_saved']:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
return error_results
we_vote_image_created = True
we_vote_image = create_we_vote_image_results['we_vote_image']
# Validate the image in memory and get the source image properties
analyze_source_images_results = analyze_image_in_memory(python_image_library_image)
if not analyze_source_images_results['image_url_valid']:
error_results = {
'success': success,
'status': status + " IMAGE_URL_NOT_VALID ",
'we_vote_image_created': True,
'image_url_valid': False,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
image_url_valid = True
status += " IMAGE_URL_VALID "
image_width = analyze_source_images_results['image_width']
image_height = analyze_source_images_results['image_height']
image_format = analyze_source_images_results['image_format']
# Get today's cached images and their versions so that image version can be calculated
cached_todays_we_vote_image_list_results = we_vote_image_manager.retrieve_todays_cached_we_vote_image_list(
organization_we_vote_id=organization_we_vote_id,
kind_of_image_chosen_favicon=kind_of_image_chosen_favicon,
kind_of_image_chosen_logo=kind_of_image_chosen_logo,
kind_of_image_chosen_social_share_master=kind_of_image_chosen_social_share_master,
kind_of_image_original=kind_of_image_original)
for cached_we_vote_image in cached_todays_we_vote_image_list_results['we_vote_image_list']:
if cached_we_vote_image.same_day_image_version:
image_versions.append(cached_we_vote_image.same_day_image_version)
if image_versions:
same_day_image_version = max(image_versions) + 1
else:
same_day_image_version = 1
image_stored_from_source = True
date_image_saved = "{year}{:02d}{:02d}".format(we_vote_image.date_image_saved.month,
we_vote_image.date_image_saved.day,
year=we_vote_image.date_image_saved.year)
if kind_of_image_chosen_favicon:
image_type = CHOSEN_FAVICON_NAME
elif kind_of_image_chosen_logo:
image_type = CHOSEN_LOGO_NAME
elif kind_of_image_chosen_social_share_master:
image_type = CHOSEN_SOCIAL_SHARE_IMAGE_NAME
else:
image_type = 'organization_sharing'
if kind_of_image_original:
master_image = MASTER_IMAGE
else:
master_image = 'calculated'
# ex organization_sharing_master-2017210_1_48x48.png
we_vote_image_file_name = "{image_type}_{master_image}-{date_image_saved}_{counter}_" \
"{image_width}x{image_height}.{image_format}" \
"".format(image_type=image_type,
master_image=master_image,
date_image_saved=date_image_saved,
counter=str(same_day_image_version),
image_width=str(image_width),
image_height=str(image_height),
image_format=str(image_format))
we_vote_image_file_location = organization_we_vote_id + "/" + we_vote_image_file_name
image_stored_locally = we_vote_image_manager.store_python_image_locally(
python_image_library_image, we_vote_image_file_name)
if not image_stored_locally:
error_results = {
'success': success,
'status': status + " IMAGE_NOT_STORED_LOCALLY ",
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_locally': False,
'image_stored_to_aws': image_stored_to_aws,
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
image_stored_to_aws = we_vote_image_manager.store_image_to_aws(
we_vote_image_file_name, we_vote_image_file_location, image_format)
if not image_stored_to_aws:
error_results = {
'success': success,
'status': status + " IMAGE_NOT_STORED_TO_AWS ",
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': False,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
we_vote_image_url = "https://{bucket_name}.s3.amazonaws.com/{we_vote_image_file_location}" \
"".format(bucket_name=AWS_STORAGE_BUCKET_NAME,
we_vote_image_file_location=we_vote_image_file_location)
save_aws_info = we_vote_image_manager.save_we_vote_image_aws_info(we_vote_image, we_vote_image_url,
we_vote_image_file_location,
we_vote_parent_image_id, is_active_version)
status += " IMAGE_STORED_TO_AWS " + save_aws_info['status']
success = save_aws_info['success']
if not success:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
save_source_info_results = we_vote_image_manager.save_we_vote_image_organization_share_info(
we_vote_image, analyze_source_images_results['image_width'],
analyze_source_images_results['image_height'], we_vote_image.we_vote_image_url,
same_day_image_version, image_url_valid,
kind_of_image_chosen_favicon=kind_of_image_chosen_favicon, kind_of_image_chosen_logo=kind_of_image_chosen_logo,
kind_of_image_chosen_social_share_master=kind_of_image_chosen_social_share_master)
status += " " + save_source_info_results['status']
if not save_source_info_results['success']:
error_results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': False,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': None
}
delete_we_vote_image_results = we_vote_image_manager.delete_we_vote_image(we_vote_image)
return error_results
# set active version False for other master images for same candidate/organization
set_active_version_false_results = we_vote_image_manager.set_active_version_false_for_other_images(
organization_we_vote_id=organization_we_vote_id,
image_url_https=we_vote_image.we_vote_image_url,
kind_of_image_chosen_favicon=kind_of_image_chosen_favicon,
kind_of_image_chosen_logo=kind_of_image_chosen_logo,
kind_of_image_chosen_social_share_master=kind_of_image_chosen_social_share_master)
status += set_active_version_false_results['status']
results = {
'success': success,
'status': status,
'we_vote_image_created': we_vote_image_created,
'image_url_valid': image_url_valid,
'image_stored_from_source': image_stored_from_source,
'image_stored_to_aws': image_stored_to_aws,
'we_vote_image': we_vote_image
}
return results
| 51.790028 | 120 | 0.729405 | 24,364 | 186,962 | 4.963635 | 0.016828 | 0.0706 | 0.076951 | 0.02441 | 0.92252 | 0.893347 | 0.870516 | 0.842311 | 0.808234 | 0.775985 | 0 | 0.001909 | 0.220986 | 186,962 | 3,609 | 121 | 51.804378 | 0.82842 | 0.096742 | 0 | 0.695683 | 0 | 0.000357 | 0.096472 | 0.060239 | 0 | 0 | 0 | 0.001108 | 0 | 1 | 0.011773 | false | 0 | 0.006778 | 0 | 0.043525 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
f0f1a72ab04097c643f4624d6e466e711504643f | 14,875 | py | Python | neutrino_api/controllers/telephony.py | NeutrinoAPI/NeutrinoAPI-Python | cc00933eefef0f40710f606e9fbf2dfb97a4f063 | ["MIT"] | 3 | 2019-01-23T19:34:01.000Z | 2019-12-05T06:21:25.000Z | neutrino_api/controllers/telephony.py | NeutrinoAPI/NeutrinoAPI-Python | cc00933eefef0f40710f606e9fbf2dfb97a4f063 | ["MIT"] | null | null | null | neutrino_api/controllers/telephony.py | NeutrinoAPI/NeutrinoAPI-Python | cc00933eefef0f40710f606e9fbf2dfb97a4f063 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
neutrino_api
This file was automatically generated for NeutrinoAPI by APIMATIC v2.0 ( https://apimatic.io ).
"""
from neutrino_api.api_helper import APIHelper
from neutrino_api.configuration import Configuration
from neutrino_api.controllers.base_controller import BaseController
from neutrino_api.http.auth.custom_query_auth import CustomQueryAuth
from neutrino_api.models.verify_security_code_response import VerifySecurityCodeResponse
from neutrino_api.models.hlr_lookup_response import HLRLookupResponse
from neutrino_api.models.phone_playback_response import PhonePlaybackResponse
from neutrino_api.models.sms_verify_response import SMSVerifyResponse
from neutrino_api.models.sms_message_response import SMSMessageResponse
from neutrino_api.models.phone_verify_response import PhoneVerifyResponse
class Telephony(BaseController):
"""A Controller to access Endpoints in the neutrino_api API."""
def verify_security_code(self,
security_code):
"""Does a POST request to /verify-security-code.
Check if a security code from one of the verify APIs is valid. See:
https://www.neutrinoapi.com/api/verify-security-code/
Args:
security_code (string): The security code to verify
Returns:
VerifySecurityCodeResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/verify-security-code'
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare form parameters
_form_parameters = {
'output-case': 'camel',
'security-code': security_code
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=_form_parameters)
CustomQueryAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, VerifySecurityCodeResponse.from_dictionary)
def hlr_lookup(self,
number,
country_code=None):
"""Does a POST request to /hlr-lookup.
Connect to the global mobile cellular network and retrieve the status
of a mobile device. See: https://www.neutrinoapi.com/api/hlr-lookup/
Args:
number (string): A phone number
country_code (string, optional): ISO 2-letter country code, assume
numbers are based in this country. If not set numbers are
assumed to be in international format (with or without the
leading + sign)
Returns:
HLRLookupResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/hlr-lookup'
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare form parameters
_form_parameters = {
'output-case': 'camel',
'number': number,
'country-code': country_code
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=_form_parameters)
CustomQueryAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, HLRLookupResponse.from_dictionary)
def phone_playback(self,
number,
audio_url):
"""Does a POST request to /phone-playback.
Make an automated call to any valid phone number and playback an audio
message. See: https://www.neutrinoapi.com/api/phone-playback/
Args:
number (string): The phone number to call. Must be in valid
international format
audio_url (string): A URL to a valid audio file. Accepted audio
formats are: <ul> <li>MP3</li> <li>WAV</li> <li>OGG</li>
</ul>You can use the following MP3 URL for testing:
https://www.neutrinoapi.com/test-files/test1.mp3
Returns:
PhonePlaybackResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the request.
"""
# Prepare query URL
_url_path = '/phone-playback'
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare form parameters
_form_parameters = {
'output-case': 'camel',
'number': number,
'audio-url': audio_url
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=_form_parameters)
CustomQueryAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, PhonePlaybackResponse.from_dictionary)
def sms_verify(self,
number,
code_length=5,
security_code=None,
country_code=None,
language_code='en'):
"""Does a POST request to /sms-verify.
Send a unique security code to any mobile device via SMS. See:
https://www.neutrinoapi.com/api/sms-verify/
Args:
number (string): The phone number to send a verification code to
code_length (int, optional): The number of digits to use in the
security code (must be between 4 and 12)
security_code (int, optional): Pass in your own security code.
This is useful if you have implemented TOTP or similar 2FA
methods. If not set then we will generate a secure random
code
country_code (string, optional): ISO 2-letter country code, assume
numbers are based in this country. If not set numbers are
assumed to be in international format (with or without the
leading + sign)
language_code (string, optional): The language to send the
verification code in, available languages are: <ul> <li>de -
German</li> <li>en - English</li> <li>es - Spanish</li> <li>fr
- French</li> <li>it - Italian</li> <li>pt - Portuguese</li>
<li>ru - Russian</li> </ul>
Returns:
SMSVerifyResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the response.
"""
# Prepare query URL
_url_path = '/sms-verify'
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare form parameters
_form_parameters = {
'output-case': 'camel',
'number': number,
'code-length': code_length,
'security-code': security_code,
'country-code': country_code,
'language-code': language_code
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=_form_parameters)
CustomQueryAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, SMSVerifyResponse.from_dictionary)
def sms_message(self,
number,
message,
country_code=None):
"""Does a POST request to /sms-message.
Send a free-form message to any mobile device via SMS. See:
https://www.neutrinoapi.com/api/sms-message/
Args:
number (string): The phone number to send a message to
message (string): The SMS message to send. Messages are truncated
to a maximum of 150 characters for ASCII content OR 70
characters for UTF content
country_code (string, optional): ISO 2-letter country code, assume
numbers are based in this country. If not set numbers are
assumed to be in international format (with or without the
leading + sign)
Returns:
SMSMessageResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the response.
"""
# Prepare query URL
_url_path = '/sms-message'
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare form parameters
_form_parameters = {
'output-case': 'camel',
'number': number,
'message': message,
'country-code': country_code
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=_form_parameters)
CustomQueryAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, SMSMessageResponse.from_dictionary)
def phone_verify(self,
number,
code_length=6,
security_code=None,
playback_delay=800,
country_code=None,
language_code='en'):
"""Does a POST request to /phone-verify.
Make an automated call to any valid phone number and playback a unique
security code. See: https://www.neutrinoapi.com/api/phone-verify/
Args:
number (string): The phone number to send the verification code
to
code_length (int, optional): The number of digits to use in the
security code (between 4 and 12)
security_code (int, optional): Pass in your own security code.
This is useful if you have implemented TOTP or similar 2FA
methods. If not set then we will generate a secure random
code
playback_delay (int, optional): The delay in milliseconds between
the playback of each security code
country_code (string, optional): ISO 2-letter country code, assume
numbers are based in this country. If not set numbers are
assumed to be in international format (with or without the
leading + sign)
language_code (string, optional): The language to playback the
verification code in, available languages are: <ul> <li>de -
German</li> <li>en - English</li> <li>es - Spanish</li> <li>fr
- French</li> <li>it - Italian</li> <li>pt - Portuguese</li>
<li>ru - Russian</li> </ul>
Returns:
PhoneVerifyResponse: Response from the API.
Raises:
APIException: When an error occurs while fetching the data from
the remote API. This exception includes the HTTP Response
code, an error message, and the HTTP body that was received in
the response.
"""
# Prepare query URL
_url_path = '/phone-verify'
_query_builder = Configuration.get_base_uri()
_query_builder += _url_path
_query_url = APIHelper.clean_url(_query_builder)
# Prepare headers
_headers = {
'accept': 'application/json'
}
# Prepare form parameters
_form_parameters = {
'output-case': 'camel',
'number': number,
'code-length': code_length,
'security-code': security_code,
'playback-delay': playback_delay,
'country-code': country_code,
'language-code': language_code
}
# Prepare and execute request
_request = self.http_client.post(_query_url, headers=_headers, parameters=_form_parameters)
CustomQueryAuth.apply(_request)
_context = self.execute_request(_request)
self.validate_response(_context)
# Return appropriate type
return APIHelper.json_deserialize(_context.response.raw_body, PhoneVerifyResponse.from_dictionary)
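# Hedged usage sketch (illustration only, not part of the generated SDK).
# `controller` stands for an instance of the controller class defined in this
# module, already configured with valid Neutrino API credentials; the instance
# and function names below are hypothetical.
def _example_send_sms_code(controller, phone_number):
    """Send a 6-digit verification code via SMS and return the raw API response."""
    # sms_verify generates (or accepts) a security code and delivers it by SMS;
    # see the sms_verify docstring above for the full parameter list.
    return controller.sms_verify(number=phone_number, code_length=6, language_code='en')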
| 39.041995 | 114 | 0.598319 | 1,614 | 14,875 | 5.33767 | 0.140644 | 0.036216 | 0.03343 | 0.034823 | 0.756703 | 0.736158 | 0.725711 | 0.714335 | 0.706674 | 0.692513 | 0 | 0.002931 | 0.334924 | 14,875 | 380 | 115 | 39.144737 | 0.867886 | 0.453714 | 0 | 0.648276 | 1 | 0 | 0.07629 | 0.003141 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041379 | false | 0 | 0.068966 | 0 | 0.158621 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
0b13d49e2ca50ae5128b881792897cd8ab7c7b1d | 4,050 | py | Python | src/conference_scheduler/validator.py | ChahalMandeep/ConferenceScheduler | e5a0eff8796ba5711b5c3b591ff4b9a2e0f61a04 | [
"MIT"
] | 46 | 2017-04-30T17:37:38.000Z | 2022-03-29T12:52:38.000Z | src/conference_scheduler/validator.py | ChahalMandeep/ConferenceScheduler | e5a0eff8796ba5711b5c3b591ff4b9a2e0f61a04 | [
"MIT"
] | 95 | 2017-05-01T16:30:04.000Z | 2018-11-07T10:48:50.000Z | src/conference_scheduler/validator.py | ChahalMandeep/ConferenceScheduler | e5a0eff8796ba5711b5c3b591ff4b9a2e0f61a04 | [
"MIT"
] | 11 | 2017-05-01T16:17:46.000Z | 2022-03-29T12:52:41.000Z | from conference_scheduler import converter
from conference_scheduler.lp_problem import constraints
def array_violations(array, events, slots, beta=None):
"""Take a schedule in array form and return any violated constraints
Parameters
----------
array : np.array
a schedule in array form
events : list or tuple
of resources.Event instances
slots : list or tuple
of resources.Slot instances
beta : optional
value passed through to constraints.all_constraints when the
constraint set is generated
Returns
-------
Generator
of a list of strings indicating the nature of the violated
constraints
"""
return (
c.label
for c in constraints.all_constraints(events, slots, array, beta=beta)
if not c.condition
)
def is_valid_array(array, events, slots):
"""Take a schedule in array form and return whether it is a valid
solution for the given constraints
Parameters
----------
array : np.array
a schedule in array form
events : list or tuple
of resources.Event instances
slots : list or tuple
of resources.Slot instances
Returns
-------
bool
True if array represents a valid solution
"""
if len(array) == 0:
return False
violations = sum(1 for _ in array_violations(array, events, slots))
return violations == 0
def is_valid_solution(solution, events, slots):
"""Take a solution and return whether it is valid for the
given constraints
Parameters
----------
solution: list or tuple
a schedule in solution form
events : list or tuple
of resources.Event instances
slots : list or tuple
of resources.Slot instances
Returns
-------
bool
True if schedule is a valid solution
"""
if len(solution) == 0:
return False
array = converter.solution_to_array(solution, events, slots)
return is_valid_array(array, events, slots)
def solution_violations(solution, events, slots):
"""Take a solution and return a list of violated constraints
Parameters
----------
solution: list or tuple
a schedule in solution form
events : list or tuple
of resources.Event instances
slots : list or tuple
of resources.Slot instances
Returns
-------
Generator
of a list of strings indicating the nature of the violated
constraints
"""
array = converter.solution_to_array(solution, events, slots)
return array_violations(array, events, slots)
def is_valid_schedule(schedule, events, slots):
"""Take a schedule and return whether it is a valid solution for the
given constraints
Parameters
----------
schedule : list or tuple
a schedule in schedule form
events : list or tuple
of resources.Event instances
slots : list or tuple
of resources.Slot instances
Returns
-------
bool
True if schedule is a valid solution
"""
if len(schedule) == 0:
return False
array = converter.schedule_to_array(schedule, events, slots)
return is_valid_array(array, events, slots)
def schedule_violations(schedule, events, slots):
"""Take a schedule and return a list of violated constraints
Parameters
----------
schedule : list or tuple
a schedule in schedule form
events : list or tuple
of resources.Event instances
slots : list or tuple
of resources.Slot instances
Returns
-------
Generator
of a list of strings indicating the nature of the violated
constraints
"""
array = converter.schedule_to_array(schedule, events, slots)
return array_violations(array, events, slots)
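# Hedged usage sketch (illustration only). Assumes `events` and `slots` are
# sequences of resources.Event / resources.Slot instances and `schedule` is a
# schedule in schedule form, as described in the docstrings above; the function
# name is hypothetical.
def report_schedule_violations(schedule, events, slots):
    """Print each violated constraint label, or confirm the schedule is valid."""
    if is_valid_schedule(schedule, events, slots):
        print('schedule is valid')
        return
    for label in schedule_violations(schedule, events, slots):
        print(label)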
| 27.364865 | 77 | 0.613086 | 475 | 4,050 | 5.168421 | 0.141053 | 0.041548 | 0.076171 | 0.068839 | 0.819145 | 0.767413 | 0.756008 | 0.756008 | 0.632179 | 0.56945 | 0 | 0.001808 | 0.317284 | 4,050 | 147 | 78 | 27.55102 | 0.886076 | 0.590617 | 0 | 0.37931 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.206897 | false | 0 | 0.068966 | 0 | 0.586207 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 6 |
9be603d18a2e9df5c74038d063039d768c629ddd | 7,157 | py | Python | seleniumwire/webdriver/browser.py | axgdev/selenium-wire | 8c0d2b5f144d04b5050f700abfd45b96d24d7330 | [
"MIT"
] | null | null | null | seleniumwire/webdriver/browser.py | axgdev/selenium-wire | 8c0d2b5f144d04b5050f700abfd45b96d24d7330 | [
"MIT"
] | null | null | null | seleniumwire/webdriver/browser.py | axgdev/selenium-wire | 8c0d2b5f144d04b5050f700abfd45b96d24d7330 | [
"MIT"
] | null | null | null | from selenium.webdriver import Chrome as _Chrome
from selenium.webdriver import ChromeOptions
from selenium.webdriver import Edge as _Edge
from selenium.webdriver import Firefox as _Firefox
from selenium.webdriver import Remote as _Remote
from selenium.webdriver import Safari as _Safari
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from ..proxy import backend
from .request import InspectRequestsMixin
class Firefox(InspectRequestsMixin, _Firefox):
"""Extends the Firefox webdriver to provide additional methods for inspecting requests."""
def __init__(self, *args, seleniumwire_options=None, **kwargs):
"""Initialise a new Firefox WebDriver instance.
Args:
seleniumwire_options: The seleniumwire options dictionary.
"""
if seleniumwire_options is None:
seleniumwire_options = {}
self.proxy = backend.create(
port=seleniumwire_options.get('port', 0),
options=seleniumwire_options
)
if 'port' not in seleniumwire_options: # Auto config mode
try:
capabilities = dict(kwargs.pop('desired_capabilities'))
except KeyError:
capabilities = DesiredCapabilities.FIREFOX.copy()
addr, port = self.proxy.address()
capabilities['proxy'] = {
'proxyType': 'manual',
'httpProxy': '{}:{}'.format(addr, port),
'sslProxy': '{}:{}'.format(addr, port),
'noProxy': seleniumwire_options.pop('exclude_hosts', []),
}
capabilities['acceptInsecureCerts'] = True
kwargs['capabilities'] = capabilities
super().__init__(*args, **kwargs)
def quit(self):
self.proxy.shutdown()
super().quit()
class Chrome(InspectRequestsMixin, _Chrome):
"""Extends the Chrome webdriver to provide additional methods for inspecting requests."""
def __init__(self, *args, seleniumwire_options=None, **kwargs):
"""Initialise a new Chrome WebDriver instance.
Args:
seleniumwire_options: The seleniumwire options dictionary.
"""
if seleniumwire_options is None:
seleniumwire_options = {}
self.proxy = backend.create(
port=seleniumwire_options.get('port', 0),
options=seleniumwire_options
)
if 'port' not in seleniumwire_options: # Auto config mode
try:
capabilities = dict(kwargs.pop('desired_capabilities'))
except KeyError:
capabilities = DesiredCapabilities.CHROME.copy()
addr, port = self.proxy.address()
capabilities['proxy'] = {
'proxyType': 'manual',
'httpProxy': '{}:{}'.format(addr, port),
'sslProxy': '{}:{}'.format(addr, port),
'noProxy': ','.join(seleniumwire_options.pop('exclude_hosts', []))
}
capabilities['acceptInsecureCerts'] = True
kwargs['desired_capabilities'] = capabilities
try:
chrome_options = kwargs.pop('options')
except KeyError:
chrome_options = ChromeOptions()
# Prevent Chrome from bypassing the Selenium Wire proxy
# for localhost addresses.
chrome_options.add_argument('proxy-bypass-list=<-loopback>')
kwargs['options'] = chrome_options
super().__init__(*args, **kwargs)
def quit(self):
self.proxy.shutdown()
super().quit()
class Safari(InspectRequestsMixin, _Safari):
"""Extends the Safari webdriver to provide additional methods for inspecting requests."""
def __init__(self, seleniumwire_options=None, *args, **kwargs):
"""Initialise a new Safari WebDriver instance.
Args:
seleniumwire_options: The seleniumwire options dictionary.
"""
if seleniumwire_options is None:
seleniumwire_options = {}
# Safari does not support automatic proxy configuration through the
# DesiredCapabilities API, and thus has to be configured manually.
# Whatever port number is chosen for that manual configuration has to
# be passed in the options.
assert 'port' in seleniumwire_options, 'You must set a port number in the seleniumwire_options'
self.proxy = backend.create(
port=seleniumwire_options.pop('port', 0),
options=seleniumwire_options
)
super().__init__(*args, **kwargs)
def quit(self):
self.proxy.shutdown()
super().quit()
class Edge(InspectRequestsMixin, _Edge):
"""Extends the Edge webdriver to provide additional methods for inspecting requests."""
def __init__(self, seleniumwire_options=None, *args, **kwargs):
"""Initialise a new Edge WebDriver instance.
Args:
seleniumwire_options: The seleniumwire options dictionary.
"""
if seleniumwire_options is None:
seleniumwire_options = {}
# Edge does not support automatic proxy configuration through the
# DesiredCapabilities API, and thus has to be configured manually.
# Whatever port number is chosen for that manual configuration has to
# be passed in the options.
assert 'port' in seleniumwire_options, 'You must set a port number in the seleniumwire_options'
self.proxy = backend.create(
port=seleniumwire_options.pop('port', 0),
options=seleniumwire_options
)
super().__init__(*args, **kwargs)
def quit(self):
self.proxy.shutdown()
super().quit()
class Remote(InspectRequestsMixin, _Remote):
"""Extends the Remote webdriver to provide additional methods for inspecting requests."""
def __init__(self, *args, seleniumwire_options=None, **kwargs):
"""Initialise a new Firefox WebDriver instance.
Args:
seleniumwire_options: The seleniumwire options dictionary.
"""
if seleniumwire_options is None:
seleniumwire_options = {}
self.proxy = backend.create(
addr=seleniumwire_options.pop('addr'),
port=seleniumwire_options.get('port', 0),
options=seleniumwire_options
)
if "port" not in seleniumwire_options: # Auto config mode
try:
capabilities = dict(kwargs.pop("desired_capabilities"))
except KeyError:
capabilities = DesiredCapabilities.FIREFOX.copy()
addr, port = self.proxy.address()
capabilities["proxy"] = {
"proxyType": "manual",
"httpProxy": "{}:{}".format(addr, port),
"sslProxy": "{}:{}".format(addr, port),
"noProxy": seleniumwire_options.pop('exclude_hosts', []),
}
capabilities["acceptInsecureCerts"] = True
kwargs["desired_capabilities"] = capabilities
super().__init__(*args, **kwargs)
def quit(self):
self.proxy.shutdown()
super().quit()
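# Hedged usage sketch (illustration only). Assumes geckodriver is installed and
# on PATH; the captured-request log is the `requests` attribute provided by
# InspectRequestsMixin (see the .request module imported above). The function
# name and the exclude_hosts value are hypothetical.
def _example_capture_requests(url):
    """Open a page through the Selenium Wire proxy and return the captured requests."""
    driver = Firefox(seleniumwire_options={'exclude_hosts': ['localhost']})
    try:
        driver.get(url)
        return list(driver.requests)
    finally:
        driver.quit()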
| 34.408654 | 103 | 0.622048 | 689 | 7,157 | 6.309144 | 0.153846 | 0.201058 | 0.042328 | 0.037267 | 0.796181 | 0.796181 | 0.796181 | 0.796181 | 0.796181 | 0.796181 | 0 | 0.000971 | 0.280425 | 7,157 | 207 | 104 | 34.574879 | 0.843107 | 0.218946 | 0 | 0.658333 | 0 | 0 | 0.104776 | 0.005368 | 0 | 0 | 0 | 0 | 0.016667 | 1 | 0.083333 | false | 0.008333 | 0.075 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
500f2ae3b8d67964d74cf5384c61883a7d4b77d2 | 7,319 | py | Python | app/backend/core/models/flow_parser_helper_basic.py | SummaLabs/DLS | 2adba47430b456ad0f324e4c8883a896a23b3fbf | [
"MIT"
] | 32 | 2017-09-04T17:40:39.000Z | 2021-02-16T23:08:34.000Z | app/backend/core/models/flow_parser_helper_basic.py | AymanNabih/DLS | 2adba47430b456ad0f324e4c8883a896a23b3fbf | [
"MIT"
] | 3 | 2017-10-09T12:52:54.000Z | 2020-06-29T02:48:38.000Z | app/backend/core/models/flow_parser_helper_basic.py | AymanNabih/DLS | 2adba47430b456ad0f324e4c8883a896a23b3fbf | [
"MIT"
] | 20 | 2017-10-07T17:29:50.000Z | 2021-01-23T22:01:54.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'ar'
import json
####################################
# Each value is a 2-tuple: (connection is available, connection is correct but currently not available)
dictAvailableConnectionsFromTo = {
'datainput' : {
'datainput' : (False, None),
'convolution1d' : (True, None),
'convolution2d' : (True, None),
'convolution3d' : (True, None),
'pooling1d' : (True, None),
'pooling2d' : (True, None),
'pooling3d' : (True, None),
'flatten' : (True, None),
'activation' : (True, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (False, None)
},
'convolution1d' : {
'datainput' : (False, None),
'convolution1d' : (True, None),
'convolution2d' : (False, None),
'convolution3d' : (False, None),
'pooling1d' : (True, None),
'pooling2d' : (False, None),
'pooling3d' : (False, None),
'flatten' : (True, None),
'activation' : (True, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (False, None)
},
'convolution2d': {
'datainput' : (False, None),
'convolution1d' : (False, None),
'convolution2d' : (True, None),
'convolution3d' : (False, None),
'pooling1d' : (False, None),
'pooling2d' : (True, None),
'pooling3d' : (False, None),
'flatten' : (True, None),
'activation' : (True, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (False, None)
},
'convolution3d': {
'datainput' : (False, None),
'convolution1d' : (False, None),
'convolution2d' : (False, None),
'convolution3d' : (True, None),
'pooling1d' : (False, None),
'pooling2d' : (False, None),
'pooling3d' : (True, None),
'flatten' : (True, None),
'activation' : (True, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (False, None)
},
'pooling1d': {
'datainput' : (False, None),
'convolution1d' : (True, None),
'convolution2d' : (False, None),
'convolution3d' : (False, None),
'pooling1d' : (True, None),
'pooling2d' : (False, None),
'pooling3d' : (False, None),
'flatten' : (True, None),
'activation' : (True, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (False, None)
},
'pooling2d': {
'datainput' : (False, None),
'convolution1d' : (False, None),
'convolution2d' : (True, None),
'convolution3d' : (False, None),
'pooling1d' : (False, None),
'pooling2d' : (True, None),
'pooling3d' : (False, None),
'flatten' : (True, None),
'activation' : (True, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (False, None)
},
'pooling3d': {
'datainput' : (False, None),
'convolution1d' : (False, None),
'convolution2d' : (False, None),
'convolution3d' : (True, None),
'pooling1d' : (False, None),
'pooling2d' : (False, None),
'pooling3d' : (True, None),
'flatten' : (True, None),
'activation' : (True, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (False, None)
},
'flatten': {
'datainput' : (False, None),
'convolution1d' : (True, None),
'convolution2d' : (False, None),
'convolution3d' : (False, None),
'pooling1d' : (True, None),
'pooling2d' : (False, None),
'pooling3d' : (False, None),
'flatten' : (False, None),
'activation' : (True, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (False, None)
},
'activation': {
'datainput' : (False, None),
'convolution1d' : (True, None),
'convolution2d' : (True, None),
'convolution3d' : (True, None),
'pooling1d' : (True, None),
'pooling2d' : (True, None),
'pooling3d' : (True, None),
'flatten' : (True, None),
'activation' : (False, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (True, None)
},
'merge': {
'datainput' : (False, None),
'convolution1d' : (True, None),
'convolution2d' : (False, None),
'convolution3d' : (False, None),
'pooling1d' : (True, None),
'pooling2d' : (False, None),
'pooling3d' : (False, None),
'flatten' : (True, None),
'activation' : (True, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (True, None)
},
'dense' : {
'datainput' : (False, None),
'convolution1d' : (True, None),
'convolution2d' : (False, None),
'convolution3d' : (False, None),
'pooling1d' : (True, None),
'pooling2d' : (False, None),
'pooling3d' : (False, None),
'flatten' : (True, None),
'activation' : (True, None),
'merge' : (True, None),
'dense' : (True, None),
'dataoutput' : (True, None)
},
'dataoutput' : {
'datainput' : (False, None),
'convolution1d' : (False, None),
'convolution2d' : (False, None),
'convolution3d' : (False, None),
'pooling1d' : (False, None),
'pooling2d' : (False, None),
'pooling3d' : (False, None),
'flatten' : (False, None),
'activation' : (False, None),
'merge' : (False, None),
'dense' : (False, None),
'dataoutput' : (False, None)
}
}
dictRequiredFields = {
'datainput' : ['datasetId'],
'convolution1d' : ['filtersCount', 'filterWidth', 'activationFunction', 'isTrainable'],
'convolution2d' : ['filtersCount', 'filterWidth', 'filterHeight', 'activationFunction', 'isTrainable'],
'convolution3d' : ['filtersCount', 'filterWidth', 'filterHeight', 'filterDepth', 'activationFunction', 'isTrainable'],
'pooling1d' : ['subsamplingSizeWidth', 'subsamplingType'],
'pooling2d' : ['subsamplingSizeWidth', 'subsamplingSizeHeight', 'subsamplingType'],
'pooling3d' : ['subsamplingSizeWidth', 'subsamplingSizeHeight', 'subsamplingSizeDepth', 'subsamplingType'],
'flatten' : [],
'activation' : ['activationFunction'],
'merge' : ['mergeType', 'mergeAxis'],
'dense' : ['neuronsCount', 'activationFunction', 'isTrainable'],
'dataoutput' : ['lossFunction', 'datasetId']
}
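# Hedged helper sketches (illustration only) showing how the tables above are
# meant to be consulted when validating a flow graph; the function names are
# hypothetical and not part of the original module.
def _is_connection_available(layer_type_from, layer_type_to):
    """Return True if a connection between the two layer types is currently available."""
    # Each entry is a 2-tuple: (is available, is correct but currently not available).
    is_available, _ = dictAvailableConnectionsFromTo[layer_type_from][layer_type_to]
    return is_available
def _missing_required_fields(layer_type, layer_params):
    """Return the required parameter names absent from a layer's config dict."""
    return [key for key in dictRequiredFields[layer_type] if key not in layer_params]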
if __name__ == '__main__':
pass | 37.341837 | 122 | 0.467004 | 514 | 7,319 | 6.626459 | 0.103113 | 0.17616 | 0.083969 | 0.109219 | 0.740752 | 0.735173 | 0.735173 | 0.735173 | 0.72842 | 0.72842 | 0 | 0.018035 | 0.35606 | 7,319 | 196 | 123 | 37.341837 | 0.704647 | 0.0138 | 0 | 0.755319 | 0 | 0 | 0.28454 | 0.00585 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.005319 | 0.005319 | 0 | 0.005319 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
5039aaae421b0f0e74c90af698218b5d45ecbeb2 | 69,147 | py | Python | tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py | wuyuexin/python-dialogflow-cx | 80f36ad67c8bb6f27dc8c2c5271451b8fea48508 | [
"Apache-2.0"
] | null | null | null | tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py | wuyuexin/python-dialogflow-cx | 80f36ad67c8bb6f27dc8c2c5271451b8fea48508 | [
"Apache-2.0"
] | null | null | null | tests/unit/gapic/dialogflowcx_v3beta1/test_intents.py | wuyuexin/python-dialogflow-cx | 80f36ad67c8bb6f27dc8c2c5271451b8fea48508 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google import auth
from google.api_core import client_options
from google.api_core import exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.dialogflowcx_v3beta1.services.intents import IntentsAsyncClient
from google.cloud.dialogflowcx_v3beta1.services.intents import IntentsClient
from google.cloud.dialogflowcx_v3beta1.services.intents import pagers
from google.cloud.dialogflowcx_v3beta1.services.intents import transports
from google.cloud.dialogflowcx_v3beta1.types import intent
from google.cloud.dialogflowcx_v3beta1.types import intent as gcdc_intent
from google.oauth2 import service_account
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert IntentsClient._get_default_mtls_endpoint(None) is None
assert IntentsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
assert (
IntentsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
)
assert (
IntentsClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
IntentsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert IntentsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [IntentsClient, IntentsAsyncClient])
def test_intents_client_from_service_account_file(client_class):
creds = credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client._transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
assert client._transport._credentials == creds
assert client._transport._host == "dialogflow.googleapis.com:443"
def test_intents_client_get_transport_class():
transport = IntentsClient.get_transport_class()
assert transport == transports.IntentsGrpcTransport
transport = IntentsClient.get_transport_class("grpc")
assert transport == transports.IntentsGrpcTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(IntentsClient, transports.IntentsGrpcTransport, "grpc"),
(IntentsAsyncClient, transports.IntentsGrpcAsyncIOTransport, "grpc_asyncio"),
],
)
@mock.patch.object(
IntentsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IntentsClient)
)
@mock.patch.object(
IntentsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IntentsAsyncClient)
)
def test_intents_client_client_options(client_class, transport_class, transport_name):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(IntentsClient, "get_transport_class") as gtc:
transport = transport_class(credentials=credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(IntentsClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
api_mtls_endpoint="squid.clam.whelk",
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
# "auto", and client_cert_source is provided.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
client_cert_source=client_cert_source_callback,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
# "auto", and default_client_cert_source is provided.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
# "auto", but client_cert_source and default_client_cert_source are None.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_ENDPOINT,
client_cert_source=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(IntentsClient, transports.IntentsGrpcTransport, "grpc"),
(IntentsAsyncClient, transports.IntentsGrpcAsyncIOTransport, "grpc_asyncio"),
],
)
def test_intents_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
api_mtls_endpoint=client.DEFAULT_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(IntentsClient, transports.IntentsGrpcTransport, "grpc"),
(IntentsAsyncClient, transports.IntentsGrpcAsyncIOTransport, "grpc_asyncio"),
],
)
def test_intents_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
api_mtls_endpoint=client.DEFAULT_ENDPOINT,
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_intents_client_client_options_from_dict():
with mock.patch(
"google.cloud.dialogflowcx_v3beta1.services.intents.transports.IntentsGrpcTransport.__init__"
) as grpc_transport:
grpc_transport.return_value = None
client = IntentsClient(client_options={"api_endpoint": "squid.clam.whelk"})
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
api_mtls_endpoint="squid.clam.whelk",
client_cert_source=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_list_intents(transport: str = "grpc", request_type=intent.ListIntentsRequest):
client = IntentsClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_intents), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = intent.ListIntentsResponse(
next_page_token="next_page_token_value",
)
response = client.list_intents(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == intent.ListIntentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListIntentsPager)
assert response.next_page_token == "next_page_token_value"
def test_list_intents_from_dict():
test_list_intents(request_type=dict)
@pytest.mark.asyncio
async def test_list_intents_async(transport: str = "grpc_asyncio"):
client = IntentsAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = intent.ListIntentsRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_intents), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
intent.ListIntentsResponse(next_page_token="next_page_token_value",)
)
response = await client.list_intents(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListIntentsAsyncPager)
assert response.next_page_token == "next_page_token_value"
def test_list_intents_field_headers():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = intent.ListIntentsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_intents), "__call__") as call:
call.return_value = intent.ListIntentsResponse()
client.list_intents(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_intents_field_headers_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = intent.ListIntentsRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_intents), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
intent.ListIntentsResponse()
)
await client.list_intents(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_intents_flattened():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_intents), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = intent.ListIntentsResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_intents(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
def test_list_intents_flattened_error():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_intents(
intent.ListIntentsRequest(), parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_intents_flattened_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_intents), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = intent.ListIntentsResponse()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
intent.ListIntentsResponse()
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_intents(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_intents_flattened_error_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_intents(
intent.ListIntentsRequest(), parent="parent_value",
)
def test_list_intents_pager():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_intents), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
intent.ListIntentsResponse(
intents=[intent.Intent(), intent.Intent(), intent.Intent(),],
next_page_token="abc",
),
intent.ListIntentsResponse(intents=[], next_page_token="def",),
intent.ListIntentsResponse(
intents=[intent.Intent(),], next_page_token="ghi",
),
intent.ListIntentsResponse(intents=[intent.Intent(), intent.Intent(),],),
RuntimeError,
)
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
pager = client.list_intents(request={})
assert pager._metadata == metadata
results = [i for i in pager]
assert len(results) == 6
assert all(isinstance(i, intent.Intent) for i in results)
def test_list_intents_pages():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.list_intents), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
intent.ListIntentsResponse(
intents=[intent.Intent(), intent.Intent(), intent.Intent(),],
next_page_token="abc",
),
intent.ListIntentsResponse(intents=[], next_page_token="def",),
intent.ListIntentsResponse(
intents=[intent.Intent(),], next_page_token="ghi",
),
intent.ListIntentsResponse(intents=[intent.Intent(), intent.Intent(),],),
RuntimeError,
)
pages = list(client.list_intents(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_intents_async_pager():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_intents),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
intent.ListIntentsResponse(
intents=[intent.Intent(), intent.Intent(), intent.Intent(),],
next_page_token="abc",
),
intent.ListIntentsResponse(intents=[], next_page_token="def",),
intent.ListIntentsResponse(
intents=[intent.Intent(),], next_page_token="ghi",
),
intent.ListIntentsResponse(intents=[intent.Intent(), intent.Intent(),],),
RuntimeError,
)
async_pager = await client.list_intents(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, intent.Intent) for i in responses)
@pytest.mark.asyncio
async def test_list_intents_async_pages():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.list_intents),
"__call__",
new_callable=mock.AsyncMock,
) as call:
# Set the response to a series of pages.
call.side_effect = (
intent.ListIntentsResponse(
intents=[intent.Intent(), intent.Intent(), intent.Intent(),],
next_page_token="abc",
),
intent.ListIntentsResponse(intents=[], next_page_token="def",),
intent.ListIntentsResponse(
intents=[intent.Intent(),], next_page_token="ghi",
),
intent.ListIntentsResponse(intents=[intent.Intent(), intent.Intent(),],),
RuntimeError,
)
pages = []
async for page_ in (await client.list_intents(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
def test_get_intent(transport: str = "grpc", request_type=intent.GetIntentRequest):
client = IntentsClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_intent), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = intent.Intent(
name="name_value",
display_name="display_name_value",
priority=898,
is_fallback=True,
)
response = client.get_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == intent.GetIntentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, intent.Intent)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.priority == 898
assert response.is_fallback is True
def test_get_intent_from_dict():
test_get_intent(request_type=dict)
@pytest.mark.asyncio
async def test_get_intent_async(transport: str = "grpc_asyncio"):
client = IntentsAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = intent.GetIntentRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_intent), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
intent.Intent(
name="name_value",
display_name="display_name_value",
priority=898,
is_fallback=True,
)
)
response = await client.get_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, intent.Intent)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.priority == 898
assert response.is_fallback is True
def test_get_intent_field_headers():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = intent.GetIntentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_intent), "__call__") as call:
call.return_value = intent.Intent()
client.get_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_intent_field_headers_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = intent.GetIntentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_intent), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(intent.Intent())
await client.get_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_intent_flattened():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.get_intent), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = intent.Intent()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_intent(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_get_intent_flattened_error():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_intent(
intent.GetIntentRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_intent_flattened_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.get_intent), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = intent.Intent()
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(intent.Intent())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_intent(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_intent_flattened_error_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_intent(
intent.GetIntentRequest(), name="name_value",
)
def test_create_intent(
transport: str = "grpc", request_type=gcdc_intent.CreateIntentRequest
):
client = IntentsClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.create_intent), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gcdc_intent.Intent(
name="name_value",
display_name="display_name_value",
priority=898,
is_fallback=True,
)
response = client.create_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == gcdc_intent.CreateIntentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gcdc_intent.Intent)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.priority == 898
assert response.is_fallback is True
def test_create_intent_from_dict():
test_create_intent(request_type=dict)
@pytest.mark.asyncio
async def test_create_intent_async(transport: str = "grpc_asyncio"):
client = IntentsAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = gcdc_intent.CreateIntentRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.create_intent), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcdc_intent.Intent(
name="name_value",
display_name="display_name_value",
priority=898,
is_fallback=True,
)
)
response = await client.create_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, gcdc_intent.Intent)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.priority == 898
assert response.is_fallback is True
def test_create_intent_field_headers():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcdc_intent.CreateIntentRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.create_intent), "__call__") as call:
call.return_value = gcdc_intent.Intent()
client.create_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_intent_field_headers_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcdc_intent.CreateIntentRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.create_intent), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcdc_intent.Intent())
await client.create_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_intent_flattened():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.create_intent), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gcdc_intent.Intent()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_intent(
parent="parent_value", intent=gcdc_intent.Intent(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].intent == gcdc_intent.Intent(name="name_value")
def test_create_intent_flattened_error():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_intent(
gcdc_intent.CreateIntentRequest(),
parent="parent_value",
intent=gcdc_intent.Intent(name="name_value"),
)
@pytest.mark.asyncio
async def test_create_intent_flattened_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.create_intent), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcdc_intent.Intent())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_intent(
parent="parent_value", intent=gcdc_intent.Intent(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].intent == gcdc_intent.Intent(name="name_value")
@pytest.mark.asyncio
async def test_create_intent_flattened_error_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_intent(
gcdc_intent.CreateIntentRequest(),
parent="parent_value",
intent=gcdc_intent.Intent(name="name_value"),
)
def test_update_intent(
transport: str = "grpc", request_type=gcdc_intent.UpdateIntentRequest
):
client = IntentsClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.update_intent), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gcdc_intent.Intent(
name="name_value",
display_name="display_name_value",
priority=898,
is_fallback=True,
)
response = client.update_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == gcdc_intent.UpdateIntentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, gcdc_intent.Intent)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.priority == 898
assert response.is_fallback is True
def test_update_intent_from_dict():
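    # The method should also accept the request as a plain dict.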
test_update_intent(request_type=dict)
@pytest.mark.asyncio
async def test_update_intent_async(transport: str = "grpc_asyncio"):
client = IntentsAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = gcdc_intent.UpdateIntentRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.update_intent), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
gcdc_intent.Intent(
name="name_value",
display_name="display_name_value",
priority=898,
is_fallback=True,
)
)
response = await client.update_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, gcdc_intent.Intent)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.priority == 898
assert response.is_fallback is True
def test_update_intent_field_headers():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcdc_intent.UpdateIntentRequest()
request.intent.name = "intent.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.update_intent), "__call__") as call:
call.return_value = gcdc_intent.Intent()
client.update_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "intent.name=intent.name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_update_intent_field_headers_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = gcdc_intent.UpdateIntentRequest()
request.intent.name = "intent.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.update_intent), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcdc_intent.Intent())
await client.update_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "intent.name=intent.name/value",) in kw["metadata"]
def test_update_intent_flattened():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.update_intent), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = gcdc_intent.Intent()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_intent(
intent=gcdc_intent.Intent(name="name_value"),
update_mask=field_mask.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].intent == gcdc_intent.Intent(name="name_value")
assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
def test_update_intent_flattened_error():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_intent(
gcdc_intent.UpdateIntentRequest(),
intent=gcdc_intent.Intent(name="name_value"),
update_mask=field_mask.FieldMask(paths=["paths_value"]),
)
@pytest.mark.asyncio
async def test_update_intent_flattened_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.update_intent), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gcdc_intent.Intent())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_intent(
intent=gcdc_intent.Intent(name="name_value"),
update_mask=field_mask.FieldMask(paths=["paths_value"]),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].intent == gcdc_intent.Intent(name="name_value")
assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
@pytest.mark.asyncio
async def test_update_intent_flattened_error_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_intent(
gcdc_intent.UpdateIntentRequest(),
intent=gcdc_intent.Intent(name="name_value"),
update_mask=field_mask.FieldMask(paths=["paths_value"]),
)
def test_delete_intent(
transport: str = "grpc", request_type=intent.DeleteIntentRequest
):
client = IntentsClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.delete_intent), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
response = client.delete_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == intent.DeleteIntentRequest()
# Establish that the response is the type that we expect.
assert response is None
def test_delete_intent_from_dict():
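    # The method should also accept the request as a plain dict.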
test_delete_intent(request_type=dict)
@pytest.mark.asyncio
async def test_delete_intent_async(transport: str = "grpc_asyncio"):
client = IntentsAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = intent.DeleteIntentRequest()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.delete_intent), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
response = await client.delete_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
def test_delete_intent_field_headers():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = intent.DeleteIntentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.delete_intent), "__call__") as call:
call.return_value = None
client.delete_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_intent_field_headers_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = intent.DeleteIntentRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.delete_intent), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_intent(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_intent_flattened():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client._transport.delete_intent), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_intent(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_delete_intent_flattened_error():
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_intent(
intent.DeleteIntentRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_delete_intent_flattened_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client._client._transport.delete_intent), "__call__"
) as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_intent(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_intent_flattened_error_async():
client = IntentsAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_intent(
intent.DeleteIntentRequest(), name="name_value",
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.IntentsGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = IntentsClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.IntentsGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = IntentsClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.IntentsGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = IntentsClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.IntentsGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
client = IntentsClient(transport=transport)
assert client._transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.IntentsGrpcTransport(
credentials=credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.IntentsGrpcAsyncIOTransport(
credentials=credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = IntentsClient(credentials=credentials.AnonymousCredentials(),)
assert isinstance(client._transport, transports.IntentsGrpcTransport,)
def test_intents_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(exceptions.DuplicateCredentialArgs):
transport = transports.IntentsTransport(
credentials=credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_intents_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.dialogflowcx_v3beta1.services.intents.transports.IntentsTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.IntentsTransport(
credentials=credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"list_intents",
"get_intent",
"create_intent",
"update_intent",
"delete_intent",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
def test_intents_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
auth, "load_credentials_from_file"
) as load_creds, mock.patch(
"google.cloud.dialogflowcx_v3beta1.services.intents.transports.IntentsTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (credentials.AnonymousCredentials(), None)
transport = transports.IntentsTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=(
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow",
),
quota_project_id="octopus",
)
def test_intents_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
adc.return_value = (credentials.AnonymousCredentials(), None)
IntentsClient()
adc.assert_called_once_with(
scopes=(
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow",
),
quota_project_id=None,
)
def test_intents_transport_auth_adc():
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(auth, "default") as adc:
adc.return_value = (credentials.AnonymousCredentials(), None)
transports.IntentsGrpcTransport(
host="squid.clam.whelk", quota_project_id="octopus"
)
adc.assert_called_once_with(
scopes=(
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow",
),
quota_project_id="octopus",
)
def test_intents_host_no_port():
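    # An endpoint without an explicit port should default to port 443.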
client = IntentsClient(
credentials=credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="dialogflow.googleapis.com"
),
)
assert client._transport._host == "dialogflow.googleapis.com:443"
def test_intents_host_with_port():
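    # An endpoint with an explicit port should keep that port.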
client = IntentsClient(
credentials=credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="dialogflow.googleapis.com:8000"
),
)
assert client._transport._host == "dialogflow.googleapis.com:8000"
def test_intents_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
# Check that if channel is provided, mtls endpoint and client_cert_source
# won't be used.
callback = mock.MagicMock()
transport = transports.IntentsGrpcTransport(
host="squid.clam.whelk",
channel=channel,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=callback,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert not callback.called
def test_intents_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
# Check that if channel is provided, mtls endpoint and client_cert_source
# won't be used.
callback = mock.MagicMock()
transport = transports.IntentsGrpcAsyncIOTransport(
host="squid.clam.whelk",
channel=channel,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=callback,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
assert not callback.called
@mock.patch("grpc.ssl_channel_credentials", autospec=True)
@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
def test_intents_grpc_transport_channel_mtls_with_client_cert_source(
grpc_create_channel, grpc_ssl_channel_cred
):
# Check that if channel is None, but api_mtls_endpoint and client_cert_source
    # are provided, then an mTLS channel will be created.
mock_cred = mock.Mock()
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
transport = transports.IntentsGrpcTransport(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=(
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow",
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
@mock.patch("grpc.ssl_channel_credentials", autospec=True)
@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
def test_intents_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
grpc_create_channel, grpc_ssl_channel_cred
):
# Check that if channel is None, but api_mtls_endpoint and client_cert_source
    # are provided, then an mTLS channel will be created.
mock_cred = mock.Mock()
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
transport = transports.IntentsGrpcAsyncIOTransport(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=(
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow",
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
@pytest.mark.parametrize(
"api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
)
@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
def test_intents_grpc_transport_channel_mtls_with_adc(
grpc_create_channel, api_mtls_endpoint
):
# Check that if channel and client_cert_source are None, but api_mtls_endpoint
    # is provided, then an mTLS channel will be created with SSL ADC.
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
# Mock google.auth.transport.grpc.SslCredentials class.
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
mock_cred = mock.Mock()
transport = transports.IntentsGrpcTransport(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint=api_mtls_endpoint,
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=(
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow",
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
@pytest.mark.parametrize(
"api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
)
@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
def test_intents_grpc_asyncio_transport_channel_mtls_with_adc(
grpc_create_channel, api_mtls_endpoint
):
# Check that if channel and client_cert_source are None, but api_mtls_endpoint
    # is provided, then an mTLS channel will be created with SSL ADC.
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
# Mock google.auth.transport.grpc.SslCredentials class.
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
mock_cred = mock.Mock()
transport = transports.IntentsGrpcAsyncIOTransport(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint=api_mtls_endpoint,
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=(
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/dialogflow",
),
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
)
assert transport.grpc_channel == mock_grpc_channel
def test_intent_path():
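    # The path helper should render the fully-qualified intent resource name.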
project = "squid"
location = "clam"
agent = "whelk"
intent = "octopus"
expected = "projects/{project}/locations/{location}/agents/{agent}/intents/{intent}".format(
project=project, location=location, agent=agent, intent=intent,
)
actual = IntentsClient.intent_path(project, location, agent, intent)
assert expected == actual
def test_parse_intent_path():
expected = {
"project": "oyster",
"location": "nudibranch",
"agent": "cuttlefish",
"intent": "mussel",
}
path = IntentsClient.intent_path(**expected)
# Check that the path construction is reversible.
actual = IntentsClient.parse_intent_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
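    # The supplied client_info should be forwarded to the transport's
    # _prep_wrapped_messages, both for a client and for a bare transport.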
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.IntentsTransport, "_prep_wrapped_messages"
) as prep:
client = IntentsClient(
credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.IntentsTransport, "_prep_wrapped_messages"
) as prep:
transport_class = IntentsClient.get_transport_class()
transport = transport_class(
credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
| 36.9375 | 111 | 0.685149 | 8,192 | 69,147 | 5.557739 | 0.044556 | 0.02214 | 0.019987 | 0.021283 | 0.897208 | 0.87285 | 0.855543 | 0.839707 | 0.821454 | 0.808254 | 0 | 0.004366 | 0.228224 | 69,147 | 1,871 | 112 | 36.957242 | 0.84878 | 0.204926 | 0 | 0.69073 | 0 | 0 | 0.094879 | 0.034636 | 0 | 0 | 0 | 0 | 0.139459 | 1 | 0.045939 | false | 0 | 0.018868 | 0.001641 | 0.066448 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |
504bac9a1dd1041a958e3a2e282e1d8eee020219 | 4,213 | py | Python | tests/changes/models/test_snapshot.py | vault-the/changes | 37e23c3141b75e4785cf398d015e3dbca41bdd56 | [
"Apache-2.0"
] | 443 | 2015-01-03T16:28:39.000Z | 2021-04-26T16:39:46.000Z | tests/changes/models/test_snapshot.py | vault-the/changes | 37e23c3141b75e4785cf398d015e3dbca41bdd56 | [
"Apache-2.0"
] | 12 | 2015-07-30T19:07:16.000Z | 2016-11-07T23:11:21.000Z | tests/changes/models/test_snapshot.py | vault-the/changes | 37e23c3141b75e4785cf398d015e3dbca41bdd56 | [
"Apache-2.0"
] | 47 | 2015-01-09T10:04:00.000Z | 2020-11-18T17:58:19.000Z | from changes.config import db
from changes.models.project import ProjectOption
from changes.models.snapshot import Snapshot, SnapshotImage, SnapshotStatus
from changes.testutils.cases import TestCase
class TestSnapshotTestCase(TestCase):
def test_snapshot_change_status(self):
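        # A pending snapshot only becomes active once every one of its images is active.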
project = self.create_project()
plan_1 = self.create_plan(project)
plan_2 = self.create_plan(project)
snapshot = self.create_snapshot(project, status=SnapshotStatus.pending)
snapshot_image_1 = self.create_snapshot_image(snapshot, plan_1)
snapshot_image_2 = self.create_snapshot_image(snapshot, plan_2)
assert Snapshot.query.get(snapshot.id).status == SnapshotStatus.pending
snapshot_image_1.change_status(SnapshotStatus.active)
assert Snapshot.query.get(snapshot.id).status == SnapshotStatus.pending
snapshot_image_2.change_status(SnapshotStatus.active)
assert Snapshot.query.get(snapshot.id).status == SnapshotStatus.active
def test_snapshot_invalidated(self):
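        # Invalidating any single image marks the whole snapshot as invalidated.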
project = self.create_project()
plan_1 = self.create_plan(project)
plan_2 = self.create_plan(project)
snapshot = self.create_snapshot(project, status=SnapshotStatus.pending)
snapshot_image_1 = self.create_snapshot_image(snapshot, plan_1)
snapshot_image_2 = self.create_snapshot_image(snapshot, plan_2)
snapshot_image_1.change_status(SnapshotStatus.active)
snapshot_image_2.change_status(SnapshotStatus.active)
snapshot_image_1.change_status(SnapshotStatus.invalidated)
assert Snapshot.query.get(snapshot.id).status == SnapshotStatus.invalidated
def test_snapshot_failed(self):
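        # A snapshot already marked failed stays failed even after its images become active.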
project = self.create_project()
plan_1 = self.create_plan(project)
plan_2 = self.create_plan(project)
snapshot = self.create_snapshot(project, status=SnapshotStatus.failed)
snapshot_image_1 = self.create_snapshot_image(snapshot, plan_1)
snapshot_image_2 = self.create_snapshot_image(snapshot, plan_2)
snapshot_image_1.change_status(SnapshotStatus.active)
snapshot_image_2.change_status(SnapshotStatus.active)
assert Snapshot.query.get(snapshot.id).status == SnapshotStatus.failed
class TestSnapshotImageTestCase(TestCase):
def test_get_snapshot_image_independent(self):
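        # A plan with no snapshot_plan_id resolves to the image created for that plan itself.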
project = self.create_project()
plan = self.create_plan(project)
snapshot = self.create_snapshot(project)
db.session.add(ProjectOption(
project_id=project.id,
name='snapshot.current',
value=snapshot.id.hex,
))
snapshot_image = self.create_snapshot_image(snapshot, plan)
assert snapshot_image == SnapshotImage.get(plan, snapshot.id)
def test_get_snapshot_image_dependent(self):
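        # A plan that points at another plan via snapshot_plan_id resolves to that plan's image.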
project = self.create_project()
plan_1 = self.create_plan(project)
plan_2 = self.create_plan(project)
plan_1.snapshot_plan_id = plan_2.id
snapshot = self.create_snapshot(project)
db.session.add(ProjectOption(
project_id=project.id,
name='snapshot.current',
value=snapshot.id.hex,
))
snapshot_image_1 = self.create_snapshot_image(snapshot, plan_1)
snapshot_image_2 = self.create_snapshot_image(snapshot, plan_2)
assert snapshot_image_2 == SnapshotImage.get(plan_1, snapshot.id)
assert snapshot_image_2 == SnapshotImage.get(plan_2, snapshot.id)
def test_get_snapshot_image_given_snapshot(self):
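        # Lookups should honor the snapshot id that is passed in, not the project's current snapshot.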
project = self.create_project()
plan = self.create_plan(project)
snapshot = self.create_snapshot(project)
current_snapshot = self.create_snapshot(project)
db.session.add(ProjectOption(
project_id=project.id,
name='snapshot.current',
value=current_snapshot.id.hex,
))
snapshot_image = self.create_snapshot_image(snapshot, plan)
current_snapshot_image = self.create_snapshot_image(current_snapshot, plan)
assert snapshot_image == SnapshotImage.get(plan, snapshot.id)
assert current_snapshot_image == SnapshotImage.get(plan, current_snapshot.id)
| 42.989796 | 85 | 0.718016 | 496 | 4,213 | 5.810484 | 0.08871 | 0.166898 | 0.112422 | 0.087786 | 0.832408 | 0.811589 | 0.785219 | 0.74254 | 0.724497 | 0.724497 | 0 | 0.01095 | 0.197959 | 4,213 | 97 | 86 | 43.43299 | 0.841965 | 0 | 0 | 0.696203 | 0 | 0 | 0.011393 | 0 | 0 | 0 | 0 | 0 | 0.126582 | 1 | 0.075949 | false | 0 | 0.050633 | 0 | 0.151899 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6 |