hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6f44c212e6238d072e7826e8ed0fa9e8976340a2 | 2,240 | py | Python | tests/unit/test_initialization.py | X-rayLaser/multi-directional-mdrnn | 70b0e1c2e07b5f476c264c6700e8d34d41a2ce10 | [
"MIT"
] | 12 | 2020-02-27T08:34:44.000Z | 2022-03-15T12:22:32.000Z | tests/unit/test_initialization.py | X-rayLaser/multi-directional-mdrnn | 70b0e1c2e07b5f476c264c6700e8d34d41a2ce10 | [
"MIT"
] | 1 | 2021-02-23T16:29:05.000Z | 2021-05-07T15:05:54.000Z | tests/unit/test_initialization.py | X-rayLaser/multi-directional-mdrnn | 70b0e1c2e07b5f476c264c6700e8d34d41a2ce10 | [
"MIT"
] | 2 | 2020-03-31T15:44:06.000Z | 2021-05-10T15:26:35.000Z | from unittest.case import TestCase
from mdrnn import MDRNN, InvalidParamsError
class MDRNNInitializationTests(TestCase):
def make_rnn(self, **kwargs):
return MDRNN(**kwargs)
def assert_invalid_instances(self, *kwargs):
for kwargs in kwargs:
self.assertRaises(InvalidParamsError, lambda: self.make_rnn(**kwargs))
def test_with_invalid_input_dim(self):
self.assert_invalid_instances(dict(units=10, input_shape=(3, 5, -1)),
dict(units=10, input_shape=(3, 5, 0)),
dict(units=10, input_shape=(3, 5, 10**10)))
def test_with_invalid_units(self):
self.assert_invalid_instances(dict(units=-1, input_shape=(3, 5, 1)),
dict(units=0, input_shape=(3, 5, 1)),
dict(units=10**10, input_shape=(3, 5, 1)))
def test_with_invalid_number_of_dimensions(self):
args = tuple([1] * 10**4)
self.assert_invalid_instances(dict(units=1, input_shape=(1,)),
dict(units=1, input_shape=args))
class MDGRUInitializationTests(TestCase):
def make_rnn(self, **kwargs):
from mdrnn._layers.gru import MDGRU
return MDGRU(**kwargs)
def assert_invalid_instances(self, *kwargs):
for kwargs in kwargs:
self.assertRaises(InvalidParamsError, lambda: self.make_rnn(**kwargs))
def test_with_invalid_input_dim(self):
self.assert_invalid_instances(dict(units=10, input_shape=(3, 5, -1)),
dict(units=10, input_shape=(3, 5, 0)),
dict(units=10, input_shape=(3, 5, 10**10)))
def test_with_invalid_units(self):
self.assert_invalid_instances(dict(units=-1, input_shape=(3, 5, 1)),
dict(units=0, input_shape=(3, 5, 1)),
dict(units=10**10, input_shape=(3, 5, 1)))
def test_with_invalid_number_of_dimensions(self):
args = tuple([1] * 10**4)
self.assert_invalid_instances(dict(units=1, input_shape=(1,)),
dict(units=1, input_shape=args))
| 42.264151 | 82 | 0.573214 | 274 | 2,240 | 4.463504 | 0.160584 | 0.117743 | 0.107931 | 0.117743 | 0.855274 | 0.855274 | 0.809485 | 0.809485 | 0.809485 | 0.809485 | 0 | 0.05148 | 0.30625 | 2,240 | 52 | 83 | 43.076923 | 0.735521 | 0 | 0 | 0.820513 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25641 | 1 | 0.25641 | false | 0 | 0.076923 | 0.025641 | 0.435897 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
6f5657b0d4b32c077424fb2941f3fc80c25fd70c | 11,973 | py | Python | utils/attack_lowdim_utils.py | kingabzpro/label_representations | bfd11c5769c9253cb761889347a51a08d0a2e2ad | [
"MIT"
] | 18 | 2021-05-05T04:53:02.000Z | 2022-03-09T21:44:37.000Z | utils/attack_lowdim_utils.py | kingabzpro/label_representations | bfd11c5769c9253cb761889347a51a08d0a2e2ad | [
"MIT"
] | null | null | null | utils/attack_lowdim_utils.py | kingabzpro/label_representations | bfd11c5769c9253cb761889347a51a08d0a2e2ad | [
"MIT"
] | 7 | 2021-06-10T18:56:17.000Z | 2022-03-24T16:39:16.000Z | import torch
import torch.nn as nn
# Checks if the nearest neighbor of the output is the target
def is_nn_target(output, target, mels):
mse = nn.MSELoss(reduction="none")
num_classes = mels.shape[0]
mse_dists = mse(output.repeat(num_classes, 1), mels).mean(-1) # low-dim label is 1D
output_NN = mels[torch.argmin(mse_dists)]
return (output_NN - target).abs().sum() < 1e-5
# FGSM attack
def fgsm_attack(image, epsilon, data_grad):
# Collect the element-wise sign of the data gradient
sign_data_grad = data_grad.sign()
# Create the perturbed image by adjusting each pixel of the input image
perturbed_image = image + epsilon * sign_data_grad
# Adding clipping to maintain image range
perturbed_image = torch.clamp(
perturbed_image, image.min().item(), image.max().item()
)
# Return the perturbed image
return perturbed_image
# FGSM (untargeted)
def test_fgsm_untargeted(model, device, test_loader, epsilon, mels):
# Accuracy counter
correct = 0
adv_examples = []
for data, target in test_loader:
# Send the data and label to the device
data, target = data.to(device), target.to(device)
# Set requires_grad attribute of tensor. Important for Attack
data.requires_grad = True
# Forward pass the data through the model
output = model(data)
# Calculate the loss
init_loss = nn.functional.smooth_l1_loss(output, target)
# If the initial prediction is wrong, dont bother attacking, just move on
if not is_nn_target(output, target, mels):
continue
elif epsilon == 0:
correct += 1
if len(adv_examples) < 30:
# Save some examples for visualization later
adv_ex = data.squeeze().detach().cpu().numpy()
adv_examples.append((init_loss.item(), init_loss.item(), adv_ex))
continue
# Zero all existing gradients
model.zero_grad()
# Calculate gradients of model in backward pass
init_loss.backward()
# Collect datagrad
data_grad = data.grad.data
# Call FGSM Attack
perturbed_data = fgsm_attack(data, epsilon, data_grad)
# Re-classify the perturbed image
output = model(perturbed_data)
# Check for success
if is_nn_target(output, target, mels):
correct += 1
elif len(adv_examples) < 30:
# Save some adv examples for visualization later
final_loss = nn.functional.smooth_l1_loss(output, target)
adv_ex = perturbed_data.squeeze().detach().cpu().numpy()
adv_examples.append((init_loss.item(), final_loss.item(), adv_ex))
# Calculate final accuracy for this epsilon
final_acc = correct / float(len(test_loader))
print(
"Epsilon: {}\tTest Accuracy = {} / {} = {}".format(
epsilon, correct, len(test_loader), final_acc
)
)
# Return the accuracy and an adversarial example
return final_acc, adv_examples
# FGSM (targeted)
def test_fgsm_targeted(model, num_classes, device, test_loader, epsilon, mels):
# Accuracy counter
correct = 0
adv_examples = []
gen = torch.manual_seed(444)
for data, target in test_loader:
# Send the data and label to the device
data, target = data.to(device), target.to(device)
adv_target_idx = torch.randint(0, num_classes, (1,), generator=gen).item()
mel = mels[adv_target_idx : adv_target_idx + 1]
while (mel - target).abs().sum() < 1e-5:
adv_target_idx = torch.randint(0, num_classes, (1,), generator=gen).item()
mel = mels[adv_target_idx : adv_target_idx + 1]
adv_target = mel.clone().to(device)
# Set requires_grad attribute of tensor. Important for Attack
data.requires_grad = True
# Forward pass the data through the model
output = model(data)
# Calculate the loss
init_loss = nn.functional.smooth_l1_loss(output, target)
# If the initial prediction is wrong, dont bother attacking, just move on
if not is_nn_target(output, target, mels):
continue
elif epsilon == 0:
correct += 1
if len(adv_examples) < 30:
# Save some examples for visualization later
adv_ex = data.squeeze().detach().cpu().numpy()
adv_examples.append((init_loss.item(), init_loss.item(), adv_ex))
continue
adv_loss = nn.functional.smooth_l1_loss(output, adv_target)
# Zero all existing gradients
model.zero_grad()
# Calculate gradients of model in backward pass
adv_loss.backward()
# Collect datagrad
data_grad = data.grad.data
# Call FGSM Attack
perturbed_data = fgsm_attack(data, -epsilon, data_grad)
# Re-classify the perturbed image
output = model(perturbed_data)
# Check for success
if is_nn_target(output, target, mels):
correct += 1
elif len(adv_examples) < 30:
# Save some adv examples for visualization later
final_loss = nn.functional.smooth_l1_loss(output, target)
adv_ex = perturbed_data.squeeze().detach().cpu().numpy()
adv_examples.append((init_loss.item(), final_loss.item(), adv_ex))
# Calculate final accuracy for this epsilon
final_acc = correct / float(len(test_loader))
print(
"Epsilon: {}\tTest Accuracy = {} / {} = {}".format(
epsilon, correct, len(test_loader), final_acc
)
)
# Return the accuracy and an adversarial example
return final_acc, adv_examples
# Basic iterative attack
def iterative_attack(image, last_perturbed_image, epsilon, alpha, data_grad):
# Collect the element-wise sign of the data gradient
sign_data_grad = data_grad.sign()
# Create the perturbed image by adjusting each pixel of the input image
perturbed_image = last_perturbed_image + alpha * sign_data_grad
# Adding clipping to maintain [-epsilon,epsilon] range for accumulated gradients
total_grad = perturbed_image - image
total_grad = torch.clamp(total_grad, -epsilon, epsilon)
perturbed_image = image + total_grad
# perturbed_image = torch.clamp(perturbed_image, image.min().item(), image.max().item())
# Return the perturbed image
return perturbed_image.clone()
# iterative (untargeted)
def test_iterative_untargeted(
model, device, test_loader, mels, epsilon, alpha, num_steps
):
# Accuracy counter
correct = 0
adv_examples = []
for data, target in test_loader:
# Send the data and label to the device
data, target = data.to(device), target.to(device)
# Set requires_grad attribute of tensor. Important for Attack
data.requires_grad = True
# Forward pass the data through the model
output = model(data)
# Calculate the loss
init_loss = nn.functional.smooth_l1_loss(output, target)
# If the initial prediction is wrong, dont bother attacking, just move on
if not is_nn_target(output, target, mels):
continue
elif epsilon == 0:
correct += 1
if len(adv_examples) < 30:
# Save some examples for visualization later
adv_ex = data.squeeze().detach().cpu().numpy()
adv_examples.append((init_loss.item(), init_loss.item(), adv_ex))
continue
orig_data = data.clone()
perturbed_data = data
for i in range(num_steps):
# Calculate the loss
loss = nn.functional.smooth_l1_loss(output, target)
# Zero all existing gradients
model.zero_grad()
# Calculate gradients of model in backward pass
loss.backward()
# Collect datagrad
data_grad = perturbed_data.grad.data
# Call Iterative Attack
perturbed_data.data = iterative_attack(
orig_data, perturbed_data, epsilon, alpha, data_grad
)
# Re-classify the perturbed image
output = model(perturbed_data)
# Check for success
if is_nn_target(output, target, mels):
correct += 1
elif len(adv_examples) < 30:
# Save some adv examples for visualization later
final_loss = nn.functional.smooth_l1_loss(output, target)
adv_ex = perturbed_data.squeeze().detach().cpu().numpy()
adv_examples.append((init_loss.item(), final_loss.item(), adv_ex))
# Calculate final accuracy for this epsilon
final_acc = correct / float(len(test_loader))
print(
"Epsilon: {}\tTest Accuracy = {} / {} = {}".format(
epsilon, correct, len(test_loader), final_acc
)
)
# Return the accuracy and an adversarial example
return final_acc, adv_examples
# iterative (targeted)
def test_iterative_targeted(
model, num_classes, device, test_loader, mels, epsilon, alpha, num_steps
):
# Accuracy counter
correct = 0
adv_examples = []
gen = torch.manual_seed(444)
for data, target in test_loader:
# Send the data and label to the device
data, target = data.to(device), target.to(device)
adv_target_idx = torch.randint(0, num_classes, (1,), generator=gen).item()
mel = mels[adv_target_idx : adv_target_idx + 1]
while (mel - target).abs().sum() < 1e-5:
adv_target_idx = torch.randint(0, num_classes, (1,), generator=gen).item()
mel = mels[adv_target_idx : adv_target_idx + 1]
adv_target = mel.clone().to(device)
# Set requires_grad attribute of tensor. Important for Attack
data.requires_grad = True
# Forward pass the data through the model
output = model(data)
# Get the index of the max log-probability
init_loss = nn.functional.smooth_l1_loss(output, target)
# If the initial prediction is wrong, dont bother attacking, just move on
if not is_nn_target(output, target, mels):
continue
elif epsilon == 0:
correct += 1
if len(adv_examples) < 30:
# Save some examples for visualization later
adv_ex = data.squeeze().detach().cpu().numpy()
adv_examples.append((init_loss.item(), init_loss.item(), adv_ex))
continue
orig_data = data.clone()
perturbed_data = data
for i in range(num_steps):
# Calculate the loss
loss = nn.functional.smooth_l1_loss(output, adv_target)
# Zero all existing gradients
model.zero_grad()
# Calculate gradients of model in backward pass
loss.backward()
# Collect datagrad
data_grad = perturbed_data.grad.data
# Call Iterative Attack
perturbed_data.data = iterative_attack(
orig_data, perturbed_data, epsilon, -alpha, data_grad
)
# Re-classify the perturbed image
output = model(perturbed_data)
# Check for success
if is_nn_target(output, target, mels):
correct += 1
elif len(adv_examples) < 30:
# Save some adv examples for visualization later
final_loss = nn.functional.smooth_l1_loss(output, target)
adv_ex = perturbed_data.squeeze().detach().cpu().numpy()
adv_examples.append((init_loss.item(), final_loss.item(), adv_ex))
# Calculate final accuracy for this epsilon
final_acc = correct / float(len(test_loader))
print(
"Epsilon: {}\tTest Accuracy = {} / {} = {}".format(
epsilon, correct, len(test_loader), final_acc
)
)
# Return the accuracy and an adversarial example
return final_acc, adv_examples
| 38.375 | 92 | 0.627412 | 1,492 | 11,973 | 4.855898 | 0.108579 | 0.042512 | 0.019876 | 0.033402 | 0.91332 | 0.899379 | 0.89579 | 0.879503 | 0.879503 | 0.879503 | 0 | 0.008276 | 0.283471 | 11,973 | 311 | 93 | 38.498392 | 0.836228 | 0.258498 | 0 | 0.761905 | 0 | 0 | 0.019104 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037037 | false | 0 | 0.010582 | 0 | 0.084656 | 0.021164 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6f73559629b69668759c2da6d35e3dcefdd5d294 | 17,427 | py | Python | vendor/packages/nose/functional_tests/test_loader.py | DESHRAJ/fjord | 8899b6286b23347c9b024334e61c33fe133e836d | [
"BSD-3-Clause"
] | 2 | 2019-08-19T17:08:47.000Z | 2019-10-05T11:37:02.000Z | vendor/packages/nose/functional_tests/test_loader.py | DESHRAJ/fjord | 8899b6286b23347c9b024334e61c33fe133e836d | [
"BSD-3-Clause"
] | 1 | 2021-12-13T20:55:07.000Z | 2021-12-13T20:55:07.000Z | vendor/packages/nose/functional_tests/test_loader.py | DESHRAJ/fjord | 8899b6286b23347c9b024334e61c33fe133e836d | [
"BSD-3-Clause"
] | 1 | 2019-11-02T23:29:13.000Z | 2019-11-02T23:29:13.000Z | import os
import sys
import unittest
from difflib import ndiff
from cStringIO import StringIO
from nose.config import Config
from nose.plugins.manager import PluginManager
from nose.plugins.skip import Skip
from nose import loader
from nose import suite
from nose.result import _TextTestResult
try:
# 2.7+
from unittest.runner import _WritelnDecorator
except ImportError:
from unittest import _WritelnDecorator
support = os.path.abspath(os.path.join(os.path.dirname(__file__), 'support'))
class TestNoseTestLoader(unittest.TestCase):
def setUp(self):
self._mods = sys.modules.copy()
suite.ContextSuiteFactory.suiteClass = TreePrintContextSuite
def tearDown(self):
to_del = [ m for m in sys.modules.keys() if
m not in self._mods ]
if to_del:
for mod in to_del:
del sys.modules[mod]
sys.modules.update(self._mods)
suite.ContextSuiteFactory.suiteClass = suite.ContextSuite
def test_load_from_name_file(self):
res = unittest.TestResult()
wd = os.path.join(support, 'package1')
l = loader.TestLoader(workingDir=wd)
file_suite = l.loadTestsFromName('tests/test_example_function.py')
file_suite(res)
assert not res.errors, res.errors
assert not res.failures, res.failures
def test_load_from_name_dot(self):
res = unittest.TestResult()
wd = os.path.join(support, 'package1')
l = loader.TestLoader(workingDir=wd)
dir_suite = l.loadTestsFromName('.')
dir_suite(res)
assert not res.errors, res.errors
assert not res.failures, res.failures
def test_load_from_name_file_callable(self):
res = unittest.TestResult()
wd = os.path.join(support, 'package1')
l = loader.TestLoader(workingDir=wd)
suite = l.loadTestsFromName(
'tests/test_example_function.py:test_times_two')
suite(res)
assert not res.errors, res.errors
assert not res.failures, res.failures
self.assertEqual(res.testsRun, 1)
def test_fixture_context(self):
res = unittest.TestResult()
wd = os.path.join(support, 'package2')
l = loader.TestLoader(workingDir=wd)
dir_suite = l.loadTestsFromName('.')
dir_suite(res)
m = sys.modules['test_pak']
print "test pak state", m.state
assert not res.errors, res.errors
assert not res.failures, res.failures
self.assertEqual(res.testsRun, 5)
# Expected order of calls
expect = ['test_pak.setup',
'test_pak.test_mod.setup',
'test_pak.test_mod.test_add',
'test_pak.test_mod.test_minus',
'test_pak.test_mod.teardown',
'test_pak.test_sub.setup',
'test_pak.test_sub.test_mod.setup',
'test_pak.test_sub.test_mod.TestMaths.setup_class',
'test_pak.test_sub.test_mod.TestMaths.setup',
'test_pak.test_sub.test_mod.TestMaths.test_div',
'test_pak.test_sub.test_mod.TestMaths.teardown',
'test_pak.test_sub.test_mod.TestMaths.setup',
'test_pak.test_sub.test_mod.TestMaths.test_two_two',
'test_pak.test_sub.test_mod.TestMaths.teardown',
'test_pak.test_sub.test_mod.TestMaths.teardown_class',
'test_pak.test_sub.test_mod.test',
'test_pak.test_sub.test_mod.teardown',
'test_pak.test_sub.teardown',
'test_pak.teardown']
self.assertEqual(len(m.state), len(expect))
for item in m.state:
self.assertEqual(item, expect.pop(0))
def test_fixture_context_name_is_module(self):
res = unittest.TestResult()
wd = os.path.join(support, 'package2')
l = loader.TestLoader(workingDir=wd)
suite = l.loadTestsFromName('test_pak.test_mod')
suite(res)
assert 'test_pak' in sys.modules, \
"Context did not load test_pak"
m = sys.modules['test_pak']
print "test pak state", m.state
expect = ['test_pak.setup',
'test_pak.test_mod.setup',
'test_pak.test_mod.test_add',
'test_pak.test_mod.test_minus',
'test_pak.test_mod.teardown',
'test_pak.teardown']
self.assertEqual(len(m.state), len(expect))
for item in m.state:
self.assertEqual(item, expect.pop(0))
def test_fixture_context_name_is_test_function(self):
res = unittest.TestResult()
wd = os.path.join(support, 'package2')
l = loader.TestLoader(workingDir=wd)
suite = l.loadTestsFromName('test_pak.test_mod:test_add')
suite(res)
assert 'test_pak' in sys.modules, \
"Context did not load test_pak"
m = sys.modules['test_pak']
print "test pak state", m.state
expect = ['test_pak.setup',
'test_pak.test_mod.setup',
'test_pak.test_mod.test_add',
'test_pak.test_mod.teardown',
'test_pak.teardown']
self.assertEqual(len(m.state), len(expect))
for item in m.state:
self.assertEqual(item, expect.pop(0))
def test_fixture_context_name_is_test_class(self):
res = unittest.TestResult()
wd = os.path.join(support, 'package2')
l = loader.TestLoader(workingDir=wd)
suite = l.loadTestsFromName(
'test_pak.test_sub.test_mod:TestMaths')
suite(res)
assert 'test_pak' in sys.modules, \
"Context did not load test_pak"
m = sys.modules['test_pak']
# print "test pak state", m.state
expect = ['test_pak.setup',
'test_pak.test_sub.setup',
'test_pak.test_sub.test_mod.setup',
'test_pak.test_sub.test_mod.TestMaths.setup_class',
'test_pak.test_sub.test_mod.TestMaths.setup',
'test_pak.test_sub.test_mod.TestMaths.test_div',
'test_pak.test_sub.test_mod.TestMaths.teardown',
'test_pak.test_sub.test_mod.TestMaths.setup',
'test_pak.test_sub.test_mod.TestMaths.test_two_two',
'test_pak.test_sub.test_mod.TestMaths.teardown',
'test_pak.test_sub.test_mod.TestMaths.teardown_class',
'test_pak.test_sub.test_mod.teardown',
'test_pak.test_sub.teardown',
'test_pak.teardown']
self.assertEqual(m.state, expect, diff(expect, m.state))
def test_fixture_context_name_is_test_class_test(self):
res = unittest.TestResult()
wd = os.path.join(support, 'package2')
l = loader.TestLoader(workingDir=wd)
suite = l.loadTestsFromName(
'test_pak.test_sub.test_mod:TestMaths.test_div')
suite(res)
assert 'test_pak' in sys.modules, \
"Context not load test_pak"
m = sys.modules['test_pak']
print "test pak state", m.state
expect = ['test_pak.setup',
'test_pak.test_sub.setup',
'test_pak.test_sub.test_mod.setup',
'test_pak.test_sub.test_mod.TestMaths.setup_class',
'test_pak.test_sub.test_mod.TestMaths.setup',
'test_pak.test_sub.test_mod.TestMaths.test_div',
'test_pak.test_sub.test_mod.TestMaths.teardown',
'test_pak.test_sub.test_mod.TestMaths.teardown_class',
'test_pak.test_sub.test_mod.teardown',
'test_pak.test_sub.teardown',
'test_pak.teardown']
self.assertEqual(m.state, expect, diff(expect, m.state))
def test_fixture_context_multiple_names(self):
res = unittest.TestResult()
wd = os.path.join(support, 'package2')
l = loader.TestLoader(workingDir=wd)
suite = l.loadTestsFromNames(
['test_pak.test_sub.test_mod:TestMaths.test_div',
'test_pak.test_sub.test_mod:TestMaths.test_two_two',
'test_pak.test_mod:test_add'])
print suite
suite(res)
assert not res.errors, res.errors
assert not res.failures, res.failures
assert 'test_pak' in sys.modules, \
"Context not load test_pak"
m = sys.modules['test_pak']
print "test pak state", m.state
expect = ['test_pak.setup',
'test_pak.test_sub.setup',
'test_pak.test_sub.test_mod.setup',
'test_pak.test_sub.test_mod.TestMaths.setup_class',
'test_pak.test_sub.test_mod.TestMaths.setup',
'test_pak.test_sub.test_mod.TestMaths.test_div',
'test_pak.test_sub.test_mod.TestMaths.teardown',
'test_pak.test_sub.test_mod.TestMaths.setup',
'test_pak.test_sub.test_mod.TestMaths.test_two_two',
'test_pak.test_sub.test_mod.TestMaths.teardown',
'test_pak.test_sub.test_mod.TestMaths.teardown_class',
'test_pak.test_sub.test_mod.teardown',
'test_pak.test_sub.teardown',
'test_pak.test_mod.setup',
'test_pak.test_mod.test_add',
'test_pak.test_mod.teardown',
'test_pak.teardown']
self.assertEqual(m.state, expect, diff(expect, m.state))
def test_fixture_context_multiple_names_some_common_ancestors(self):
stream = _WritelnDecorator(StringIO())
res = _TextTestResult(stream, 0, 2)
wd = os.path.join(support, 'ltfn')
l = loader.TestLoader(workingDir=wd)
suite = l.loadTestsFromNames(
['test_pak1.test_mod',
'test_pak2:test_two_two',
'test_pak1:test_one_one'])
print suite
suite(res)
res.printErrors()
print stream.getvalue()
assert not res.errors, res.errors
assert not res.failures, res.failures
assert 'state' in sys.modules, \
"Context not load state module"
m = sys.modules['state']
print "state", m.called
expect = ['test_pak1.setup',
'test_pak1.test_mod.setup',
'test_pak1.test_mod.test_one_mod_one',
'test_pak1.test_mod.teardown',
'test_pak1.test_one_one',
'test_pak1.teardown',
'test_pak2.setup',
'test_pak2.test_two_two',
'test_pak2.teardown']
self.assertEqual(m.called, expect, diff(expect, m.called))
def test_fixture_context_multiple_names_no_common_ancestors(self):
stream = _WritelnDecorator(StringIO())
res = _TextTestResult(stream, 0, 2)
wd = os.path.join(support, 'ltfn')
l = loader.TestLoader(workingDir=wd)
suite = l.loadTestsFromNames(
['test_pak1.test_mod',
'test_pak2:test_two_two',
'test_mod'])
print suite
suite(res)
res.printErrors()
print stream.getvalue()
assert not res.errors, res.errors
assert not res.failures, res.failures
assert 'state' in sys.modules, \
"Context not load state module"
m = sys.modules['state']
print "state", m.called
expect = ['test_pak1.setup',
'test_pak1.test_mod.setup',
'test_pak1.test_mod.test_one_mod_one',
'test_pak1.test_mod.teardown',
'test_pak1.teardown',
'test_pak2.setup',
'test_pak2.test_two_two',
'test_pak2.teardown',
'test_mod.setup',
'test_mod.test_mod',
'test_mod.teardown']
self.assertEqual(m.called, expect, diff(expect, m.called))
def test_mod_setup_fails_no_tests_run(self):
ctx = os.path.join(support, 'ctx')
l = loader.TestLoader(workingDir=ctx)
suite = l.loadTestsFromName('mod_setup_fails.py')
res = unittest.TestResult()
suite(res)
assert res.errors
assert not res.failures, res.failures
assert res.testsRun == 0, \
"Expected to run 0 tests but ran %s" % res.testsRun
def test_mod_setup_skip_no_tests_run_no_errors(self):
config = Config(plugins=PluginManager(plugins=[Skip()]))
ctx = os.path.join(support, 'ctx')
l = loader.TestLoader(workingDir=ctx, config=config)
suite = l.loadTestsFromName('mod_setup_skip.py')
res = unittest.TestResult()
suite(res)
assert not suite.was_setup, "Suite setup did not fail"
assert not res.errors, res.errors
assert not res.failures, res.failures
assert res.skipped
assert res.testsRun == 0, \
"Expected to run 0 tests but ran %s" % res.testsRun
def test_mod_import_skip_one_test_no_errors(self):
config = Config(plugins=PluginManager(plugins=[Skip()]))
ctx = os.path.join(support, 'ctx')
l = loader.TestLoader(workingDir=ctx, config=config)
suite = l.loadTestsFromName('mod_import_skip.py')
res = unittest.TestResult()
suite(res)
assert not res.errors, res.errors
assert not res.failures, res.failures
assert res.testsRun == 1, \
"Expected to run 1 tests but ran %s" % res.testsRun
def test_failed_import(self):
ctx = os.path.join(support, 'ctx')
l = loader.TestLoader(workingDir=ctx)
suite = l.loadTestsFromName('no_such_module.py')
res = _TextTestResult(
stream=_WritelnDecorator(sys.stdout),
descriptions=0, verbosity=1)
suite(res)
print res.errors
res.printErrors()
assert res.errors, "Expected errors but got none"
assert not res.failures, res.failures
assert res.testsRun == 1, \
"Expected to run 1 tests but ran %s" % res.testsRun
def test_failed_import_module_name(self):
ctx = os.path.join(support, 'ctx')
l = loader.TestLoader(workingDir=ctx)
suite = l.loadTestsFromName('no_such_module')
res = _TextTestResult(
stream=_WritelnDecorator(sys.stdout),
descriptions=0, verbosity=1)
suite(res)
print res.errors
res.printErrors()
assert res.errors, "Expected errors but got none"
assert not res.failures, res.failures
err = res.errors[0][0].test.exc_class
assert err is ImportError, \
"Expected import error, got %s" % err
def test_load_nonsense_name(self):
ctx = os.path.join(support, 'ctx')
l = loader.TestLoader(workingDir=ctx)
suite = l.loadTestsFromName('fred!')
res = _TextTestResult(
stream=_WritelnDecorator(sys.stdout),
descriptions=0, verbosity=1)
suite(res)
print res.errors
assert res.errors, "Expected errors but got none"
assert not res.failures, res.failures
def test_generator_with_closure(self):
"""Test that a generator test can employ a closure
Issue #3. If the generator binds early, the last value
of the closure will be seen for each generated test and
the tests will fail.
"""
gen = os.path.join(support, 'gen')
l = loader.TestLoader(workingDir=gen)
suite = l.loadTestsFromName('test')
res = _TextTestResult(
stream=_WritelnDecorator(sys.stdout),
descriptions=0, verbosity=1)
suite(res)
assert not res.errors
self.assertEqual(res.testsRun, 5)
def test_issue_269(self):
"""Test classes that raise exceptions in __init__ do not stop test run
"""
wdir = os.path.join(support, 'issue269')
l = loader.TestLoader(workingDir=wdir)
suite = l.loadTestsFromName('test_bad_class')
res = _TextTestResult(
stream=_WritelnDecorator(sys.stdout),
descriptions=0, verbosity=1)
suite(res)
print res.errors
self.assertEqual(len(res.errors), 1)
assert 'raise Exception("pow")' in res.errors[0][1]
# used for comparing lists
def diff(a, b):
    """Return a newline-prefixed delta of two line lists.

    Only the lines reported as changed by difflib.ndiff are kept;
    unchanged lines (which ndiff prefixes with spaces) are dropped.
    """
    changed = [line for line in ndiff(a, b) if not line.startswith(' ')]
    return '\n' + '\n'.join(changed)
# used for context debugging
class TreePrintContextSuite(suite.ContextSuite):
indent = ''
def setUp(self):
print self, 'setup -->'
suite.ContextSuite.setUp(self)
TreePrintContextSuite.indent += ' '
def tearDown(self):
TreePrintContextSuite.indent = TreePrintContextSuite.indent[:-2]
try:
suite.ContextSuite.tearDown(self)
finally:
print self, 'teardown <--'
def __repr__(self):
return '%s<%s>' % (self.indent,
getattr(self.context, '__name__', self.context))
__str__ = __repr__
if __name__ == '__main__':
    # Run this module's tests directly via unittest's CLI entry point.
    # Uncomment the lines below to trace nose.suite context handling:
    #import logging
    #logging.basicConfig() #level=logging.DEBUG)
    #logging.getLogger('nose.suite').setLevel(logging.DEBUG)
    unittest.main()
| 38.55531 | 78 | 0.599415 | 2,091 | 17,427 | 4.775705 | 0.093735 | 0.070799 | 0.073803 | 0.070098 | 0.79311 | 0.786601 | 0.777689 | 0.773984 | 0.761466 | 0.751051 | 0 | 0.006032 | 0.296035 | 17,427 | 451 | 79 | 38.640798 | 0.807956 | 0.012796 | 0 | 0.730366 | 0 | 0 | 0.246848 | 0.165098 | 0 | 0 | 0 | 0 | 0.149215 | 0 | null | null | 0 | 0.052356 | null | null | 0.057592 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6f7a7bd3d3ade8261f95dd77e76094d109e11fc1 | 349 | py | Python | tests/internal/instance_type/test_instance_type_p2_auto.py | frolovv/aws.ec2.compare | 582805823492f833d65c0441c4a14dce697c12aa | [
"Apache-2.0"
] | null | null | null | tests/internal/instance_type/test_instance_type_p2_auto.py | frolovv/aws.ec2.compare | 582805823492f833d65c0441c4a14dce697c12aa | [
"Apache-2.0"
] | null | null | null | tests/internal/instance_type/test_instance_type_p2_auto.py | frolovv/aws.ec2.compare | 582805823492f833d65c0441c4a14dce697c12aa | [
"Apache-2.0"
] | 1 | 2021-12-15T11:58:22.000Z | 2021-12-15T11:58:22.000Z |
# Testing module instance_type.p2
import pytest
import ec2_compare.internal.instance_type.p2
def test_get_internal_data_instance_type_p2_get_instances_list():
    """The generated p2 instance list must not be empty."""
    instances = ec2_compare.internal.instance_type.p2.get_instances_list()
    assert len(instances) > 0
def test_get_internal_data_instance_type_p2_get():
    """The generated p2 `get` container must not be empty."""
    data = ec2_compare.internal.instance_type.p2.get
    assert len(data) > 0
| 34.9 | 76 | 0.848138 | 56 | 349 | 4.839286 | 0.339286 | 0.265683 | 0.309963 | 0.250923 | 0.826568 | 0.826568 | 0.612546 | 0.612546 | 0.612546 | 0 | 0 | 0.034056 | 0.074499 | 349 | 9 | 77 | 38.777778 | 0.804954 | 0.088825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 1 | 0.333333 | true | 0 | 0.333333 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 10 |
48a79c2fb14f20efc90f18c09b7507ca78168112 | 74,381 | py | Python | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/global_/state/__init__.py | ckishimo/napalm-yang | 8f2bd907bd3afcde3c2f8e985192de74748baf6c | [
"Apache-2.0"
] | 64 | 2016-10-20T15:47:18.000Z | 2021-11-11T11:57:32.000Z | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/global_/state/__init__.py | ckishimo/napalm-yang | 8f2bd907bd3afcde3c2f8e985192de74748baf6c | [
"Apache-2.0"
] | 126 | 2016-10-05T10:36:14.000Z | 2019-05-15T08:43:23.000Z | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/global_/state/__init__.py | ckishimo/napalm-yang | 8f2bd907bd3afcde3c2f8e985192de74748baf6c | [
"Apache-2.0"
] | 63 | 2016-11-07T15:23:08.000Z | 2021-09-22T14:41:16.000Z | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
    # Alias Python 3's `builtins` module under the Python 2 name and map
    # the removed `long` type onto `int`, so the generated code below can
    # reference __builtin__ and long on either major version.
    import builtins as __builtin__

    long = int
elif six.PY2:
    import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/global/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: This container defines state for ISIS global router.
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__authentication_check",
"__instance",
"__net",
"__maximum_area_addresses",
"__level_capability",
"__max_ecmp_paths",
"__poi_tlv",
"__iid_tlv",
"__fast_flooding",
)
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__authentication_check = YANGDynClass(
base=YANGBool,
default=YANGBool("true"),
is_leaf=True,
yang_name="authentication-check",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__instance = YANGDynClass(
base=six.text_type,
default=six.text_type("0"),
is_leaf=True,
yang_name="instance",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="string",
is_config=False,
)
self.__net = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "[a-fA-F0-9]{2}(\\.[a-fA-F0-9]{4}){3,9}\\.[a-fA-F0-9]{2}"
},
)
),
is_leaf=False,
yang_name="net",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-isis-types:net",
is_config=False,
)
self.__maximum_area_addresses = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
default=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
)(
3
),
is_leaf=True,
yang_name="maximum-area-addresses",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
self.__level_capability = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"LEVEL_1": {}, "LEVEL_2": {}, "LEVEL_1_2": {}},
),
default=six.text_type("LEVEL_1_2"),
is_leaf=True,
yang_name="level-capability",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-isis-types:level-type",
is_config=False,
)
self.__max_ecmp_paths = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
is_leaf=True,
yang_name="max-ecmp-paths",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
self.__poi_tlv = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="poi-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__iid_tlv = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="iid-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
self.__fast_flooding = YANGDynClass(
base=YANGBool,
default=YANGBool("true"),
is_leaf=True,
yang_name="fast-flooding",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
    # Return this container's absolute YANG path as a list of components,
    # delegating to the parent node when one has been registered;
    # otherwise fall back to the static schema path of this container.
    if hasattr(self, "_parent"):
        return self._parent._path() + [self._yang_name]
    else:
        return [
            "network-instances",
            "network-instance",
            "protocols",
            "protocol",
            "isis",
            "global",
            "state",
        ]
def _get_authentication_check(self):
    """
    Getter method for authentication_check, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/authentication_check (boolean)

    YANG Description: When set to true, reject all ISIS protocol PDUs that either have a mismatch
    in authentication-type or authentication-key.
    """
    return self.__authentication_check
def _set_authentication_check(self, v, load=False):
    """
    Setter method for authentication_check, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/authentication_check (boolean)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_authentication_check is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_authentication_check() directly.

    YANG Description: When set to true, reject all ISIS protocol PDUs that either have a mismatch
    in authentication-type or authentication-key.
    """
    if hasattr(v, "_utype"):
        # Unwrap an already-wrapped YANG typed value before re-wrapping it.
        v = v._utype(v)
    try:
        # Re-wrap the supplied value with the leaf's full YANG metadata;
        # the type machinery raises if v is not coercible to a boolean.
        t = YANGDynClass(
            v,
            base=YANGBool,
            default=YANGBool("true"),
            is_leaf=True,
            yang_name="authentication-check",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )
    except (TypeError, ValueError):
        raise ValueError(
            {
                "error-string": """authentication_check must be of a type compatible with boolean""",
                "defined-type": "boolean",
                "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="authentication-check", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
            }
        )

    self.__authentication_check = t

    if hasattr(self, "_set"):
        # Notify the path helper / parent that this node changed.
        self._set()
def _unset_authentication_check(self):
    # Reset the leaf to a fresh wrapped instance carrying its YANG
    # default ("true"), discarding any previously set value.
    self.__authentication_check = YANGDynClass(
        base=YANGBool,
        default=YANGBool("true"),
        is_leaf=True,
        yang_name="authentication-check",
        parent=self,
        path_helper=self._path_helper,
        extmethods=self._extmethods,
        register_paths=True,
        namespace="http://openconfig.net/yang/network-instance",
        defining_module="openconfig-network-instance",
        yang_type="boolean",
        is_config=False,
    )
def _get_instance(self):
"""
Getter method for instance, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/instance (string)
YANG Description: ISIS Instance.
"""
return self.__instance
def _set_instance(self, v, load=False):
"""
Setter method for instance, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/instance (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_instance is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_instance() directly.
YANG Description: ISIS Instance.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
default=six.text_type("0"),
is_leaf=True,
yang_name="instance",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="string",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """instance must be of a type compatible with string""",
"defined-type": "string",
"generated-type": """YANGDynClass(base=six.text_type, default=six.text_type("0"), is_leaf=True, yang_name="instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=False)""",
}
)
self.__instance = t
if hasattr(self, "_set"):
self._set()
def _unset_instance(self):
self.__instance = YANGDynClass(
base=six.text_type,
default=six.text_type("0"),
is_leaf=True,
yang_name="instance",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="string",
is_config=False,
)
def _get_net(self):
"""
Getter method for net, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/net (oc-isis-types:net)
YANG Description: ISIS network entity title (NET). The first 8 bits are usually
49 (private AFI), next 16 bits represent area, next 48 bits represent
system id and final 8 bits are set to 0.
"""
return self.__net
def _set_net(self, v, load=False):
"""
Setter method for net, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/net (oc-isis-types:net)
If this variable is read-only (config: false) in the
source YANG file, then _set_net is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_net() directly.
YANG Description: ISIS network entity title (NET). The first 8 bits are usually
49 (private AFI), next 16 bits represent area, next 48 bits represent
system id and final 8 bits are set to 0.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "[a-fA-F0-9]{2}(\\.[a-fA-F0-9]{4}){3,9}\\.[a-fA-F0-9]{2}"
},
)
),
is_leaf=False,
yang_name="net",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-isis-types:net",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """net must be of a type compatible with oc-isis-types:net""",
"defined-type": "oc-isis-types:net",
"generated-type": """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[a-fA-F0-9]{2}(\\.[a-fA-F0-9]{4}){3,9}\\.[a-fA-F0-9]{2}'})), is_leaf=False, yang_name="net", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-isis-types:net', is_config=False)""",
}
)
self.__net = t
if hasattr(self, "_set"):
self._set()
def _unset_net(self):
self.__net = YANGDynClass(
base=TypedListType(
allowed_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "[a-fA-F0-9]{2}(\\.[a-fA-F0-9]{4}){3,9}\\.[a-fA-F0-9]{2}"
},
)
),
is_leaf=False,
yang_name="net",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-isis-types:net",
is_config=False,
)
def _get_maximum_area_addresses(self):
"""
Getter method for maximum_area_addresses, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/maximum_area_addresses (uint8)
YANG Description: Maximum areas supported.
"""
return self.__maximum_area_addresses
def _set_maximum_area_addresses(self, v, load=False):
"""
Setter method for maximum_area_addresses, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/maximum_area_addresses (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_maximum_area_addresses is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_maximum_area_addresses() directly.
YANG Description: Maximum areas supported.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
default=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
)(
3
),
is_leaf=True,
yang_name="maximum-area-addresses",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """maximum_area_addresses must be of a type compatible with uint8""",
"defined-type": "uint8",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8)(3), is_leaf=True, yang_name="maximum-area-addresses", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)""",
}
)
self.__maximum_area_addresses = t
if hasattr(self, "_set"):
self._set()
def _unset_maximum_area_addresses(self):
self.__maximum_area_addresses = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
default=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
)(
3
),
is_leaf=True,
yang_name="maximum-area-addresses",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
def _get_level_capability(self):
"""
Getter method for level_capability, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/level_capability (oc-isis-types:level-type)
YANG Description: ISIS level capability(level-1, level-2,vlevel-1-2).
"""
return self.__level_capability
def _set_level_capability(self, v, load=False):
"""
Setter method for level_capability, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/level_capability (oc-isis-types:level-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_level_capability is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_level_capability() directly.
YANG Description: ISIS level capability(level-1, level-2,vlevel-1-2).
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"LEVEL_1": {}, "LEVEL_2": {}, "LEVEL_1_2": {}},
),
default=six.text_type("LEVEL_1_2"),
is_leaf=True,
yang_name="level-capability",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-isis-types:level-type",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """level_capability must be of a type compatible with oc-isis-types:level-type""",
"defined-type": "oc-isis-types:level-type",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'LEVEL_1': {}, 'LEVEL_2': {}, 'LEVEL_1_2': {}},), default=six.text_type("LEVEL_1_2"), is_leaf=True, yang_name="level-capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-isis-types:level-type', is_config=False)""",
}
)
self.__level_capability = t
if hasattr(self, "_set"):
self._set()
def _unset_level_capability(self):
self.__level_capability = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"LEVEL_1": {}, "LEVEL_2": {}, "LEVEL_1_2": {}},
),
default=six.text_type("LEVEL_1_2"),
is_leaf=True,
yang_name="level-capability",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-isis-types:level-type",
is_config=False,
)
def _get_max_ecmp_paths(self):
"""
Getter method for max_ecmp_paths, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/max_ecmp_paths (uint8)
YANG Description: ISIS max-paths count.
"""
return self.__max_ecmp_paths
def _set_max_ecmp_paths(self, v, load=False):
"""
Setter method for max_ecmp_paths, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/max_ecmp_paths (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_max_ecmp_paths is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_max_ecmp_paths() directly.
YANG Description: ISIS max-paths count.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
is_leaf=True,
yang_name="max-ecmp-paths",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """max_ecmp_paths must be of a type compatible with uint8""",
"defined-type": "uint8",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="max-ecmp-paths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)""",
}
)
self.__max_ecmp_paths = t
if hasattr(self, "_set"):
self._set()
def _unset_max_ecmp_paths(self):
self.__max_ecmp_paths = YANGDynClass(
base=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
),
is_leaf=True,
yang_name="max-ecmp-paths",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint8",
is_config=False,
)
def _get_poi_tlv(self):
"""
Getter method for poi_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/poi_tlv (boolean)
YANG Description: ISIS purge TLV. When set to true, a TLV is added to purges to record
the system ID of the IS generating the purge.
"""
return self.__poi_tlv
def _set_poi_tlv(self, v, load=False):
"""
Setter method for poi_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/poi_tlv (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_poi_tlv is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_poi_tlv() directly.
YANG Description: ISIS purge TLV. When set to true, a TLV is added to purges to record
the system ID of the IS generating the purge.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="poi-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """poi_tlv must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="poi-tlv", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__poi_tlv = t
if hasattr(self, "_set"):
self._set()
def _unset_poi_tlv(self):
self.__poi_tlv = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="poi-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_iid_tlv(self):
"""
Getter method for iid_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/iid_tlv (boolean)
YANG Description: ISIS Instance Identifier TLV. When set to trues, the IID-TLV identifies
the unique instance as well as the topology/topologies to which the
PDU applies.
"""
return self.__iid_tlv
def _set_iid_tlv(self, v, load=False):
"""
Setter method for iid_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/iid_tlv (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_iid_tlv is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_iid_tlv() directly.
YANG Description: ISIS Instance Identifier TLV. When set to trues, the IID-TLV identifies
the unique instance as well as the topology/topologies to which the
PDU applies.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="iid-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """iid_tlv must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="iid-tlv", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__iid_tlv = t
if hasattr(self, "_set"):
self._set()
def _unset_iid_tlv(self):
self.__iid_tlv = YANGDynClass(
base=YANGBool,
default=YANGBool("false"),
is_leaf=True,
yang_name="iid-tlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
def _get_fast_flooding(self):
"""
Getter method for fast_flooding, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/fast_flooding (boolean)
YANG Description: When set to true, IS will always flood the LSP that triggered an SPF
before the router actually runs the SPF computation.
"""
return self.__fast_flooding
def _set_fast_flooding(self, v, load=False):
"""
Setter method for fast_flooding, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/fast_flooding (boolean)
If this variable is read-only (config: false) in the
source YANG file, then _set_fast_flooding is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_fast_flooding() directly.
YANG Description: When set to true, IS will always flood the LSP that triggered an SPF
before the router actually runs the SPF computation.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=YANGBool,
default=YANGBool("true"),
is_leaf=True,
yang_name="fast-flooding",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """fast_flooding must be of a type compatible with boolean""",
"defined-type": "boolean",
"generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="fast-flooding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
}
)
self.__fast_flooding = t
if hasattr(self, "_set"):
self._set()
def _unset_fast_flooding(self):
self.__fast_flooding = YANGDynClass(
base=YANGBool,
default=YANGBool("true"),
is_leaf=True,
yang_name="fast-flooding",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="boolean",
is_config=False,
)
authentication_check = __builtin__.property(_get_authentication_check)
instance = __builtin__.property(_get_instance)
net = __builtin__.property(_get_net)
maximum_area_addresses = __builtin__.property(_get_maximum_area_addresses)
level_capability = __builtin__.property(_get_level_capability)
max_ecmp_paths = __builtin__.property(_get_max_ecmp_paths)
poi_tlv = __builtin__.property(_get_poi_tlv)
iid_tlv = __builtin__.property(_get_iid_tlv)
fast_flooding = __builtin__.property(_get_fast_flooding)
_pyangbind_elements = OrderedDict(
[
("authentication_check", authentication_check),
("instance", instance),
("net", net),
("maximum_area_addresses", maximum_area_addresses),
("level_capability", level_capability),
("max_ecmp_paths", max_ecmp_paths),
("poi_tlv", poi_tlv),
("iid_tlv", iid_tlv),
("fast_flooding", fast_flooding),
]
)
class state(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/global/state. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.
    YANG Description: This container defines state for ISIS global router.
    """

    # NOTE(review): machine-generated bindings — prefer regenerating from the
    # YANG model over hand-editing this class.
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__authentication_check",
        "__instance",
        "__net",
        "__maximum_area_addresses",
        "__level_capability",
        "__max_ecmp_paths",
        "__poi_tlv",
        "__iid_tlv",
        "__fast_flooding",
    )

    _yang_name = "state"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False
        self._extmethods = False
        # Instantiate every leaf wrapper with its YANG schema default;
        # is_config=False marks all leaves of this container as state data.
        self.__authentication_check = YANGDynClass(
            base=YANGBool,
            default=YANGBool("true"),
            is_leaf=True,
            yang_name="authentication-check",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )
        self.__instance = YANGDynClass(
            base=six.text_type,
            default=six.text_type("0"),
            is_leaf=True,
            yang_name="instance",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="string",
            is_config=False,
        )
        self.__net = YANGDynClass(
            base=TypedListType(
                allowed_type=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "[a-fA-F0-9]{2}(\\.[a-fA-F0-9]{4}){3,9}\\.[a-fA-F0-9]{2}"
                    },
                )
            ),
            is_leaf=False,
            yang_name="net",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-isis-types:net",
            is_config=False,
        )
        self.__maximum_area_addresses = YANGDynClass(
            base=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            default=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            )(
                3
            ),
            is_leaf=True,
            yang_name="maximum-area-addresses",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint8",
            is_config=False,
        )
        self.__level_capability = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"LEVEL_1": {}, "LEVEL_2": {}, "LEVEL_1_2": {}},
            ),
            default=six.text_type("LEVEL_1_2"),
            is_leaf=True,
            yang_name="level-capability",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-isis-types:level-type",
            is_config=False,
        )
        self.__max_ecmp_paths = YANGDynClass(
            base=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            is_leaf=True,
            yang_name="max-ecmp-paths",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint8",
            is_config=False,
        )
        self.__poi_tlv = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="poi-tlv",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )
        self.__iid_tlv = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="iid-tlv",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )
        self.__fast_flooding = YANGDynClass(
            base=YANGBool,
            default=YANGBool("true"),
            is_leaf=True,
            yang_name="fast-flooding",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )

        # Optional copy-constructor: a single positional argument must expose
        # every element in _pyangbind_elements; only changed leaves are copied.
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Delegate to the parent container when attached; otherwise return the
        # absolute schema path of this container.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "global",
                "state",
            ]

    def _get_authentication_check(self):
        """
        Getter method for authentication_check, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/authentication_check (boolean)
        YANG Description: When set to true, reject all ISIS protocol PDUs that either have a mismatch
        in authentication-type or authentication-key.
        """
        return self.__authentication_check

    def _set_authentication_check(self, v, load=False):
        """
        Setter method for authentication_check, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/authentication_check (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_authentication_check is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_authentication_check() directly.
        YANG Description: When set to true, reject all ISIS protocol PDUs that either have a mismatch
        in authentication-type or authentication-key.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGBool,
                default=YANGBool("true"),
                is_leaf=True,
                yang_name="authentication-check",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="boolean",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """authentication_check must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="authentication-check", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
                }
            )
        self.__authentication_check = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_authentication_check(self):
        # Restore the schema default (true).
        self.__authentication_check = YANGDynClass(
            base=YANGBool,
            default=YANGBool("true"),
            is_leaf=True,
            yang_name="authentication-check",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )

    def _get_instance(self):
        """
        Getter method for instance, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/instance (string)
        YANG Description: ISIS Instance.
        """
        return self.__instance

    def _set_instance(self, v, load=False):
        """
        Setter method for instance, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/instance (string)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_instance is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_instance() directly.
        YANG Description: ISIS Instance.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                default=six.text_type("0"),
                is_leaf=True,
                yang_name="instance",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="string",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """instance must be of a type compatible with string""",
                    "defined-type": "string",
                    "generated-type": """YANGDynClass(base=six.text_type, default=six.text_type("0"), is_leaf=True, yang_name="instance", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='string', is_config=False)""",
                }
            )
        self.__instance = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_instance(self):
        # Restore the schema default ("0").
        self.__instance = YANGDynClass(
            base=six.text_type,
            default=six.text_type("0"),
            is_leaf=True,
            yang_name="instance",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="string",
            is_config=False,
        )

    def _get_net(self):
        """
        Getter method for net, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/net (oc-isis-types:net)
        YANG Description: ISIS network entity title (NET). The first 8 bits are usually
        49 (private AFI), next 16 bits represent area, next 48 bits represent
        system id and final 8 bits are set to 0.
        """
        return self.__net

    def _set_net(self, v, load=False):
        """
        Setter method for net, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/net (oc-isis-types:net)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_net is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_net() directly.
        YANG Description: ISIS network entity title (NET). The first 8 bits are usually
        49 (private AFI), next 16 bits represent area, next 48 bits represent
        system id and final 8 bits are set to 0.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=TypedListType(
                    allowed_type=RestrictedClassType(
                        base_type=six.text_type,
                        restriction_dict={
                            "pattern": "[a-fA-F0-9]{2}(\\.[a-fA-F0-9]{4}){3,9}\\.[a-fA-F0-9]{2}"
                        },
                    )
                ),
                is_leaf=False,
                yang_name="net",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-isis-types:net",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """net must be of a type compatible with oc-isis-types:net""",
                    "defined-type": "oc-isis-types:net",
                    "generated-type": """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[a-fA-F0-9]{2}(\\.[a-fA-F0-9]{4}){3,9}\\.[a-fA-F0-9]{2}'})), is_leaf=False, yang_name="net", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-isis-types:net', is_config=False)""",
                }
            )
        self.__net = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_net(self):
        # Restore an empty leaf-list (no default NET).
        self.__net = YANGDynClass(
            base=TypedListType(
                allowed_type=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={
                        "pattern": "[a-fA-F0-9]{2}(\\.[a-fA-F0-9]{4}){3,9}\\.[a-fA-F0-9]{2}"
                    },
                )
            ),
            is_leaf=False,
            yang_name="net",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-isis-types:net",
            is_config=False,
        )

    def _get_maximum_area_addresses(self):
        """
        Getter method for maximum_area_addresses, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/maximum_area_addresses (uint8)
        YANG Description: Maximum areas supported.
        """
        return self.__maximum_area_addresses

    def _set_maximum_area_addresses(self, v, load=False):
        """
        Setter method for maximum_area_addresses, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/maximum_area_addresses (uint8)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_maximum_area_addresses is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_maximum_area_addresses() directly.
        YANG Description: Maximum areas supported.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                ),
                default=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                )(
                    3
                ),
                is_leaf=True,
                yang_name="maximum-area-addresses",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="uint8",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """maximum_area_addresses must be of a type compatible with uint8""",
                    "defined-type": "uint8",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), default=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8)(3), is_leaf=True, yang_name="maximum-area-addresses", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)""",
                }
            )
        self.__maximum_area_addresses = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_maximum_area_addresses(self):
        # Restore the schema default (3).
        self.__maximum_area_addresses = YANGDynClass(
            base=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            default=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            )(
                3
            ),
            is_leaf=True,
            yang_name="maximum-area-addresses",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint8",
            is_config=False,
        )

    def _get_level_capability(self):
        """
        Getter method for level_capability, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/level_capability (oc-isis-types:level-type)
        YANG Description: ISIS level capability (level-1, level-2, level-1-2).
        """
        return self.__level_capability

    def _set_level_capability(self, v, load=False):
        """
        Setter method for level_capability, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/level_capability (oc-isis-types:level-type)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_level_capability is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_level_capability() directly.
        YANG Description: ISIS level capability (level-1, level-2, level-1-2).
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"LEVEL_1": {}, "LEVEL_2": {}, "LEVEL_1_2": {}},
                ),
                default=six.text_type("LEVEL_1_2"),
                is_leaf=True,
                yang_name="level-capability",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-isis-types:level-type",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """level_capability must be of a type compatible with oc-isis-types:level-type""",
                    "defined-type": "oc-isis-types:level-type",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'LEVEL_1': {}, 'LEVEL_2': {}, 'LEVEL_1_2': {}},), default=six.text_type("LEVEL_1_2"), is_leaf=True, yang_name="level-capability", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-isis-types:level-type', is_config=False)""",
                }
            )
        self.__level_capability = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_level_capability(self):
        # Restore the schema default (LEVEL_1_2).
        self.__level_capability = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"LEVEL_1": {}, "LEVEL_2": {}, "LEVEL_1_2": {}},
            ),
            default=six.text_type("LEVEL_1_2"),
            is_leaf=True,
            yang_name="level-capability",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-isis-types:level-type",
            is_config=False,
        )

    def _get_max_ecmp_paths(self):
        """
        Getter method for max_ecmp_paths, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/max_ecmp_paths (uint8)
        YANG Description: ISIS max-paths count.
        """
        return self.__max_ecmp_paths

    def _set_max_ecmp_paths(self, v, load=False):
        """
        Setter method for max_ecmp_paths, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/max_ecmp_paths (uint8)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_max_ecmp_paths is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_max_ecmp_paths() directly.
        YANG Description: ISIS max-paths count.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
                ),
                is_leaf=True,
                yang_name="max-ecmp-paths",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="uint8",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """max_ecmp_paths must be of a type compatible with uint8""",
                    "defined-type": "uint8",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name="max-ecmp-paths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint8', is_config=False)""",
                }
            )
        self.__max_ecmp_paths = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_max_ecmp_paths(self):
        # Restore to unset (this leaf has no schema default).
        self.__max_ecmp_paths = YANGDynClass(
            base=RestrictedClassType(
                base_type=int, restriction_dict={"range": ["0..255"]}, int_size=8
            ),
            is_leaf=True,
            yang_name="max-ecmp-paths",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint8",
            is_config=False,
        )

    def _get_poi_tlv(self):
        """
        Getter method for poi_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/poi_tlv (boolean)
        YANG Description: ISIS purge TLV. When set to true, a TLV is added to purges to record
        the system ID of the IS generating the purge.
        """
        return self.__poi_tlv

    def _set_poi_tlv(self, v, load=False):
        """
        Setter method for poi_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/poi_tlv (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_poi_tlv is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_poi_tlv() directly.
        YANG Description: ISIS purge TLV. When set to true, a TLV is added to purges to record
        the system ID of the IS generating the purge.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGBool,
                default=YANGBool("false"),
                is_leaf=True,
                yang_name="poi-tlv",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="boolean",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """poi_tlv must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="poi-tlv", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
                }
            )
        self.__poi_tlv = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_poi_tlv(self):
        # Restore the schema default (false).
        self.__poi_tlv = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="poi-tlv",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )

    def _get_iid_tlv(self):
        """
        Getter method for iid_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/iid_tlv (boolean)
        YANG Description: ISIS Instance Identifier TLV. When set to true, the IID-TLV identifies
        the unique instance as well as the topology/topologies to which the
        PDU applies.
        """
        return self.__iid_tlv

    def _set_iid_tlv(self, v, load=False):
        """
        Setter method for iid_tlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/iid_tlv (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_iid_tlv is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_iid_tlv() directly.
        YANG Description: ISIS Instance Identifier TLV. When set to true, the IID-TLV identifies
        the unique instance as well as the topology/topologies to which the
        PDU applies.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGBool,
                default=YANGBool("false"),
                is_leaf=True,
                yang_name="iid-tlv",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="boolean",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """iid_tlv must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("false"), is_leaf=True, yang_name="iid-tlv", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
                }
            )
        self.__iid_tlv = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_iid_tlv(self):
        # Restore the schema default (false).
        self.__iid_tlv = YANGDynClass(
            base=YANGBool,
            default=YANGBool("false"),
            is_leaf=True,
            yang_name="iid-tlv",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )

    def _get_fast_flooding(self):
        """
        Getter method for fast_flooding, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/fast_flooding (boolean)
        YANG Description: When set to true, IS will always flood the LSP that triggered an SPF
        before the router actually runs the SPF computation.
        """
        return self.__fast_flooding

    def _set_fast_flooding(self, v, load=False):
        """
        Setter method for fast_flooding, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/global/state/fast_flooding (boolean)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_fast_flooding is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_fast_flooding() directly.
        YANG Description: When set to true, IS will always flood the LSP that triggered an SPF
        before the router actually runs the SPF computation.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGBool,
                default=YANGBool("true"),
                is_leaf=True,
                yang_name="fast-flooding",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="boolean",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """fast_flooding must be of a type compatible with boolean""",
                    "defined-type": "boolean",
                    "generated-type": """YANGDynClass(base=YANGBool, default=YANGBool("true"), is_leaf=True, yang_name="fast-flooding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='boolean', is_config=False)""",
                }
            )
        self.__fast_flooding = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_fast_flooding(self):
        # Restore the schema default (true).
        self.__fast_flooding = YANGDynClass(
            base=YANGBool,
            default=YANGBool("true"),
            is_leaf=True,
            yang_name="fast-flooding",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="boolean",
            is_config=False,
        )

    # Read-only accessor properties (config false => getter only).
    authentication_check = __builtin__.property(_get_authentication_check)
    instance = __builtin__.property(_get_instance)
    net = __builtin__.property(_get_net)
    maximum_area_addresses = __builtin__.property(_get_maximum_area_addresses)
    level_capability = __builtin__.property(_get_level_capability)
    max_ecmp_paths = __builtin__.property(_get_max_ecmp_paths)
    poi_tlv = __builtin__.property(_get_poi_tlv)
    iid_tlv = __builtin__.property(_get_iid_tlv)
    fast_flooding = __builtin__.property(_get_fast_flooding)

    # Ordered registry of this container's elements, used by PybindBase.
    _pyangbind_elements = OrderedDict(
        [
            ("authentication_check", authentication_check),
            ("instance", instance),
            ("net", net),
            ("maximum_area_addresses", maximum_area_addresses),
            ("level_capability", level_capability),
            ("max_ecmp_paths", max_ecmp_paths),
            ("poi_tlv", poi_tlv),
            ("iid_tlv", iid_tlv),
            ("fast_flooding", fast_flooding),
        ]
    )
| 42.237933 | 608 | 0.594802 | 8,048 | 74,381 | 5.252237 | 0.032182 | 0.066004 | 0.048356 | 0.05427 | 0.991128 | 0.985995 | 0.985995 | 0.985995 | 0.985995 | 0.985995 | 0 | 0.007207 | 0.302322 | 74,381 | 1,760 | 609 | 42.261932 | 0.807338 | 0.201947 | 0 | 0.887262 | 0 | 0.01757 | 0.270828 | 0.10778 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04246 | false | 0 | 0.010981 | 0 | 0.090044 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
48bfe21a15b2b47cfb7dd3f1d29c3808d6bc8ff6 | 43,759 | py | Python | zxspectrum_video_memory.py | vika-sonne/electronix-python | 50d221a3375010242d4705bc87b8df758a514460 | [
"MIT"
] | null | null | null | zxspectrum_video_memory.py | vika-sonne/electronix-python | 50d221a3375010242d4705bc87b8df758a514460 | [
"MIT"
] | null | null | null | zxspectrum_video_memory.py | vika-sonne/electronix-python | 50d221a3375010242d4705bc87b8df758a514460 | [
"MIT"
] | null | null | null |
import array
from PIL import Image
class VideoDevice:
'''ZX Spectrum video output device
Consists of video memory & export tools to image.
'''
WIDTH, HEIGHT = 256, 192 # pixels
PIXELS_COUNT = WIDTH * HEIGHT
PIXELS_SIZE = (WIDTH // 8) * HEIGHT # memory size (8 pixels/byte), bytes
ATTRIBUTES_SIZE = 0x300 # memory size, bytes
ATTRIBUTES_BASE_ADDRESS = PIXELS_SIZE # device based address
MEMORY_SIZE = PIXELS_SIZE + ATTRIBUTES_SIZE
MEMORY_BASE_ADDRESS = 0x4000 # absolute address
# RGB colors (2 * 8 colors: 0..2 fg bits & 3..5 bg bits): for normal & brightness (6'th bit = 1) for attribute byte processing
PALETTE = (
(0, 0, 0), (0, 0, 0xc0), (0xc0, 0, 0), (0xc0, 0, 0xc0), (0, 0xc0, 0), (0, 0xc0, 0xc0), (0xc0, 0xc0, 0), (0xc0, 0xc0, 0xc0),
(0, 0, 0), (0, 0, 0xff), (0xff, 0, 0), (0xff, 0, 0xff), (0, 0xff, 0), (0, 0xff, 0xff), (0xff, 0xff, 0), (0xff, 0xff, 0xff)
)
def __init__(self):
self.memory = array.array('B', [0] * VideoDevice.MEMORY_SIZE) # video memory, bytes
def _pixel_to_video_memory(x: int, y: int) -> int:
"returns (0..0x17ff) pixel's byte address of video memory from x, y coordinates (0..31, 0..191)"
# |010| Y | X |
# |010|76|210|543|43210|
# | H | L |
return ((y << 5) & 0x1800) | ((y << 8) & 0x700) | ((y << 2) & 0xe0) | (x & 0x1f)
def _attribute_to_video_memory(x: int, y: int) -> int:
"returns (ATTRIBUTES_BASE_ADDRESS..ATTRIBUTES_BASE_ADDRESS+ATTRIBUTES_SIZE) attribute's byte address of video memory from x, y coordinates (0..31, 0..23)"
return VideoDevice.ATTRIBUTES_BASE_ADDRESS + y * 32 + x
def image_get(self, x=0, y=0, dx=32, dy=24) -> Image.Image:
'returns PIL Image from video memory'
if y < 0 or 0 < (y + dy) * 8 > VideoDevice.HEIGHT or x < 0 or 0 < (x + dx) > VideoDevice.WIDTH // 8:
raise Exception('Out of range (0, 0)-(32, 24): ({}, {})-({}, {})',format(x, y, x + dx, y + dy))
pixels_width, pixels_heigh = dx * 8, dy * 8
memory, image_data, colors = self.memory, array.array('B', [0] * ((pixels_width * pixels_heigh) * 3)), VideoDevice.PALETTE
for y2 in range(y * 8, (y + dy) * 8):
# process horizontal line
for x2 in range(x, x + dx):
# process device memory byte - 8 pixels: 7..0
attribute = memory[VideoDevice._attribute_to_video_memory(x2, y2 // 8)]
fg, bg = ((attribute >> 3) & 8) | attribute & 7, ((attribute >> 3) & 8) | ((attribute >> 3) & 7)
fg, bg = colors[fg], colors[bg] # RGB colors
buff = memory[VideoDevice._pixel_to_video_memory(x2, y2)] # device memory next 8 pixels
image_pixel_index = x2 * 8 + y2 * dx * 8 + 7 # image data to copy next 8 pixels
for xx in range(8):
# process bit - pixel
addr = (image_pixel_index - xx) * 3 # pixel address of image data
if (buff >> xx) & 1:
# bit is set # fg
image_data[addr] = fg[0]
image_data[addr + 1] = fg[1]
image_data[addr + 2] = fg[2]
else:
# bit is clear # bg
image_data[addr] = bg[0]
image_data[addr + 1] = bg[1]
image_data[addr + 2] = bg[2]
# image_data[x2 * 8 + (7 - xx) + y2 * 256] = (1 - ((memory[VideoDevice._pixel_to_video_memory(x2, y2)] & (1 << xx)) >> xx)) * 255
return Image.frombytes('RGB', (dx * 8, dy * 8), bytes(image_data))
def image_set(self, img: Image.Image):
    """Fill video memory from a PIL image.

    Each 8x8 pixel cell is quantized to the device's color resolution:
    two colors per cell (foreground/ink, background/paper) plus one
    shared brightness bit, packed into an attribute byte at
    ATTRIBUTES_BASE_ADDRESS + y * 32 + x.

    NOTE(review): assumes img is in an RGB-like mode so getpixel()
    returns an (r, g, b) tuple — an 'L'/'P'-mode image would return an
    int and break the tuple() conversions below; confirm with callers.
    """
    def color_diff(color1: tuple, color2: tuple) -> int:
        'returns (0..) color difference: high - worse'
        # squared Euclidean distance in RGB space (no sqrt; only used for comparisons)
        return (color1[0] - color2[0]) ** 2 + (color1[1] - color2[1]) ** 2 + (color1[2] - color2[2]) ** 2
    def nearest_zx_color(color: tuple) -> int:
        'returns index of nearest color of video device color space'
        # squared distance to every palette entry # eq: high - worse
        color_eq = tuple( (c[0] - color[0]) ** 2 + (c[1] - color[1]) ** 2 + (c[2] - color[2]) ** 2 for c in VideoDevice.PALETTE )
        # get zx color index by minimum color difference
        return color_eq.index(min(color_eq))
    # print(f'{img.width}x{img.height}')
    memory = self.memory
    # process the image by 8x8 pixel blocks
    # calculate attributes according to video device color resolution
    for y in range(img.height // 8):
        for x in range(img.width // 8):
            # count how often each source color occurs in this 8x8 cell
            colors_count = {}
            for yy in range(8):
                for xx in range(8):
                    pixel = img.getpixel((x * 8 + xx, y * 8 + yy))
                    if colors_count.get(pixel):
                        colors_count[pixel] += 1
                    else:
                        colors_count[pixel] = 1
            if len(colors_count) > 2:
                # out of color resolution # keep the two most frequent colors
                color1 = max(colors_count.values()) # get color count
                color1 = tuple(next(k for k, v in colors_count.items() if v == color1))
                del colors_count[color1]
                color2 = max(colors_count.values()) # get color count
                color2 = tuple(next(k for k, v in colors_count.items() if v == color2))
            elif len(colors_count) == 2:
                color1, color2 = tuple(colors_count.keys())
            else:
                # single-color cell: fg and bg are the same color
                color1 = color2 = tuple(colors_count.keys())[0]
            # set attribute byte: bright (bit 6) | paper (bits 5..3) | ink (bits 2..0)
            # bright is set when either chosen palette color is a bright one
            fg, bg = nearest_zx_color(color1), nearest_zx_color(color2)
            memory[VideoDevice.ATTRIBUTES_BASE_ADDRESS + y * 32 + x] = (((bg | fg) & 8) << 3) | ((bg & 7) << 3) | (fg & 7)
            # process pixels horizontal line by line
            for yy in range(8):
                # pack 8 pixels into one byte: bit 7 is the leftmost pixel
                buff = 0
                for xx in range(8):
                    pixel = img.getpixel((x * 8 + xx, y * 8 + yy))
                    if pixel == color1:
                        buff |= 1 << (7 - xx) # fg
                    elif pixel == color2:
                        pass # bg
                    else:
                        # pixel matches neither chosen color: snap to the nearer one
                        if color_diff(pixel, color1) < color_diff(pixel, color2):
                            buff |= 1 << (7 - xx) # fg
                # set pixels
                memory[VideoDevice._pixel_to_video_memory(x, y * 8 + yy)] = buff
def _dump(self, x: int, y: int, bin_format=False):
if bin_format:
# for x in range(8):
# print('{:02X}'.format(x), end=' ')
# print()
for y in range(8):
for x in range(1):
print('{:08b}'.format(self.memory[(y << 8) | x]), end=' ')
print()
else:
for x in range(32):
print('{:02X}'.format(x), end=' ')
print()
for y in range(8):
for x in range(32):
print('{:02x}'.format(self.memory[(y << 8) | x]), end=' ')
print()
if __name__ == '__main__':
import argparse
def speed_test():
    'benchmark image_get: render 100 frames, report mean s/frame and frames/s'
    import sys, time, statistics
    print(f'{sys.version}')
    device = VideoDevice()
    samples = []
    for _ in range(100):
        started = time.process_time()
        device.image_get()
        samples.append(time.process_time() - started)
    mean = statistics.mean(samples)
    print(f'avg={mean:1.3f} s/image; {1 / mean:1.1f} images/s')
def save_video_memory_test(file_name: str):
TEST_VIDEO_MEMORY = [
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 254, 0, 15, 255, 255, 255, 255, 255, 255, 227, 241, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 199, 255, 255, 224, 252, 62, 31, 15, 135, 227, 241, 224, 248, 124, 60, 46, 63, 143, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 227, 255, 248, 227, 248, 255, 255, 31, 255, 227, 241, 227, 252, 120, 255, 15, 255, 203, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 248, 255, 31, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 248, 0, 7, 255, 255, 255, 255, 255, 255, 227, 241, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 199, 255, 255, 227, 252, 127, 31, 31, 227, 227, 241, 227, 252, 120, 126, 14, 63, 199, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 225, 255, 240, 227, 252, 127, 255, 31, 255, 227, 241, 227, 252, 120, 255, 14, 63, 227, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 248, 254, 31, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 240, 127, 147, 255, 255, 255, 255, 255, 255, 227, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 199, 255, 255, 225, 252, 255, 143, 31, 227, 227, 241, 225, 252, 120, 255, 14, 31, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 240, 127, 224, 227, 252, 63, 15, 15, 227, 227, 241, 227, 252, 124, 126, 30, 31, 195, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 248, 24, 63, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 240, 255, 227, 255, 255, 255, 255, 255, 255, 227, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 199, 252, 0, 225, 248, 255, 143, 31, 227, 227, 241, 227, 252, 120, 255, 14, 3, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 249, 6, 1, 227, 252, 8, 31, 131, 3, 225, 241, 227, 252, 124, 16, 47, 7, 7, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252, 0, 127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 225, 255, 225, 255, 255, 255, 255, 255, 255, 195, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 199, 252, 0, 227, 248, 0, 31, 0, 7, 227, 241, 227, 252, 112, 255, 31, 32, 63, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 252, 0, 3, 227, 254, 64, 63, 136, 7, 226, 113, 227, 252, 126, 0, 15, 0, 15, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 65, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 227, 255, 241, 227, 15, 128, 255, 240, 63, 128, 113, 227, 1, 255, 1, 15, 192, 63, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 199, 252, 16, 227, 248, 0, 14, 16, 3, 227, 241, 227, 252, 113, 255, 15, 128, 15, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 0, 47, 227, 255, 0, 255, 224, 31, 224, 113, 227, 252, 127, 33, 31, 200, 31, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 131, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 199, 255, 243, 226, 15, 0, 127, 192, 15, 132, 113, 226, 0, 254, 0, 15, 0, 31, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 199, 255, 248, 227, 248, 136, 78, 32, 67, 227, 241, 227, 252, 112, 255, 15, 225, 7, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 228, 255, 227, 255, 243, 255, 252, 127, 248, 241, 227, 252, 127, 199, 63, 240, 127, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 128, 63, 255, 255, 255, 255, 255, 255, 243, 241, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 199, 255, 255, 224, 14, 2, 63, 128, 71, 128, 113, 224, 8, 252, 4, 30, 1, 15, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 195, 255, 248, 227, 248, 255, 255, 31, 255, 227, 241, 227, 252, 121, 255, 31, 252, 7, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 31, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 40, 180, 188, 146, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 61, 253, 251, 223, 127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 122, 235, 89, 205, 127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 61, 247, 187, 222, 127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 185, 231, 221, 207, 127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 167, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 123, 239, 153, 238, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 79, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 61, 247, 187, 223, 127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 159, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 185, 244, 189, 206, 127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 247, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 91, 122, 123, 84, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 123, 242, 121, 238, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 243, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 249, 255, 231, 254, 239, 243, 223, 243, 159, 239, 159, 207, 31, 215, 63, 223, 61, 127, 159, 254, 127, 222, 126, 127, 63, 255, 255, 255,
255, 255, 255, 255, 254, 149, 249, 35, 248, 163, 228, 151, 228, 135, 228, 159, 34, 159, 18, 191, 243, 239, 233, 95, 197, 63, 127, 127, 127, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 247, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 251, 255, 247, 252, 207, 251, 132, 147, 191, 247, 223, 223, 127, 231, 191, 207, 126, 255, 191, 255, 127, 239, 127, 127, 191, 255, 255, 255,
255, 255, 255, 255, 255, 35, 254, 79, 249, 23, 249, 63, 249, 111, 233, 207, 180, 127, 180, 255, 227, 207, 242, 63, 200, 254, 126, 127, 191, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 251, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 253, 255, 231, 254, 223, 243, 200, 33, 223, 231, 159, 239, 191, 247, 63, 239, 156, 255, 223, 254, 127, 206, 126, 127, 63, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 191, 247, 255, 255, 63, 255, 63, 255, 239, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 167, 254, 159, 253, 51, 250, 95, 244, 239, 191, 223, 122, 127, 116, 253, 255, 127, 250, 127, 233, 254, 233, 233, 255, 255, 255, 255,
255, 255, 255, 255, 249, 255, 199, 252, 239, 247, 159, 255, 159, 239, 223, 207, 63, 231, 191, 207, 156, 255, 159, 255, 127, 222, 255, 127, 191, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 223, 199, 255, 255, 191, 255, 191, 255, 215, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 254, 137, 250, 39, 244, 75, 244, 143, 233, 55, 223, 239, 168, 191, 169, 62, 255, 191, 232, 159, 162, 63, 82, 210, 127, 255, 255, 255,
255, 255, 255, 255, 251, 254, 239, 253, 207, 251, 207, 255, 223, 247, 159, 223, 95, 215, 63, 223, 217, 255, 191, 238, 127, 239, 126, 127, 63, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 143, 175, 255, 255, 63, 255, 63, 255, 239, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 253, 57, 244, 231, 233, 211, 235, 215, 215, 167, 159, 207, 23, 63, 30, 94, 126, 63, 211, 159, 78, 94, 44, 172, 255, 255, 255, 255,
255, 255, 255, 255, 253, 255, 119, 250, 223, 243, 223, 255, 159, 231, 191, 239, 159, 239, 191, 175, 205, 255, 223, 246, 255, 142, 126, 255, 127, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 228, 159, 255, 255, 191, 255, 127, 254, 143, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 250, 254, 107, 249, 247, 227, 223, 231, 175, 199, 191, 223, 95, 207, 95, 158, 255, 127, 175, 230, 191, 158, 190, 62, 63, 255, 255, 255,
255, 255, 255, 255, 248, 252, 227, 253, 231, 235, 199, 251, 207, 215, 223, 143, 31, 207, 63, 223, 225, 255, 143, 199, 63, 223, 127, 127, 191, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 232, 63, 255, 255, 63, 255, 191, 255, 31, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 253, 254, 119, 249, 207, 251, 239, 251, 223, 231, 223, 239, 159, 207, 191, 207, 62, 127, 223, 231, 127, 207, 127, 63, 191, 255, 255, 255,
255, 255, 255, 255, 254, 122, 245, 233, 231, 211, 243, 227, 215, 167, 143, 79, 151, 79, 143, 31, 203, 255, 231, 175, 62, 158, 126, 127, 63, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 87, 87, 71, 71, 71, 71, 71, 71, 87, 87, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 87, 87, 87, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 103, 103, 103, 103, 103, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 103, 103, 103, 103, 103, 103, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 103, 103, 103, 103, 103, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 79, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 71, 71, 71,
71, 71, 71, 71, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 71, 71, 71,
71, 71, 71, 71, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71
]
vd = VideoDevice()
vd.memory = array.array('B', TEST_VIDEO_MEMORY)
img = vd.image_get()
img.save(file_name)
def save_test_image(file_name: str):
from random import randint
from PIL import ImageDraw
img = Image.new('RGB', (VideoDevice.WIDTH, VideoDevice.HEIGHT))
d = ImageDraw.Draw(img)
for y in range(0, 180, 10):
for x in range(0, 256, 128):
d.text((x, y), "Hello World", fill=(randint(0, 256), randint(0, 256), randint(0, 256)))
img.save(file_name)
def load_and_save_image(file_name_load: str, file_name_save: str):
vd = VideoDevice()
img = Image.open(file_name_load)
vd.image_set(img)
img = vd.image_get()
img.save(file_name_save)
def parse_args():
parser = argparse.ArgumentParser(description='ZX Spectrum video utility. It allow import/export image from/to file')
subparsers = parser.add_subparsers(dest='cmd', help='choose subcomand to run')
subparser = subparsers.add_parser('s', help='run speed test to calculate "get image" operations per second')
subparser = subparsers.add_parser('w', help='generate "Hello World" labels with random colors and save to image file')
subparser.add_argument('-f', metavar='FILE_NAME', default='hello_world.png', help='image file name to save; default: hello_world.png')
subparser.add_argument('-l', action='store_true', help='load generated image file to ZX video memory and save video memory to image file')
subparser.add_argument('-s', metavar='FILE_NAME', default='hello_world_zx.png', help='image file name to save ZX video memory; default: hello_world_zx.png')
subparser = subparsers.add_parser('m', help='load test data into video memory')
subparser.add_argument('-s', metavar='FILE_NAME', default='video_device_test.png', help='image file name to save ZX video memory; default: video_device_test.png')
subparser = subparsers.add_parser('c', help='convert image with ZX video subsystem; image must have size 256x192 pixels')
subparser.add_argument('-f', metavar='FILE_NAME', default='test.png', help='image file name to load to ZX video memory; default: test.png')
subparser.add_argument('-s', metavar='FILE_NAME', default='test_zx.png', help='image file name to save from ZX video memory; default: test_zx.png')
ret = parser.parse_args()
return ret
args = parse_args()
if args.cmd == 's':
speed_test()
elif args.cmd == 'w':
save_test_image(args.f)
if args.l:
load_and_save_image(args.f, args.s)
elif args.cmd == 'm':
save_video_memory_test(args.s)
elif args.cmd == 'c':
load_and_save_image(args.f, args.s)
| 99.678815 | 164 | 0.58893 | 8,374 | 43,759 | 3.054932 | 0.043468 | 1.230396 | 1.814987 | 2.384333 | 0.783911 | 0.765812 | 0.754984 | 0.744508 | 0.720194 | 0.716519 | 0 | 0.590532 | 0.221394 | 43,759 | 438 | 165 | 99.906393 | 0.160312 | 0.04102 | 0 | 0.459318 | 0 | 0.007874 | 0.035389 | 0.004323 | 0 | 0 | 0.003118 | 0 | 0 | 1 | 0.034121 | false | 0.002625 | 0.018373 | 0 | 0.089239 | 0.020997 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
48c08acac80e3e128913dd2a38e045cd36045e3d | 7,461 | py | Python | sdk/python/pulumi_gcp/notebooks/outputs.py | dimpu47/pulumi-gcp | 38355de300a5768e11c49d344a8165ba0735deed | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_gcp/notebooks/outputs.py | dimpu47/pulumi-gcp | 38355de300a5768e11c49d344a8165ba0735deed | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_gcp/notebooks/outputs.py | dimpu47/pulumi-gcp | 38355de300a5768e11c49d344a8165ba0735deed | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
__all__ = [
'EnvironmentContainerImage',
'EnvironmentVmImage',
'InstanceAcceleratorConfig',
'InstanceContainerImage',
'InstanceVmImage',
]
@pulumi.output_type
class EnvironmentContainerImage(dict):
def __init__(__self__, *,
repository: str,
tag: Optional[str] = None):
"""
:param str repository: The path to the container image repository.
For example: gcr.io/{project_id}/{imageName}
:param str tag: The tag of the container image. If not specified, this defaults to the latest tag.
"""
pulumi.set(__self__, "repository", repository)
if tag is not None:
pulumi.set(__self__, "tag", tag)
@property
@pulumi.getter
def repository(self) -> str:
"""
The path to the container image repository.
For example: gcr.io/{project_id}/{imageName}
"""
return pulumi.get(self, "repository")
@property
@pulumi.getter
def tag(self) -> Optional[str]:
"""
The tag of the container image. If not specified, this defaults to the latest tag.
"""
return pulumi.get(self, "tag")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class EnvironmentVmImage(dict):
def __init__(__self__, *,
project: str,
image_family: Optional[str] = None,
image_name: Optional[str] = None):
"""
:param str project: The name of the Google Cloud project that this VM image belongs to.
Format: projects/{project_id}
:param str image_family: Use this VM image family to find the image; the newest image in this family will be used.
:param str image_name: Use VM image name to find the image.
"""
pulumi.set(__self__, "project", project)
if image_family is not None:
pulumi.set(__self__, "image_family", image_family)
if image_name is not None:
pulumi.set(__self__, "image_name", image_name)
@property
@pulumi.getter
def project(self) -> str:
"""
The name of the Google Cloud project that this VM image belongs to.
Format: projects/{project_id}
"""
return pulumi.get(self, "project")
@property
@pulumi.getter(name="imageFamily")
def image_family(self) -> Optional[str]:
"""
Use this VM image family to find the image; the newest image in this family will be used.
"""
return pulumi.get(self, "image_family")
@property
@pulumi.getter(name="imageName")
def image_name(self) -> Optional[str]:
"""
Use VM image name to find the image.
"""
return pulumi.get(self, "image_name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceAcceleratorConfig(dict):
def __init__(__self__, *,
core_count: float,
type: str):
"""
:param float core_count: Count of cores of this accelerator.
:param str type: Type of this accelerator.
Possible values are `ACCELERATOR_TYPE_UNSPECIFIED`, `NVIDIA_TESLA_K80`, `NVIDIA_TESLA_P100`, `NVIDIA_TESLA_V100`, `NVIDIA_TESLA_P4`, `NVIDIA_TESLA_T4`, `NVIDIA_TESLA_T4_VWS`, `NVIDIA_TESLA_P100_VWS`, `NVIDIA_TESLA_P4_VWS`, `TPU_V2`, and `TPU_V3`.
"""
pulumi.set(__self__, "core_count", core_count)
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="coreCount")
def core_count(self) -> float:
"""
Count of cores of this accelerator.
"""
return pulumi.get(self, "core_count")
@property
@pulumi.getter
def type(self) -> str:
"""
Type of this accelerator.
Possible values are `ACCELERATOR_TYPE_UNSPECIFIED`, `NVIDIA_TESLA_K80`, `NVIDIA_TESLA_P100`, `NVIDIA_TESLA_V100`, `NVIDIA_TESLA_P4`, `NVIDIA_TESLA_T4`, `NVIDIA_TESLA_T4_VWS`, `NVIDIA_TESLA_P100_VWS`, `NVIDIA_TESLA_P4_VWS`, `TPU_V2`, and `TPU_V3`.
"""
return pulumi.get(self, "type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceContainerImage(dict):
def __init__(__self__, *,
repository: str,
tag: Optional[str] = None):
"""
:param str repository: The path to the container image repository.
For example: gcr.io/{project_id}/{imageName}
:param str tag: The tag of the container image. If not specified, this defaults to the latest tag.
"""
pulumi.set(__self__, "repository", repository)
if tag is not None:
pulumi.set(__self__, "tag", tag)
@property
@pulumi.getter
def repository(self) -> str:
"""
The path to the container image repository.
For example: gcr.io/{project_id}/{imageName}
"""
return pulumi.get(self, "repository")
@property
@pulumi.getter
def tag(self) -> Optional[str]:
"""
The tag of the container image. If not specified, this defaults to the latest tag.
"""
return pulumi.get(self, "tag")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceVmImage(dict):
def __init__(__self__, *,
project: str,
image_family: Optional[str] = None,
image_name: Optional[str] = None):
"""
:param str project: The name of the Google Cloud project that this VM image belongs to.
Format: projects/{project_id}
:param str image_family: Use this VM image family to find the image; the newest image in this family will be used.
:param str image_name: Use VM image name to find the image.
"""
pulumi.set(__self__, "project", project)
if image_family is not None:
pulumi.set(__self__, "image_family", image_family)
if image_name is not None:
pulumi.set(__self__, "image_name", image_name)
@property
@pulumi.getter
def project(self) -> str:
"""
The name of the Google Cloud project that this VM image belongs to.
Format: projects/{project_id}
"""
return pulumi.get(self, "project")
@property
@pulumi.getter(name="imageFamily")
def image_family(self) -> Optional[str]:
"""
Use this VM image family to find the image; the newest image in this family will be used.
"""
return pulumi.get(self, "image_family")
@property
@pulumi.getter(name="imageName")
def image_name(self) -> Optional[str]:
"""
Use VM image name to find the image.
"""
return pulumi.get(self, "image_name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| 34.382488 | 261 | 0.624983 | 922 | 7,461 | 4.816703 | 0.135575 | 0.044585 | 0.035127 | 0.05134 | 0.833146 | 0.833146 | 0.820086 | 0.820086 | 0.820086 | 0.820086 | 0 | 0.006461 | 0.273958 | 7,461 | 216 | 262 | 34.541667 | 0.813365 | 0.358531 | 0 | 0.773913 | 1 | 0 | 0.082276 | 0.016925 | 0 | 0 | 0 | 0 | 0 | 1 | 0.191304 | false | 0 | 0.043478 | 0.043478 | 0.426087 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
48f9e937bdae1e0085b2506d3e29fed795604809 | 6,772 | py | Python | tests/unitary/LiquidityGaugeV4/test_transferFrom.py | hedgx/ribbonomics | 84a212a82eaaa2824ebe3c072413e143eaca02a2 | [
"MIT"
] | 2 | 2022-01-13T21:11:30.000Z | 2022-03-10T08:20:42.000Z | tests/unitary/LiquidityGaugeV4/test_transferFrom.py | hedgx/ribbonomics | 84a212a82eaaa2824ebe3c072413e143eaca02a2 | [
"MIT"
] | null | null | null | tests/unitary/LiquidityGaugeV4/test_transferFrom.py | hedgx/ribbonomics | 84a212a82eaaa2824ebe3c072413e143eaca02a2 | [
"MIT"
] | 2 | 2022-01-30T20:54:55.000Z | 2022-03-05T17:49:19.000Z | #!/usr/bin/python3
import brownie
import pytest
@pytest.fixture(scope="module", autouse=True)
def setup(accounts, gauge_controller, minter, gauge_v4, token, mock_lp_token):
token.set_minter(minter, {"from": accounts[0]})
gauge_controller.add_type(b"Liquidity", 10 ** 10, {"from": accounts[0]})
gauge_controller.add_gauge(gauge_v4, 0, 0, {"from": accounts[0]})
mock_lp_token.approve(gauge_v4, 2 ** 256 - 1, {"from": accounts[0]})
gauge_v4.deposit(10 ** 18, {"from": accounts[0]})
def test_sender_balance_decreases(accounts, gauge_v4):
sender_balance = gauge_v4.balanceOf(accounts[0])
amount = sender_balance // 4
gauge_v4.approve(accounts[1], amount, {"from": accounts[0]})
gauge_v4.transferFrom(accounts[0], accounts[2], amount, {"from": accounts[1]})
assert gauge_v4.balanceOf(accounts[0]) == sender_balance - amount
def test_receiver_balance_increases(accounts, gauge_v4):
receiver_balance = gauge_v4.balanceOf(accounts[2])
amount = gauge_v4.balanceOf(accounts[0]) // 4
gauge_v4.approve(accounts[1], amount, {"from": accounts[0]})
gauge_v4.transferFrom(accounts[0], accounts[2], amount, {"from": accounts[1]})
assert gauge_v4.balanceOf(accounts[2]) == receiver_balance + amount
def test_caller_balance_not_affected(accounts, gauge_v4):
caller_balance = gauge_v4.balanceOf(accounts[1])
amount = gauge_v4.balanceOf(accounts[0])
gauge_v4.approve(accounts[1], amount, {"from": accounts[0]})
gauge_v4.transferFrom(accounts[0], accounts[2], amount, {"from": accounts[1]})
assert gauge_v4.balanceOf(accounts[1]) == caller_balance
def test_caller_approval_affected(accounts, gauge_v4):
approval_amount = gauge_v4.balanceOf(accounts[0])
transfer_amount = approval_amount // 4
gauge_v4.approve(accounts[1], approval_amount, {"from": accounts[0]})
gauge_v4.transferFrom(accounts[0], accounts[2], transfer_amount, {"from": accounts[1]})
assert gauge_v4.allowance(accounts[0], accounts[1]) == approval_amount - transfer_amount
def test_receiver_approval_not_affected(accounts, gauge_v4):
approval_amount = gauge_v4.balanceOf(accounts[0])
transfer_amount = approval_amount // 4
gauge_v4.approve(accounts[1], approval_amount, {"from": accounts[0]})
gauge_v4.approve(accounts[2], approval_amount, {"from": accounts[0]})
gauge_v4.transferFrom(accounts[0], accounts[2], transfer_amount, {"from": accounts[1]})
assert gauge_v4.allowance(accounts[0], accounts[2]) == approval_amount
def test_total_supply_not_affected(accounts, gauge_v4):
total_supply = gauge_v4.totalSupply()
amount = gauge_v4.balanceOf(accounts[0])
gauge_v4.approve(accounts[1], amount, {"from": accounts[0]})
gauge_v4.transferFrom(accounts[0], accounts[2], amount, {"from": accounts[1]})
assert gauge_v4.totalSupply() == total_supply
def test_returns_true(accounts, gauge_v4):
amount = gauge_v4.balanceOf(accounts[0])
gauge_v4.approve(accounts[1], amount, {"from": accounts[0]})
tx = gauge_v4.transferFrom(accounts[0], accounts[2], amount, {"from": accounts[1]})
assert tx.return_value is True
def test_transfer_full_balance(accounts, gauge_v4):
amount = gauge_v4.balanceOf(accounts[0])
receiver_balance = gauge_v4.balanceOf(accounts[2])
gauge_v4.approve(accounts[1], amount, {"from": accounts[0]})
gauge_v4.transferFrom(accounts[0], accounts[2], amount, {"from": accounts[1]})
assert gauge_v4.balanceOf(accounts[0]) == 0
assert gauge_v4.balanceOf(accounts[2]) == receiver_balance + amount
def test_transfer_zero_tokens(accounts, gauge_v4):
sender_balance = gauge_v4.balanceOf(accounts[0])
receiver_balance = gauge_v4.balanceOf(accounts[2])
gauge_v4.approve(accounts[1], sender_balance, {"from": accounts[0]})
gauge_v4.transferFrom(accounts[0], accounts[2], 0, {"from": accounts[1]})
assert gauge_v4.balanceOf(accounts[0]) == sender_balance
assert gauge_v4.balanceOf(accounts[2]) == receiver_balance
def test_transfer_zero_tokens_without_approval(accounts, gauge_v4):
sender_balance = gauge_v4.balanceOf(accounts[0])
receiver_balance = gauge_v4.balanceOf(accounts[2])
gauge_v4.transferFrom(accounts[0], accounts[2], 0, {"from": accounts[1]})
assert gauge_v4.balanceOf(accounts[0]) == sender_balance
assert gauge_v4.balanceOf(accounts[2]) == receiver_balance
def test_insufficient_balance(accounts, gauge_v4):
balance = gauge_v4.balanceOf(accounts[0])
gauge_v4.approve(accounts[1], balance + 1, {"from": accounts[0]})
with brownie.reverts():
gauge_v4.transferFrom(accounts[0], accounts[2], balance + 1, {"from": accounts[1]})
def test_insufficient_approval(accounts, gauge_v4):
balance = gauge_v4.balanceOf(accounts[0])
gauge_v4.approve(accounts[1], balance - 1, {"from": accounts[0]})
with brownie.reverts():
gauge_v4.transferFrom(accounts[0], accounts[2], balance, {"from": accounts[1]})
def test_no_approval(accounts, gauge_v4):
balance = gauge_v4.balanceOf(accounts[0])
with brownie.reverts():
gauge_v4.transferFrom(accounts[0], accounts[2], balance, {"from": accounts[1]})
def test_infinite_approval(accounts, gauge_v4):
gauge_v4.approve(accounts[1], 2 ** 256 - 1, {"from": accounts[0]})
gauge_v4.transferFrom(accounts[0], accounts[2], 10000, {"from": accounts[1]})
assert gauge_v4.allowance(accounts[0], accounts[1]) == 2 ** 256 - 1
def test_revoked_approval(accounts, gauge_v4):
balance = gauge_v4.balanceOf(accounts[0])
gauge_v4.approve(accounts[1], balance, {"from": accounts[0]})
gauge_v4.approve(accounts[1], 0, {"from": accounts[0]})
with brownie.reverts():
gauge_v4.transferFrom(accounts[0], accounts[2], balance, {"from": accounts[1]})
def test_transfer_to_self(accounts, gauge_v4):
sender_balance = gauge_v4.balanceOf(accounts[0])
amount = sender_balance // 4
gauge_v4.approve(accounts[0], sender_balance, {"from": accounts[0]})
gauge_v4.transferFrom(accounts[0], accounts[0], amount, {"from": accounts[0]})
assert gauge_v4.balanceOf(accounts[0]) == sender_balance
assert gauge_v4.allowance(accounts[0], accounts[0]) == sender_balance - amount
def test_transfer_to_self_no_approval(accounts, gauge_v4):
amount = gauge_v4.balanceOf(accounts[0])
with brownie.reverts():
gauge_v4.transferFrom(accounts[0], accounts[0], amount, {"from": accounts[0]})
def test_transfer_event_fires(accounts, gauge_v4):
amount = gauge_v4.balanceOf(accounts[0])
gauge_v4.approve(accounts[1], amount, {"from": accounts[0]})
tx = gauge_v4.transferFrom(accounts[0], accounts[2], amount, {"from": accounts[1]})
assert tx.events["Transfer"].values() == [accounts[0], accounts[2], amount]
| 37.005464 | 92 | 0.715298 | 914 | 6,772 | 5.080963 | 0.084245 | 0.143196 | 0.11025 | 0.165375 | 0.853144 | 0.796512 | 0.77627 | 0.742463 | 0.736003 | 0.72696 | 0 | 0.046508 | 0.133196 | 6,772 | 182 | 93 | 37.208791 | 0.744634 | 0.00251 | 0 | 0.54955 | 0 | 0 | 0.027095 | 0 | 0 | 0 | 0 | 0 | 0.153153 | 1 | 0.171171 | false | 0 | 0.018018 | 0 | 0.189189 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d2b461ba59173b56488d8d2d92f79fde9638df38 | 7,542 | py | Python | attack/MDAttack.py | leeyegy/pytorch-cifar | a8a39891c7f7a86b636cfc615a431a761dfd74c6 | [
"MIT"
] | null | null | null | attack/MDAttack.py | leeyegy/pytorch-cifar | a8a39891c7f7a86b636cfc615a431a761dfd74c6 | [
"MIT"
] | null | null | null | attack/MDAttack.py | leeyegy/pytorch-cifar | a8a39891c7f7a86b636cfc615a431a761dfd74c6 | [
"MIT"
] | null | null | null | import time
import sys
import numpy as np
import torch
import torch.optim as optim
from torch.autograd import Variable
import torch.nn as nn
def one_hot_tensor(y_batch_tensor, num_classes):
y_tensor = torch.cuda.FloatTensor(y_batch_tensor.size(0),
num_classes).fill_(0)
y_tensor[np.arange(len(y_batch_tensor)), y_batch_tensor] = 1.0
return y_tensor
def adv_check_and_update(X_cur, logits, y, not_correct, X_adv):
adv_pred = logits.max(1)[1]
nc = (adv_pred != y.data)
not_correct += nc.long()
X_adv[nc] = X_cur[nc]
def MD_attack(model,
X,
y,
epsilon=8. / 255.,
num_steps=40,
step_size=2. / 255.,
num_random_starts=1,
v_min=0.,
v_max=1.,
change_point=20,
first_step_size=16./255.,
num_classes=10):
epsilon = epsilon * (v_max - v_min)
step_size = step_size * (v_max - v_min)
first_step_size = first_step_size * (v_max - v_min)
assert num_steps >= change_point, 'step number must be greater than change point {}'.format(change_point)
nat_logits = model(X)
nat_pred = nat_logits.max(dim=1)[1]
nat_correct = (nat_pred == y).squeeze()
y_gt = one_hot_tensor(y, num_classes)
not_correct = torch.zeros_like(y)
X_adv = X.detach().clone()
for _ in range(max(num_random_starts, 1)):
for r in range(2):
X_pgd = Variable(X.data, requires_grad=True)
if num_random_starts:
random_noise = torch.FloatTensor(*X_pgd.shape).uniform_(-epsilon, epsilon).cuda()
X_pgd = Variable(X_pgd.data + random_noise, requires_grad=True)
for i in range(num_steps):
with torch.enable_grad():
logits = model(X_pgd)
z_max, max_idx = torch.max(logits * (1 - y_gt) - y_gt * 100000, dim=1)
z_y, _ = torch.max(logits * y_gt - (1 - y_gt) * 100000, dim=1)
if i<1:
loss_per_sample = z_y
elif i < change_point:
loss_per_sample = z_max if r else -z_y
else:
loss_per_sample = z_max - z_y
loss = torch.mean(loss_per_sample)
adv_check_and_update(X_pgd, logits, y, not_correct, X_adv)
loss.backward()
if i < 1:
eta = 2 * epsilon * X_pgd.grad.data.sign()
elif i < change_point:
eta = first_step_size * X_pgd.grad.data.sign()
else:
eta = step_size * X_pgd.grad.data.sign()
X_pgd = X_pgd.detach() + eta.detach()
X_pgd = torch.min(torch.max(X_pgd, X - epsilon), X + epsilon)
X_pgd = Variable(torch.clamp(X_pgd, v_min, v_max), requires_grad=True)
adv_check_and_update(X_pgd, model(X_pgd), y, not_correct, X_adv)
adv_correct = (not_correct == 0).squeeze()
return nat_correct, adv_correct, X_adv.detach().cpu().numpy()
def MDMT_attack(model,
X,
y,
epsilon=8. / 255.,
num_steps=40,
step_size=2. / 255.,
v_min=0.,
v_max=1.,
change_point=20,
first_step_size = 16. / 255.,
num_classes=10):
epsilon = epsilon * (v_max - v_min)
step_size = step_size * (v_max - v_min)
first_step_size = first_step_size * (v_max - v_min)
assert num_steps >= change_point, 'step number must be greater than change point {}'.format(change_point)
nat_logits = model(X)
nat_pred = nat_logits.max(dim=1)[1]
nat_correct = (nat_pred == y).squeeze()
not_correct = torch.zeros_like(y)
X_adv = X.detach().clone()
for t in range(num_classes):
targets = torch.zeros_like(y)
targets += t
y_tg = one_hot_tensor(targets, num_classes)
y_gt = one_hot_tensor(y, num_classes)
for r in range(2):
X_pgd = Variable(X.data, requires_grad=True)
random_noise = torch.FloatTensor(*X_pgd.shape).uniform_(-epsilon, epsilon).cuda()
X_pgd = Variable(X_pgd.data + random_noise, requires_grad=True)
for i in range(num_steps):
with torch.enable_grad():
logits = model(X_pgd)
z_t, _ = torch.max(y_tg * logits - (1 - y_tg) * 10000, dim=1)
z_y, _ = torch.max(y_gt * logits - (1 - y_gt) * 10000, dim=1)
if i<1:
loss = torch.mean(z_y)
elif i < change_point:
loss = torch.mean(z_t) if r else torch.mean(- z_y)
else:
loss = torch.mean(z_t - z_y)
adv_check_and_update(X_pgd, logits, y, not_correct, X_adv)
loss.backward()
if i < 1:
eta = 2 * epsilon * X_pgd.grad.data.sign()
elif i < change_point:
eta = first_step_size * X_pgd.grad.data.sign()
else:
eta = step_size * X_pgd.grad.data.sign()
X_pgd = X_pgd.detach() + eta.detach()
X_pgd = torch.min(torch.max(X_pgd, X - epsilon), X + epsilon)
X_pgd = Variable(torch.clamp(X_pgd, v_min, v_max), requires_grad=True)
adv_check_and_update(X_pgd, model(X_pgd), y, not_correct, X_adv)
adv_correct = (not_correct == 0).squeeze()
return nat_correct, adv_correct, X_adv.detach().cpu().numpy()
def PGD_attack(model,
X,
y,
epsilon=8. / 255.,
num_steps=40,
step_size=2. / 255.,
num_random_starts=2,
v_min=0.,
v_max=1.):
epsilon = epsilon * (v_max - v_min)
step_size = step_size * (v_max - v_min)
nat_logits = model(X)
nat_pred = nat_logits.max(dim=1)[1]
nat_correct = (nat_pred == y).squeeze()
y_gt = one_hot_tensor(y, 10)
not_correct = torch.zeros_like(y)
X_adv = X.detach().clone()
for _ in range(max(num_random_starts, 1)):
X_pgd = Variable(X.data, requires_grad=True)
if num_random_starts:
random_noise = torch.FloatTensor(*X_pgd.shape).uniform_(-epsilon, epsilon).cuda()
X_pgd = Variable(X_pgd.data + random_noise, requires_grad=True)
for i in range(num_steps):
with torch.enable_grad():
logits = model(X_pgd)
loss = nn.CrossEntropyLoss(reduction='none')(logits, y)
loss = torch.mean(loss)
adv_check_and_update(X_pgd, logits, y, not_correct, X_adv)
loss.backward()
eta = step_size * X_pgd.grad.data.sign()
X_pgd = X_pgd.detach() + eta.detach()
X_pgd = torch.min(torch.max(X_pgd, X - epsilon), X + epsilon)
X_pgd = Variable(torch.clamp(X_pgd, v_min, v_max), requires_grad=True)
adv_check_and_update(X_pgd, model(X_pgd), y, not_correct, X_adv)
adv_correct = (not_correct == 0).squeeze()
return nat_correct, adv_correct, X_adv.detach().cpu().numpy() | 38.090909 | 110 | 0.532352 | 1,023 | 7,542 | 3.621701 | 0.112414 | 0.052901 | 0.02969 | 0.017274 | 0.833738 | 0.79919 | 0.775978 | 0.764103 | 0.754386 | 0.754386 | 0 | 0.02342 | 0.360249 | 7,542 | 198 | 111 | 38.090909 | 0.744456 | 0 | 0 | 0.745342 | 0 | 0 | 0.013613 | 0 | 0 | 0 | 0 | 0 | 0.012422 | 1 | 0.031056 | false | 0 | 0.043478 | 0 | 0.099379 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
d2dadcc015d971fc6c972b1abc92a81fa2a873e8 | 6,844 | py | Python | test/test_simple_single_in_multi_out_architectures.py | kundajelab/fastISM | 1573feccba1ad5d9f1cee508f5bb03c4aa09bb2b | [
"MIT"
] | 12 | 2020-09-20T17:03:48.000Z | 2022-03-16T06:51:52.000Z | test/test_simple_single_in_multi_out_architectures.py | kundajelab/fastISM | 1573feccba1ad5d9f1cee508f5bb03c4aa09bb2b | [
"MIT"
] | 5 | 2020-10-24T20:43:45.000Z | 2022-02-25T19:40:47.000Z | test/test_simple_single_in_multi_out_architectures.py | kundajelab/fastISM | 1573feccba1ad5d9f1cee508f5bb03c4aa09bb2b | [
"MIT"
] | 2 | 2020-10-14T05:18:55.000Z | 2022-02-21T07:34:14.000Z | import tensorflow as tf
import unittest
from context import fastISM
class TestSimpleSingleInMultiOutArchitectures(unittest.TestCase):
def test_conv_two_fc(self):
# /- D -> y1
# inp -> C
# \_ D -> y2
inp = tf.keras.Input((100, 4))
x = tf.keras.layers.Conv1D(20, 3)(inp)
x = tf.keras.layers.Flatten()(x)
y1 = tf.keras.layers.Dense(1)(x)
y2 = tf.keras.layers.Dense(1)(x)
model = tf.keras.Model(inputs=inp, outputs=[y1, y2])
fast_ism_model = fastISM.FastISM(
model, test_correctness=False)
self.assertTrue(fast_ism_model.test_correctness())
def test_conv_three_fc(self):
# /- D -> y1
# inp -> C - D -> y2
# \_ D -> y3
inp = tf.keras.Input((100, 4))
x = tf.keras.layers.Conv1D(20, 3)(inp)
x = tf.keras.layers.Flatten()(x)
y1 = tf.keras.layers.Dense(1)(x)
y2 = tf.keras.layers.Dense(1)(x)
y3 = tf.keras.layers.Dense(1)(x)
model = tf.keras.Model(inputs=inp, outputs=[y1, y2, y3])
fast_ism_model = fastISM.FastISM(
model, test_correctness=False)
self.assertTrue(fast_ism_model.test_correctness())
def test_conv_fc_two_head(self):
# inp -> C -> D -> D -> y1
# \_ D -> y2
inp = tf.keras.Input((100, 4))
x = tf.keras.layers.Conv1D(20, 3)(inp)
x = tf.keras.layers.Flatten()(x)
x = tf.keras.layers.Dense(10)(x)
y1 = tf.keras.layers.Dense(1)(x)
y2 = tf.keras.layers.Dense(1)(x)
model = tf.keras.Model(inputs=inp, outputs=[y1, y2])
fast_ism_model = fastISM.FastISM(
model, test_correctness=False)
self.assertTrue(fast_ism_model.test_correctness())
def test_two_conv_fc_per_conv(self):
# /- D -> y1
# inp -> C
# \_ C -> D -> y2
inp = tf.keras.Input((100, 4))
x1 = tf.keras.layers.Conv1D(20, 3)(inp)
x2 = tf.keras.layers.Conv1D(20, 3)(x1)
x1f = tf.keras.layers.Flatten()(x1)
x2f = tf.keras.layers.Flatten()(x2)
y1 = tf.keras.layers.Dense(1)(x1f)
y2 = tf.keras.layers.Dense(1)(x2f)
model = tf.keras.Model(inputs=inp, outputs=[y1, y2])
fast_ism_model = fastISM.FastISM(
model, test_correctness=False)
self.assertTrue(fast_ism_model.test_correctness())
def test_three_conv_maxpool_fc_per_conv(self):
# /- D -> y1
# inp -> C -> MX -> C -> MX -> C -> MX -> D -> y2
# \_ C -> D -> y3
inp = tf.keras.Input((100, 4))
x1 = tf.keras.layers.Conv1D(20, 3, padding='same')(inp)
x1 = tf.keras.layers.MaxPool1D(2)(x1)
x2 = tf.keras.layers.Conv1D(10, 4, padding='same')(x1)
x2 = tf.keras.layers.MaxPool1D(2)(x2)
x3 = tf.keras.layers.Conv1D(10, 3)(x2)
x3 = tf.keras.layers.MaxPool1D(3)(x3)
x1f = tf.keras.layers.Flatten()(x1)
x2f = tf.keras.layers.Flatten()(x2)
x3f = tf.keras.layers.Flatten()(x3)
y1 = tf.keras.layers.Dense(1)(x1f)
y2 = tf.keras.layers.Dense(1)(x2f)
y3 = tf.keras.layers.Dense(1)(x3f)
model = tf.keras.Model(inputs=inp, outputs=[y1, y2, y3])
fast_ism_model = fastISM.FastISM(
model, test_correctness=False)
self.assertTrue(fast_ism_model.test_correctness())
def test_input_split_conv_fc(self):
# /- C -> D -> y1
# inp
# \_ C -> D -> y2
inp = tf.keras.Input((100, 4))
x1 = tf.keras.layers.Conv1D(20, 3)(inp)
x2 = tf.keras.layers.Conv1D(10, 4)(inp)
x1f = tf.keras.layers.Flatten()(x1)
x2f = tf.keras.layers.Flatten()(x2)
y1 = tf.keras.layers.Dense(1)(x1f)
y2 = tf.keras.layers.Dense(1)(x2f)
model = tf.keras.Model(inputs=inp, outputs=[y1, y2])
fast_ism_model = fastISM.FastISM(
model, test_correctness=False)
self.assertTrue(fast_ism_model.test_correctness())
def test_input_split_complex(self):
# /- C -> MXP -> C -> MXP -> D -> y1
# inp \_ C -> MXP -> D -> D -> y2
# \_ C -> MXP -> D -> y3
inp = tf.keras.Input((100, 4))
# first row
x1 = tf.keras.layers.Conv1D(20, 3, dilation_rate=2)(inp)
x1 = tf.keras.layers.MaxPooling1D(2)(x1)
x11 = tf.keras.layers.Conv1D(20, 3, dilation_rate=3)(x1)
x11 = tf.keras.layers.MaxPooling1D(2)(x11)
x11f = tf.keras.layers.Flatten()(x11)
y1 = tf.keras.layers.Dense(5)(x11f)
# second row
x12 = tf.keras.layers.Conv1D(
15, 2, padding='same', activation='relu')(x1)
x12 = tf.keras.layers.MaxPooling1D(2)(x12)
x12f = tf.keras.layers.Flatten()(x12)
y2 = tf.keras.layers.Dense(5)(x12f)
y2 = tf.keras.layers.Dense(2, activation='tanh')(y2)
# third row
x2 = tf.keras.layers.Conv1D(10, 4, padding='same')(inp)
x2 = tf.keras.layers.MaxPool1D(3)(x2)
x2f = tf.keras.layers.Flatten()(x2)
y3 = tf.keras.layers.Dense(1)(x2f)
model = tf.keras.Model(inputs=inp, outputs=[y1, y2, y3])
fast_ism_model = fastISM.FastISM(
model, test_correctness=False)
self.assertTrue(fast_ism_model.test_correctness())
def test_input_split_complex_10bp_change_range(self):
# /- C -> MXP -> C -> MXP -> D -> y1
# inp \_ C -> MXP -> D -> D -> y2
# \_ C -> MXP -> D -> y3
inp = tf.keras.Input((100, 4))
# first row
x1 = tf.keras.layers.Conv1D(20, 3, dilation_rate=2)(inp)
x1 = tf.keras.layers.MaxPooling1D(2)(x1)
x11 = tf.keras.layers.Conv1D(20, 3, dilation_rate=3)(x1)
x11 = tf.keras.layers.MaxPooling1D(2)(x11)
x11f = tf.keras.layers.Flatten()(x11)
y1 = tf.keras.layers.Dense(5)(x11f)
# second row
x12 = tf.keras.layers.Conv1D(
15, 2, padding='same', activation='relu')(x1)
x12 = tf.keras.layers.MaxPooling1D(2)(x12)
x12f = tf.keras.layers.Flatten()(x12)
y2 = tf.keras.layers.Dense(5)(x12f)
y2 = tf.keras.layers.Dense(2, activation='tanh')(y2)
# third row
x2 = tf.keras.layers.Conv1D(10, 4, padding='same')(inp)
x2 = tf.keras.layers.MaxPool1D(3)(x2)
x2f = tf.keras.layers.Flatten()(x2)
y3 = tf.keras.layers.Dense(1)(x2f)
model = tf.keras.Model(inputs=inp, outputs=[y1, y2, y3])
fast_ism_model = fastISM.FastISM(
model, change_ranges=[(i, i+10) for i in range(0, 100, 10)],
test_correctness=False)
self.assertTrue(fast_ism_model.test_correctness())
if __name__ == '__main__':
unittest.main()
| 35.832461 | 72 | 0.554793 | 944 | 6,844 | 3.911017 | 0.09428 | 0.159263 | 0.239437 | 0.112134 | 0.907367 | 0.877844 | 0.870802 | 0.869989 | 0.851571 | 0.842091 | 0 | 0.068774 | 0.288282 | 6,844 | 190 | 73 | 36.021053 | 0.689181 | 0.091613 | 0 | 0.776 | 0 | 0 | 0.007768 | 0 | 0 | 0 | 0 | 0 | 0.064 | 1 | 0.064 | false | 0 | 0.024 | 0 | 0.096 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d2ff0804455cce2dbc85234e053634e10647a459 | 22,166 | py | Python | embyapi/api/dlna_service_api.py | stanionascu/python-embyapi | a3f7aa49aea4052277cc43605c0d89bc6ff21913 | [
"BSD-3-Clause"
] | null | null | null | embyapi/api/dlna_service_api.py | stanionascu/python-embyapi | a3f7aa49aea4052277cc43605c0d89bc6ff21913 | [
"BSD-3-Clause"
] | null | null | null | embyapi/api/dlna_service_api.py | stanionascu/python-embyapi | a3f7aa49aea4052277cc43605c0d89bc6ff21913 | [
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
"""
Emby Server API
Explore the Emby Server API # noqa: E501
OpenAPI spec version: 4.1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from embyapi.api_client import ApiClient
class DlnaServiceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_dlna_profiles_by_id(self, id, **kwargs): # noqa: E501
"""Deletes a profile # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_dlna_profiles_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Profile Id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_dlna_profiles_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_dlna_profiles_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_dlna_profiles_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""Deletes a profile # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_dlna_profiles_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Profile Id (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_dlna_profiles_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_dlna_profiles_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['Id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501
return self.api_client.call_api(
'/Dlna/Profiles/{Id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_dlna_profileinfos(self, **kwargs): # noqa: E501
"""Gets a list of profiles # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dlna_profileinfos(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[DlnaDeviceProfileInfo]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dlna_profileinfos_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_dlna_profileinfos_with_http_info(**kwargs) # noqa: E501
return data
def get_dlna_profileinfos_with_http_info(self, **kwargs): # noqa: E501
"""Gets a list of profiles # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dlna_profileinfos_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[DlnaDeviceProfileInfo]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dlna_profileinfos" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501
return self.api_client.call_api(
'/Dlna/ProfileInfos', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[DlnaDeviceProfileInfo]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_dlna_profiles_by_id(self, id, **kwargs): # noqa: E501
"""Gets a single profile # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dlna_profiles_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Profile Id (required)
:return: DlnaDeviceProfile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dlna_profiles_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_dlna_profiles_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_dlna_profiles_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""Gets a single profile # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dlna_profiles_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: Profile Id (required)
:return: DlnaDeviceProfile
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dlna_profiles_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_dlna_profiles_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['Id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501
return self.api_client.call_api(
'/Dlna/Profiles/{Id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DlnaDeviceProfile', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_dlna_profiles_default(self, **kwargs): # noqa: E501
"""Gets the default profile # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dlna_profiles_default(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: DlnaDeviceProfile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dlna_profiles_default_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_dlna_profiles_default_with_http_info(**kwargs) # noqa: E501
return data
def get_dlna_profiles_default_with_http_info(self, **kwargs): # noqa: E501
"""Gets the default profile # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dlna_profiles_default_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: DlnaDeviceProfile
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dlna_profiles_default" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501
return self.api_client.call_api(
'/Dlna/Profiles/Default', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DlnaDeviceProfile', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_dlna_profiles(self, body, **kwargs): # noqa: E501
"""Creates a profile # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_dlna_profiles(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DlnaDeviceProfile body: DeviceProfile: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_dlna_profiles_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.post_dlna_profiles_with_http_info(body, **kwargs) # noqa: E501
return data
def post_dlna_profiles_with_http_info(self, body, **kwargs): # noqa: E501
"""Creates a profile # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_dlna_profiles_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DlnaDeviceProfile body: DeviceProfile: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_dlna_profiles" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_dlna_profiles`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501
return self.api_client.call_api(
'/Dlna/Profiles', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_dlna_profiles_by_id(self, body, id, **kwargs): # noqa: E501
"""Updates a profile # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_dlna_profiles_by_id(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DlnaDeviceProfile body: DeviceProfile: (required)
:param str id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_dlna_profiles_by_id_with_http_info(body, id, **kwargs) # noqa: E501
else:
(data) = self.post_dlna_profiles_by_id_with_http_info(body, id, **kwargs) # noqa: E501
return data
def post_dlna_profiles_by_id_with_http_info(self, body, id, **kwargs): # noqa: E501
"""Updates a profile # noqa: E501
Requires authentication as administrator # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_dlna_profiles_by_id_with_http_info(body, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DlnaDeviceProfile body: DeviceProfile: (required)
:param str id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_dlna_profiles_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `post_dlna_profiles_by_id`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `post_dlna_profiles_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['Id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/xml']) # noqa: E501
# Authentication setting
auth_settings = ['apikeyauth', 'embyauth'] # noqa: E501
return self.api_client.call_api(
'/Dlna/Profiles/{Id}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 37.442568 | 123 | 0.605206 | 2,533 | 22,166 | 5.030399 | 0.06514 | 0.051483 | 0.027468 | 0.031392 | 0.961388 | 0.959661 | 0.956129 | 0.948674 | 0.945064 | 0.935567 | 0 | 0.016632 | 0.305603 | 22,166 | 591 | 124 | 37.505922 | 0.811201 | 0.323559 | 0 | 0.812102 | 1 | 0 | 0.174479 | 0.048616 | 0 | 0 | 0 | 0 | 0 | 1 | 0.041401 | false | 0 | 0.012739 | 0 | 0.11465 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8254941bc8dc7ea53f323f075a5e284cb6c130dc | 219 | py | Python | chem_spider/get_sigma_chromatography_urls.py | mutoulbj/chem_spider | c544dd8d11a2e7d1404942814b4162617022c7c3 | [
"MIT"
] | 1 | 2020-03-17T09:07:28.000Z | 2020-03-17T09:07:28.000Z | chem_spider/get_sigma_chromatography_urls.py | mutoulbj/chem_spider | c544dd8d11a2e7d1404942814b4162617022c7c3 | [
"MIT"
] | 1 | 2016-06-29T10:52:09.000Z | 2016-07-21T05:06:20.000Z | chem_spider/get_sigma_chromatography_urls.py | mutoulbj/chem_spider | c544dd8d11a2e7d1404942814b4162617022c7c3 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from sigma_urls import get_chromatography_urls, get_chromatography_base_urls
if __name__ == '__main__':
# get_chromatography_base_urls()
get_chromatography_urls() | 27.375 | 76 | 0.753425 | 28 | 219 | 5.214286 | 0.607143 | 0.465753 | 0.287671 | 0.342466 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005263 | 0.13242 | 219 | 8 | 77 | 27.375 | 0.763158 | 0.3379 | 0 | 0 | 0 | 0 | 0.055944 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
82631d047777697fed90766b0d30bf4b6ad5bda4 | 68,606 | py | Python | benchmarks/SimResults/combinations_spec_rr/oldstuff/cmp_bwavesgccmcfhmmer/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/combinations_spec_rr/oldstuff/cmp_bwavesgccmcfhmmer/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/combinations_spec_rr/oldstuff/cmp_bwavesgccmcfhmmer/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 5.66814e-06,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202693,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 2.02403e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.352327,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.610102,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.349911,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.31234,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.348257,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.55907,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 3.82383e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0127721,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0923611,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0944576,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0923649,
'Execution Unit/Register Files/Runtime Dynamic': 0.10723,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.223184,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.573441,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.60108,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00398203,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00398203,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00348681,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.0013599,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00135689,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0128078,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0375198,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0908045,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 5.77595,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.342284,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.308413,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.27907,
'Instruction Fetch Unit/Runtime Dynamic': 0.791829,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0701306,
'L2/Runtime Dynamic': 0.0156848,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.98615,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.34396,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0889378,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0889378,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.40785,
'Load Store Unit/Runtime Dynamic': 1.87151,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.219305,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.438611,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0778322,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0786009,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.359127,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0569551,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.652183,
'Memory Management Unit/Runtime Dynamic': 0.135556,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 23.53,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 1.3115e-05,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0180162,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.182411,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.20044,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 5.6161,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0494082,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.241496,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.264644,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.113725,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.183434,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0925912,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.38975,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0894935,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.46543,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.049997,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00477013,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0530798,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.035278,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.103077,
'Execution Unit/Register Files/Runtime Dynamic': 0.0400481,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.124179,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.309301,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.38597,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000321836,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000321836,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000295428,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000122629,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000506771,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00144587,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00254589,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0339136,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.1572,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0775002,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.115186,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.48041,
'Instruction Fetch Unit/Runtime Dynamic': 0.230592,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0456765,
'L2/Runtime Dynamic': 0.00373231,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.59396,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.656068,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.043897,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0438969,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.80125,
'Load Store Unit/Runtime Dynamic': 0.916449,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.108242,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.216485,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0384156,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0390997,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.134127,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0127106,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.356227,
'Memory Management Unit/Runtime Dynamic': 0.0518103,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.7385,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.13152,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00673152,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0558299,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.194081,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.78264,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0910279,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.146825,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0741121,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.311965,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.104109,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.02647,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00381812,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0276097,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0282373,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0276097,
'Execution Unit/Register Files/Runtime Dynamic': 0.0320554,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.058166,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.169562,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.12165,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00047959,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00047959,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000421603,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000165332,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000405631,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00178641,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00445962,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0271453,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.72667,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0530982,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0921975,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.02899,
'Instruction Fetch Unit/Runtime Dynamic': 0.178687,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0336301,
'L2/Runtime Dynamic': 0.00790809,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.2966,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.520641,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0342766,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0342765,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.45846,
'Load Store Unit/Runtime Dynamic': 0.723958,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0845202,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.16904,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0299965,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0305011,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.107358,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0087062,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.314996,
'Memory Management Unit/Runtime Dynamic': 0.0392073,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.452,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00410693,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0479485,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0520554,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.12346,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 2.83407e-06,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202691,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.77103e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.533065,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.859814,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.434005,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.82688,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.60967,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.01642,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 3.34585e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0223591,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.161686,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.16536,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.161689,
'Execution Unit/Register Files/Runtime Dynamic': 0.187719,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.340628,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.16864,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 3.79131,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0011053,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0011053,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000958836,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000369062,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.0023754,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00554483,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0107359,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.158964,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.346001,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.539914,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96396,
'Instruction Fetch Unit/Runtime Dynamic': 1.06116,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0445263,
'L2/Runtime Dynamic': 0.0304502,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 8.41468,
'Load Store Unit/Data Cache/Runtime Dynamic': 3.48987,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.232211,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.232211,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 9.51123,
'Load Store Unit/Runtime Dynamic': 4.86726,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.572593,
'Load Store Unit/StoreQ/Runtime Dynamic': 1.14519,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.203215,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.20388,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0567316,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.905191,
'Memory Management Unit/Runtime Dynamic': 0.260612,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 28.0308,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 8.26482e-06,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0240505,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.284811,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.30887,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 10.3197,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 5.966725963201988,
'Runtime Dynamic': 5.966725963201988,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.261549,
'Runtime Dynamic': 0.114035,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 82.0128,
'Peak Power': 115.125,
'Runtime Dynamic': 20.9559,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 81.7513,
'Total Cores/Runtime Dynamic': 20.8419,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.261549,
'Total L3s/Runtime Dynamic': 0.114035,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}} | 75.061269 | 124 | 0.681952 | 8,090 | 68,606 | 5.777256 | 0.067491 | 0.123583 | 0.11297 | 0.093457 | 0.939728 | 0.930549 | 0.918739 | 0.8881 | 0.862211 | 0.842505 | 0 | 0.131598 | 0.224339 | 68,606 | 914 | 125 | 75.061269 | 0.746688 | 0 | 0 | 0.642232 | 0 | 0 | 0.65744 | 0.0481 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
82df42def0d096cc5a4811a464fbd3cd8ebbd9e8 | 15,902 | py | Python | tests/test_generate_image.py | Shdwzen/vqgan-clip-generator | 96c9531151eb872ad9c270195f9639c04273d1c2 | [
"MIT"
] | 59 | 2021-11-01T03:38:33.000Z | 2022-03-30T17:05:21.000Z | tests/test_generate_image.py | Shdwzen/vqgan-clip-generator | 96c9531151eb872ad9c270195f9639c04273d1c2 | [
"MIT"
] | 8 | 2021-10-17T22:28:01.000Z | 2022-02-21T21:46:39.000Z | tests/test_generate_image.py | Shdwzen/vqgan-clip-generator | 96c9531151eb872ad9c270195f9639c04273d1c2 | [
"MIT"
] | 18 | 2021-10-21T05:19:56.000Z | 2022-03-30T16:30:27.000Z | import pytest
import vqgan_clip.generate
from vqgan_clip.engine import VQGAN_CLIP_Config
import os
import vqgan_clip._functional as VF
import glob
from vqgan_clip import video_tools
@pytest.fixture
def testing_config():
    """Return a ``VQGAN_CLIP_Config`` pointing at locally-downloaded VQGAN weights.

    NOTE(review): these are hard-coded absolute Windows paths specific to one
    developer's machine; consider sourcing them from an environment variable or
    a pytest command-line option so the suite is portable.
    """
    config = VQGAN_CLIP_Config()
    # Plain literals: the originals were f-strings with no placeholders (lint F541).
    config.vqgan_checkpoint = 'C:\\Users\\ryanh\\Documents\\src\\vqgan_lib_dev\\models\\vqgan_imagenet_f16_16384.ckpt'
    config.vqgan_config = 'C:\\Users\\ryanh\\Documents\\src\\vqgan_lib_dev\\models\\vqgan_imagenet_f16_16384.yaml'
    return config
# Directory of static fixture assets bundled alongside this test module.
TEST_DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'test_files')

# Individual fixture files used as image/video inputs by the tests below.
IMAGE_1 = os.path.join(TEST_DATA_DIR, 'prompt1.jpg')
IMAGE_2 = os.path.join(TEST_DATA_DIR, 'prompt2.jpg')
TEST_VIDEO = os.path.join(TEST_DATA_DIR, 'small.mp4')

# Two image prompts, equally weighted, in the library's prompt syntax.
IMAGE_PROMPTS = IMAGE_1 + ':0.5|' + IMAGE_2 + ':0.5'
def test_image_invalid_input(testing_config, tmpdir):
    '''Each invalid prompt/argument combination should raise a ValueError.
    '''
    config = testing_config
    config.output_image_size = [128, 128]
    output_filename = str(tmpdir.mkdir('output').join('output.png'))

    def attempt(**overrides):
        # Baseline keyword arguments; each case overrides exactly one of them.
        kwargs = dict(text_prompts=[],
                      image_prompts=[],
                      noise_prompts=[],
                      init_image=[],
                      iterations=5,
                      save_every=50,
                      output_filename=output_filename)
        kwargs.update(overrides)
        vqgan_clip.generate.image(eng_config=config, **kwargs)

    with pytest.raises(ValueError, match='text_prompts must be a string'):
        attempt(text_prompts=3)
    with pytest.raises(ValueError, match='image_prompts must be a string'):
        attempt(image_prompts=3)
    with pytest.raises(ValueError, match='noise_prompts must be a string'):
        attempt(noise_prompts=3)
    # output_filename was never created, so it is a valid "missing file" path.
    with pytest.raises(ValueError, match=f'init_image does not exist.'):
        attempt(init_image=output_filename)
    with pytest.raises(ValueError, match=f'save_every must be an int.'):
        attempt(text_prompts='test prompt', save_every=[50])
    # No prompts of any kind supplied at all.
    with pytest.raises(ValueError, match='No valid prompts were provided'):
        attempt()
def test_image_png(testing_config, tmpdir):
    '''A text prompt alone should produce a single png file on disk.
    '''
    config = testing_config
    config.output_image_size = [128, 128]
    target = str(tmpdir.mkdir('output').join('output.png'))
    prompt = 'A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1'
    vqgan_clip.generate.image(eng_config=config,
            text_prompts=prompt,
            image_prompts=[],
            noise_prompts=[],
            init_image=[],
            iterations=5,
            save_every=50,
            output_filename=target)
    assert os.path.exists(target)
    os.remove(target)
def test_image_bmp(testing_config, tmpdir):
    """Render a single BMP from a text prompt.

    BMP has no package-metadata support, so this exercises the no-metadata path.
    """
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.bmp'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts=[],
        noise_prompts=[],
        init_image=[],
        iterations=5,
        save_every=50,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_jpg_save_every(testing_config, tmpdir):
    """Render a single JPG with a small save_every so intermediate saves occur."""
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts=[],
        noise_prompts=[],
        init_image=[],
        iterations=5,
        save_every=2,  # smaller than iterations, so periodic saving is exercised
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_jpg(testing_config, tmpdir):
    """Render a single JPG from a text prompt without passing save_every."""
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts=[],
        noise_prompts=[],
        init_image=[],
        iterations=5,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_story(testing_config, tmpdir):
    """Render a single image from a 'story' text prompt (the ^ separator)."""
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        iterations=100,
        save_every=50,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_noise_prompt(testing_config, tmpdir):
    """Render a single image from a noise prompt only, with save_every == 50."""
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        noise_prompts='123:0.1|234:0.2|345:0.3',
        iterations=100,
        save_every=50,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_noise_prompt_story(testing_config, tmpdir):
    """Render a single image from a story-form noise prompt, no save_every."""
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        noise_prompts='123:0.1|234:0.2|345:0.3^700',
        iterations=100,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_image_prompt(testing_config, tmpdir):
    """Render a single image driven only by an image prompt."""
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        image_prompts=IMAGE_PROMPTS,
        iterations=5,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_init_image(testing_config, tmpdir):
    """Render a single image from a text prompt seeded with an initial image."""
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    seed_image = IMAGE_1
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style',
        init_image=seed_image,
        iterations=5,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_init_image_weight(testing_config, tmpdir):
    """Render a single image seeded with an initial image and a non-zero init_weight."""
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    seed_image = IMAGE_1
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style',
        init_image=seed_image,
        init_weight=0.5,
        iterations=5,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_all_prompts(testing_config, tmpdir):
    """Render a single image combining text, image, and noise prompts."""
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts=IMAGE_PROMPTS,
        noise_prompts='123:0.1|234:0.2|345:0.3^700',
        iterations=100,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_all_prompts_story(testing_config, tmpdir):
    """Render a single image combining story-form text, image, and noise prompts.

    NOTE(review): this body is currently identical to test_image_all_prompts —
    presumably it was meant to differ (e.g. via save_every); confirm intent.
    """
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts=IMAGE_PROMPTS,
        noise_prompts='123:0.1|234:0.2|345:0.3^700',
        iterations=100,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_no_folder(testing_config):
    """Render an image when the output filename has no directory component.

    The file is written to the current working directory and removed afterwards.
    """
    cfg = testing_config
    cfg.output_image_size = [128, 128]
    out_file = 'output.jpg'
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts=[],
        noise_prompts=[],
        init_image=[],
        iterations=5,
        save_every=50,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_vqgan_imagenet_f16_16384(testing_config, tmpdir):
    """Render a single JPG using the vqgan_imagenet_f16_16384 model weights."""
    cfg = testing_config
    cfg.vqgan_model_name = 'vqgan_imagenet_f16_16384'
    cfg.vqgan_model_yaml_url = 'https://heibox.uni-heidelberg.de/d/a7530b09fed84f80a887/files/?p=%2Fconfigs%2Fmodel.yaml&dl=1'
    cfg.vqgan_model_ckpt_url = 'https://heibox.uni-heidelberg.de/d/a7530b09fed84f80a887/files/?p=%2Fckpts%2Flast.ckpt&dl=1'
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts=[],
        noise_prompts=[],
        init_image=[],
        iterations=5,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_vqgan_imagenet_f16_1024(testing_config, tmpdir):
    """Render a single JPG using the vqgan_imagenet_f16_1024 model weights."""
    cfg = testing_config
    cfg.vqgan_model_name = 'vqgan_imagenet_f16_1024'
    cfg.vqgan_model_yaml_url = 'https://heibox.uni-heidelberg.de/d/8088892a516d4e3baf92/files/?p=%2Fconfigs%2Fmodel.yaml&dl=1'
    cfg.vqgan_model_ckpt_url = 'https://heibox.uni-heidelberg.de/d/8088892a516d4e3baf92/files/?p=%2Fckpts%2Flast.ckpt&dl=1'
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts=[],
        noise_prompts=[],
        init_image=[],
        iterations=5,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_sflckr(testing_config, tmpdir):
    '''Generate a single jpg image using sflckr
    '''
    # NOTE(review): the test name and docstring say sflckr, but the config below
    # points at the vqgan_imagenet_f16_1024 model name and download URLs — this
    # looks like a copy-paste of test_image_vqgan_imagenet_f16_1024. Confirm and
    # substitute the real sflckr yaml/ckpt URLs, otherwise this test duplicates
    # the one above rather than covering the sflckr weights.
    config = testing_config
    config.vqgan_model_name = 'vqgan_imagenet_f16_1024'
    config.vqgan_model_yaml_url = f'https://heibox.uni-heidelberg.de/d/8088892a516d4e3baf92/files/?p=%2Fconfigs%2Fmodel.yaml&dl=1'
    config.vqgan_model_ckpt_url = f'https://heibox.uni-heidelberg.de/d/8088892a516d4e3baf92/files/?p=%2Fckpts%2Flast.ckpt&dl=1'
    config.output_image_size = [128,128]
    output_filename = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(eng_config=config,
        text_prompts = 'A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts = [],
        noise_prompts = [],
        init_image = [],
        iterations = 5,
        output_filename = output_filename)
    assert os.path.exists(output_filename)
    os.remove(output_filename)
def test_image_coco_transformer(testing_config, tmpdir):
    """Render a single JPG using the coco_transformer model weights."""
    cfg = testing_config
    cfg.vqgan_model_name = 'coco_transformer'
    cfg.vqgan_model_yaml_url = 'https://dl.nmkd.de/ai/clip/coco/coco.yaml'
    cfg.vqgan_model_ckpt_url = 'https://dl.nmkd.de/ai/clip/coco/coco.ckpt'
    cfg.output_image_size = [128, 128]
    out_file = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(
        eng_config=cfg,
        text_prompts='A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts=[],
        noise_prompts=[],
        init_image=[],
        iterations=5,
        output_filename=out_file)
    assert os.path.exists(out_file)
    os.remove(out_file)
def test_image_vqgan_gumbel_f8(testing_config, tmpdir):
    '''Generate a single jpg image using vqgan_gumbel_f8
    '''
    # Download URLs for the vqgan_gumbel_f8 checkpoint hosted on heibox.
    config = testing_config
    config.vqgan_model_name = 'vqgan_gumbel_f8'
    config.vqgan_model_yaml_url = f'https://heibox.uni-heidelberg.de/d/2e5662443a6b4307b470/files/?p=%2Fconfigs%2Fmodel.yaml&dl=1'
    config.vqgan_model_ckpt_url = f'https://heibox.uni-heidelberg.de/d/2e5662443a6b4307b470/files/?p=%2Fckpts%2Flast.ckpt&dl=1'
    config.output_image_size = [128,128]
    output_filename = str(tmpdir.mkdir('output').join('output.jpg'))
    vqgan_clip.generate.image(eng_config=config,
        text_prompts = 'A painting of flowers in the renaissance style:0.5|rembrandt:0.5^fish:0.2|love:1',
        image_prompts = [],
        noise_prompts = [],
        init_image = [],
        iterations = 5,
        output_filename = output_filename)
    assert os.path.exists(output_filename)
    os.remove(output_filename)
7d6f8883f8bfb8138f47e301de7b46a3a02e9f64 | 1,689 | py | Python | tests/data/text.py | V-ampire/advego-antiplagiat-api | fe2dab5f5728bf5f1933c201e0aa45edf2792407 | [
"MIT"
] | 5 | 2021-03-01T10:57:39.000Z | 2021-07-21T12:17:19.000Z | tests/data/text.py | V-ampire/advego-antiplagiat-api | fe2dab5f5728bf5f1933c201e0aa45edf2792407 | [
"MIT"
] | null | null | null | tests/data/text.py | V-ampire/advego-antiplagiat-api | fe2dab5f5728bf5f1933c201e0aa45edf2792407 | [
"MIT"
] | null | null | null | """
Тексты для тестов.
"""
# Raw sample text seeded with section/bullet/pilcrow characters
# (§ ∙ ‣ ⁃ ⁌ ⁍ ◦ ¶); presumably consumed by tests that exercise text
# cleaning — verify against the test that imports this module.
dirty = """
Lorem ipsum dolor sit amet, consectetur adipisicing elit.§Officia, animi culpa saepe dolorum quis assumenda neque laborum numquam illo
officiis dignissimos eos asperiores maxime harum et. Voluptatum eligendi, non incidunt.
∙Lorem ipsum dolor sit amet, consectetur adipisicing elit.
‣Lorem ipsum dolor sit amet, consectetur adipisicing elit.
⁃Lorem ipsum dolor sit amet, consectetur adipisicing elit.
⁌Lorem ipsum dolor sit amet, consectetur adipisicing elit.
⁍Lorem ipsum dolor sit amet, consectetur adipisicing elit.
◦Lorem ipsum dolor sit amet, consectetur adipisicing elit.
¶Lorem ipsum dolor sit amet, consectetur adipisicing elit.
Odit amet deserunt consequatur dicta, impedit nobis optio enim praesentium minima cupiditate
dolores officiis atque officia dolor, non molestias repellat, omnis accusamus?
"""
# The same text with the special characters removed — the expected result
# corresponding to `dirty` above.
cleaned = """
Lorem ipsum dolor sit amet, consectetur adipisicing elit.
Officia, animi culpa saepe dolorum quis assumenda neque laborum numquam illo
officiis dignissimos eos asperiores maxime harum et. Voluptatum eligendi, non incidunt.
Lorem ipsum dolor sit amet, consectetur adipisicing elit.
Lorem ipsum dolor sit amet, consectetur adipisicing elit.
Lorem ipsum dolor sit amet, consectetur adipisicing elit.
Lorem ipsum dolor sit amet, consectetur adipisicing elit.
Lorem ipsum dolor sit amet, consectetur adipisicing elit.
Lorem ipsum dolor sit amet, consectetur adipisicing elit.
Lorem ipsum dolor sit amet, consectetur adipisicing elit.
Odit amet deserunt consequatur dicta, impedit nobis optio enim praesentium minima cupiditate
dolores officiis atque officia dolor, non molestias repellat, omnis accusamus?
"""
| 40.214286 | 135 | 0.808171 | 229 | 1,689 | 5.995633 | 0.262009 | 0.116533 | 0.1748 | 0.20976 | 0.974508 | 0.974508 | 0.974508 | 0.974508 | 0.694829 | 0.694829 | 0 | 0 | 0.14624 | 1,689 | 41 | 136 | 41.195122 | 0.946602 | 0.010657 | 0 | 0.592593 | 0 | 0.037037 | 0.977751 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8163c164255a6495dcbf49ccd4aa98aadfc69cec | 180 | py | Python | creditagricole_particuliers/__init__.py | 7MAXIME7-dev/creditagricole-particuliers | f1975422ab557cb5d55176f36cffe0fb1ec720f0 | [
"MIT"
] | null | null | null | creditagricole_particuliers/__init__.py | 7MAXIME7-dev/creditagricole-particuliers | f1975422ab557cb5d55176f36cffe0fb1ec720f0 | [
"MIT"
] | null | null | null | creditagricole_particuliers/__init__.py | 7MAXIME7-dev/creditagricole-particuliers | f1975422ab557cb5d55176f36cffe0fb1ec720f0 | [
"MIT"
] | null | null | null | from creditagricole_particuliers.accounts import Accounts
from creditagricole_particuliers.authenticator import Authenticator
from creditagricole_particuliers.logout import Logout
| 45 | 67 | 0.916667 | 18 | 180 | 9 | 0.388889 | 0.333333 | 0.555556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.066667 | 180 | 3 | 68 | 60 | 0.964286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
81717786a02e17eaa82c5dd2ebd4a25f9f3762b0 | 26,322 | py | Python | test/test_learning.py | Arun-Niranjan/pymdp | 2f7ca7e90254f0ea26f6ceb947f3b45280d18307 | [
"MIT"
] | null | null | null | test/test_learning.py | Arun-Niranjan/pymdp | 2f7ca7e90254f0ea26f6ceb947f3b45280d18307 | [
"MIT"
] | null | null | null | test/test_learning.py | Arun-Niranjan/pymdp | 2f7ca7e90254f0ea26f6ceb947f3b45280d18307 | [
"MIT"
] | 1 | 2021-08-16T08:25:55.000Z | 2021-08-16T08:25:55.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Unit Tests
__author__: Conor Heins, Alexander Tschantz, Brennan Klein
"""
import unittest
import numpy as np
from pymdp.distributions import Categorical, Dirichlet
from pymdp.core import maths, learning
def construct_generic_A(num_obs, n_states):
    """
    Build a random, column-normalized observation likelihood.

    With one modality, returns a plain ndarray of shape (num_obs[0], *n_states);
    with several, returns an object array holding one such ndarray per modality.
    """
    if len(num_obs) == 1:
        likelihood = np.random.rand(*(num_obs + n_states))
        return np.divide(likelihood, likelihood.sum(axis=0))
    A = np.empty(len(num_obs), dtype=object)
    for m, obs_dim in enumerate(num_obs):
        raw = np.random.rand(obs_dim, *n_states)
        A[m] = np.divide(raw, raw.sum(axis=0))
    return A
def construct_pA(num_obs, n_states, prior_scale=1.0):
    """
    Uniform Dirichlet prior over the observation likelihood: every count is
    initialized to prior_scale. Returns an ndarray for one modality, otherwise
    an object array with one ndarray per modality.
    """
    if len(num_obs) == 1:
        return prior_scale * np.ones(num_obs + n_states)
    pA = np.empty(len(num_obs), dtype=object)
    for m, obs_dim in enumerate(num_obs):
        pA[m] = prior_scale * np.ones((obs_dim, *n_states))
    return pA
def construct_generic_B(n_states, n_control):
    """
    Fully controllable transition likelihood: choosing control state `a` for a
    factor deterministically moves that factor to hidden state `a` from any
    previous state, i.e. B[s_next, s_prev, a] = 1 iff s_next == a.

    Returns an ndarray of shape (ns, ns, nc) for one factor, otherwise an
    object array of such ndarrays. Requires n_control[f] <= n_states[f].

    Fix: the previous implementation built (ns, nc, ns)-shaped output via
    eye/tile/transpose (and ignored n_states in the multi-factor branch), which
    was only correct when ns == nc. Values are unchanged for ns == nc.
    """
    def _fully_controllable(ns, nc):
        # Action a puts probability 1 on next state a, from every previous state.
        b = np.zeros((ns, ns, nc))
        for a in range(nc):
            b[a, :, a] = 1.0
        return b

    num_factors = len(n_states)
    if num_factors == 1:
        return _fully_controllable(n_states[0], n_control[0])
    B = np.empty(num_factors, dtype=object)
    for factor, nc in enumerate(n_control):
        B[factor] = _fully_controllable(n_states[factor], nc)
    return B
def construct_pB(n_states, n_control, prior_scale=1.0):
    """
    Uniform Dirichlet prior over the transition likelihood: every count is
    initialized to prior_scale. Returns an ndarray of shape (ns, ns, nc) for
    one factor, otherwise an object array of such ndarrays.

    Fix: the previous implementation tiled and transposed an all-ones array,
    yielding (ns, nc, ns) / (nc, nc, nc) shapes that were only correct when
    ns == nc; the constant array can be built at the right shape directly.
    Values are unchanged for ns == nc.
    """
    num_factors = len(n_states)
    if num_factors == 1:
        return prior_scale * np.ones((n_states[0], n_states[0], n_control[0]))
    pB = np.empty(num_factors, dtype=object)
    for factor, nc in enumerate(n_control):
        ns = n_states[factor]
        pB[factor] = prior_scale * np.ones((ns, ns, nc))
    return pB
def construct_init_qs(n_states):
    """
    Random normalized posterior over hidden states: an ndarray summing to 1 for
    a single factor, otherwise an object array with one such vector per factor.
    """
    if len(n_states) == 1:
        raw = np.random.rand(n_states[0])
        return raw / raw.sum()
    qs = np.empty(len(n_states), dtype=object)
    for factor, ns in enumerate(n_states):
        raw = np.random.rand(ns)
        qs[factor] = raw / raw.sum()
    return qs
class TestLearning(unittest.TestCase):
    def test_update_pA_single_factor_all(self):
        """
        Test for updating prior Dirichlet parameters over sensory likelihood (pA)
        in the case that all observation modalities are updated and the generative model
        has a single hidden state factor
        """
        n_states = [3]
        qs = Categorical(values=construct_init_qs(n_states))
        l_rate = 1.0
        # single observation modality
        num_obs = [4]
        A = Categorical(values=construct_generic_A(num_obs, n_states))
        pA = Dirichlet(values=construct_pA(num_obs, n_states))
        # sample an observation from the predictive distribution A.dot(qs)
        observation = A.dot(qs, return_numpy=False).sample()
        pA_updated = learning.update_likelihood_dirichlet(
            pA, A, observation, qs, lr=l_rate, modalities="all", return_numpy=True
        )
        # expected update: l_rate-scaled outer product of the one-hot observation
        # with the posterior over hidden states
        validation_pA = pA + l_rate * maths.spm_cross(np.eye(*num_obs)[observation], qs.values)
        self.assertTrue(np.all(pA_updated == validation_pA.values))
        # multiple observation modalities
        num_obs = [3, 4]
        A = Categorical(values=construct_generic_A(num_obs, n_states))
        pA = Dirichlet(values=construct_pA(num_obs, n_states))
        observation = A.dot(qs, return_numpy=False).sample()
        pA_updated = learning.update_likelihood_dirichlet(
            pA, A, observation, qs, lr=l_rate, modalities="all", return_numpy=True
        )
        # with modalities="all", every modality's counts must receive its update
        for modality, no in enumerate(num_obs):
            update = maths.spm_cross(np.eye(no)[observation[modality]], qs.values)
            validation_pA = pA[modality] + l_rate * update
            self.assertTrue(np.all(pA_updated[modality] == validation_pA.values))
    def test_update_pA_single_factor_one_modality(self):
        """
        Test for updating prior Dirichlet parameters over sensory likelihood (pA)
        in the case that ONE observation modality is updated and the generative model
        has a single hidden state factor
        """
        n_states = [3]
        qs = Categorical(values=construct_init_qs(n_states))
        l_rate = 1.0
        # multiple observation modalities; pick one at random to update
        num_obs = [3, 4]
        modality_to_update = [np.random.randint(len(num_obs))]
        A = Categorical(values=construct_generic_A(num_obs, n_states))
        pA = Dirichlet(values=construct_pA(num_obs, n_states))
        observation = A.dot(qs, return_numpy=False).sample()
        pA_updated = learning.update_likelihood_dirichlet(
            pA, A, observation, qs, lr=l_rate, modalities=modality_to_update, return_numpy=True
        )
        # only the selected modality should change; others keep their prior counts
        for modality, no in enumerate(num_obs):
            if modality in modality_to_update:
                update = maths.spm_cross(np.eye(no)[observation[modality]], qs.values)
                validation_pA = pA[modality] + l_rate * update
            else:
                validation_pA = pA[modality]
            self.assertTrue(np.all(pA_updated[modality] == validation_pA.values))
    def test_update_pA_single_factor_some_modalities(self):
        """
        Test for updating prior Dirichlet parameters over sensory likelihood (pA)
        in the case that some observation modalities are updated and the generative model
        has a single hidden state factor
        """
        n_states = [3]
        qs = Categorical(values=construct_init_qs(n_states))
        l_rate = 1.0
        # multiple observation modalities; update a fixed subset (first and third)
        num_obs = [3, 4, 5]
        modalities_to_update = [0, 2]
        A = Categorical(values=construct_generic_A(num_obs, n_states))
        pA = Dirichlet(values=construct_pA(num_obs, n_states))
        observation = A.dot(qs, return_numpy=False).sample()
        pA_updated = learning.update_likelihood_dirichlet(
            pA, A, observation, qs, lr=l_rate, modalities=modalities_to_update, return_numpy=True
        )
        # selected modalities gain the outer-product update; the rest are untouched
        for modality, no in enumerate(num_obs):
            if modality in modalities_to_update:
                update = maths.spm_cross(np.eye(no)[observation[modality]], qs.values)
                validation_pA = pA[modality] + l_rate * update
            else:
                validation_pA = pA[modality]
            self.assertTrue(np.all(pA_updated[modality] == validation_pA.values))
    def test_update_pA_multi_factor_all(self):
        """
        Test for updating prior Dirichlet parameters over sensory likelihood (pA)
        in the case that all observation modalities are updated and the generative model
        has multiple hidden state factors
        """
        n_states = [2, 6]
        qs = Categorical(values=construct_init_qs(n_states))
        l_rate = 1.0
        # single observation modality
        num_obs = [4]
        A = Categorical(values=construct_generic_A(num_obs, n_states))
        pA = Dirichlet(values=construct_pA(num_obs, n_states))
        observation = A.dot(qs, return_numpy=False).sample()
        pA_updated = learning.update_likelihood_dirichlet(
            pA, A, observation, qs, lr=l_rate, modalities="all", return_numpy=True
        )
        # spm_cross takes the outer product across all hidden state factors
        update = maths.spm_cross(np.eye(*num_obs)[observation], qs.values)
        validation_pA = pA + l_rate * update
        self.assertTrue(np.all(pA_updated == validation_pA.values))
        # multiple observation modalities
        num_obs = [3, 4]
        A = Categorical(values=construct_generic_A(num_obs, n_states))
        pA = Dirichlet(values=construct_pA(num_obs, n_states))
        observation = A.dot(qs, return_numpy=False).sample()
        pA_updated = learning.update_likelihood_dirichlet(
            pA, A, observation, qs, lr=l_rate, modalities="all", return_numpy=True
        )
        # every modality's counts should receive its own update
        for modality, no in enumerate(num_obs):
            update = maths.spm_cross(np.eye(no)[observation[modality]], qs.values)
            validation_pA = pA[modality] + l_rate * update
            self.assertTrue(np.all(pA_updated[modality] == validation_pA.values))
    def test_update_pA_multi_factor_one_modality(self):
        """
        Test for updating prior Dirichlet parameters over sensory likelihood (pA)
        in the case that ONE observation modality is updated and the generative model
        has multiple hidden state factors
        """
        n_states = [2, 6]
        qs = Categorical(values=construct_init_qs(n_states))
        l_rate = 1.0
        # multiple observation modalities; pick one at random to update
        num_obs = [3, 4]
        modality_to_update = [np.random.randint(len(num_obs))]
        A = Categorical(values=construct_generic_A(num_obs, n_states))
        pA = Dirichlet(values=construct_pA(num_obs, n_states))
        observation = A.dot(qs, return_numpy=False).sample()
        pA_updated = learning.update_likelihood_dirichlet(
            pA, A, observation, qs, lr=l_rate, modalities=modality_to_update, return_numpy=True
        )
        # only the selected modality should change; others keep their prior counts
        for modality, no in enumerate(num_obs):
            if modality in modality_to_update:
                update = maths.spm_cross(np.eye(no)[observation[modality]], qs.values)
                validation_pA = pA[modality] + l_rate * update
            else:
                validation_pA = pA[modality]
            self.assertTrue(np.all(pA_updated[modality] == validation_pA.values))
    def test_update_pA_multi_factor_some_modalities(self):
        """
        Test for updating prior Dirichlet parameters over sensory likelihood (pA)
        in the case that SOME observation modalities are updated and the generative model
        has multiple hidden state factors
        """
        n_states = [2, 6]
        qs = Categorical(values=construct_init_qs(n_states))
        l_rate = 1.0
        # multiple observation modalities; update a fixed subset (first and third)
        num_obs = [3, 4, 5]
        modalities_to_update = [0, 2]
        A = Categorical(values=construct_generic_A(num_obs, n_states))
        pA = Dirichlet(values=construct_pA(num_obs, n_states))
        observation = A.dot(qs, return_numpy=False).sample()
        pA_updated = learning.update_likelihood_dirichlet(
            pA, A, observation, qs, lr=l_rate, modalities=modalities_to_update, return_numpy=True
        )
        # selected modalities gain the outer-product update; the rest are untouched
        for modality, no in enumerate(num_obs):
            if modality in modalities_to_update:
                update = maths.spm_cross(np.eye(no)[observation[modality]], qs.values)
                validation_pA = pA[modality] + l_rate * update
            else:
                validation_pA = pA[modality]
            self.assertTrue(np.all(pA_updated[modality] == validation_pA.values))
    def test_update_pB_single_factor_no_actions(self):
        """
        Test for updating prior Dirichlet parameters over transition likelihood (pB)
        in the case that the one and only hidden state factor is updated, and there
        are no actions.
        """
        n_states = [3]
        n_control = [1]  # this is how we encode the fact that there aren't any actions
        qs_prev = Categorical(values=construct_init_qs(n_states))
        qs = Categorical(values=construct_init_qs(n_states))
        l_rate = 1.0
        B = Categorical(values=np.random.rand(n_states[0], n_states[0], n_control[0]))
        B.normalize()
        pB = Dirichlet(values=np.ones_like(B.values))
        # with n_control == [1] this is always action 0
        action = np.array([np.random.randint(nc) for nc in n_control])
        pB_updated = learning.update_transition_dirichlet(
            pB, B, action, qs, qs_prev, lr=l_rate, factors="all", return_numpy=True
        )
        # expected update: outer product of current and previous posteriors,
        # masked to the non-zero entries of the taken action's B slice
        validation_pB = pB.copy()
        validation_pB[:, :, 0] += (
            l_rate * maths.spm_cross(qs.values, qs_prev.values) * (B[:, :, action[0]].values > 0)
        )
        self.assertTrue(np.all(pB_updated == validation_pB.values))
    # NOTE(review): "dactor" in the method name is a typo for "factor"; left
    # unchanged because the name is the test's public identifier.
    def test_update_pB_single_dactor_with_actions(self):
        """
        Test for updating prior Dirichlet parameters over transition likelihood (pB)
        in the case that the one and only hidden state factor is updated, and there
        are actions.
        """
        n_states = [3]
        n_control = [3]
        qs_prev = Categorical(values=construct_init_qs(n_states))
        qs = Categorical(values=construct_init_qs(n_states))
        l_rate = 1.0
        B = Categorical(values=construct_generic_B(n_states, n_control))
        pB = Dirichlet(values=np.ones_like(B.values))
        # choose a random action for the single control factor
        action = np.array([np.random.randint(nc) for nc in n_control])
        pB_updated = learning.update_transition_dirichlet(
            pB, B, action, qs, qs_prev, lr=l_rate, factors="all", return_numpy=True
        )
        # only the taken action's slice should gain the (masked) outer product
        validation_pB = pB.copy()
        validation_pB[:, :, action[0]] += (
            l_rate * maths.spm_cross(qs.values, qs_prev.values) * (B[:, :, action[0]].values > 0)
        )
        self.assertTrue(np.all(pB_updated == validation_pB.values))
def test_update_pB_multi_factor_no_actions_all_factors(self):
"""
Test for updating prior Dirichlet parameters over transition likelihood (pB)
in the case that there are mulitple hidden state factors, and there
are no actions. All factors are updated
"""
n_states = [3, 4]
n_control = [1, 1]
qs_prev = Categorical(values=construct_init_qs(n_states))
qs = Categorical(values=construct_init_qs(n_states))
l_rate = 1.0
B = Categorical(
values=np.array(
[np.random.rand(ns, ns, n_control[factor]) for factor, ns in enumerate(n_states)], dtype=object
)
)
B.normalize()
pB = Dirichlet(
values=np.array(
[np.ones_like(B[factor].values) for factor in range(len(n_states))], dtype=object
)
)
action = np.array([np.random.randint(nc) for nc in n_control])
pB_updated = learning.update_transition_dirichlet(
pB, B, action, qs, qs_prev, lr=l_rate, factors="all", return_numpy=True
)
validation_pB = pB.copy()
for factor, _ in enumerate(n_control):
validation_pB = pB[factor].copy()
validation_pB[:, :, action[factor]] += (
l_rate
* maths.spm_cross(qs[factor].values, qs_prev[factor].values)
* (B[factor][:, :, action[factor]].values > 0)
)
self.assertTrue(np.all(pB_updated[factor] == validation_pB.values))
def test_update_pB_multi_factor_no_actions_one_factor(self):
"""
Test for updating prior Dirichlet parameters over transition likelihood (pB)
in the case that there are mulitple hidden state factors, and there
are no actions. One factor is updated
"""
n_states = [3, 4]
n_control = [1, 1]
qs_prev = Categorical(values=construct_init_qs(n_states))
qs = Categorical(values=construct_init_qs(n_states))
l_rate = 1.0
factors_to_update = [np.random.randint(len(n_states))]
B = Categorical(
values=np.array(
[np.random.rand(ns, ns, n_control[factor]) for factor, ns in enumerate(n_states)], dtype=object
)
)
B.normalize()
pB = Dirichlet(
values=np.array(
[np.ones_like(B[factor].values) for factor in range(len(n_states))], dtype=object
)
)
action = np.array([np.random.randint(nc) for nc in n_control])
pB_updated = learning.update_transition_dirichlet(
pB, B, action, qs, qs_prev, lr=l_rate, factors=factors_to_update, return_numpy=True
)
validation_pB = pB.copy()
for factor, _ in enumerate(n_control):
validation_pB = pB[factor].copy()
if factor in factors_to_update:
validation_pB[:, :, action[factor]] += (
l_rate
* maths.spm_cross(qs[factor].values, qs_prev[factor].values)
* (B[factor][:, :, action[factor]].values > 0)
)
self.assertTrue(np.all(pB_updated[factor] == validation_pB.values))
def test_update_pB_multi_factor_no_actions_some_factors(self):
"""
Test for updating prior Dirichlet parameters over transition likelihood (pB)
in the case that there are mulitple hidden state factors, and there
are no actions. Some factors are updated.
"""
n_states = [3, 4, 5]
n_control = [1, 1, 1]
qs_prev = Categorical(values=construct_init_qs(n_states))
qs = Categorical(values=construct_init_qs(n_states))
l_rate = 1.0
factors_to_update = [0, 2]
B = Categorical(
values=np.array(
[np.random.rand(ns, ns, n_control[factor]) for factor, ns in enumerate(n_states)], dtype=object
)
)
B.normalize()
pB = Dirichlet(
values=np.array(
[np.ones_like(B[factor].values) for factor in range(len(n_states))], dtype=object
)
)
action = np.array([np.random.randint(nc) for nc in n_control])
pB_updated = learning.update_transition_dirichlet(
pB, B, action, qs, qs_prev, lr=l_rate, factors=factors_to_update, return_numpy=True
)
validation_pB = pB.copy()
for factor, _ in enumerate(n_control):
validation_pB = pB[factor].copy()
if factor in factors_to_update:
validation_pB[:, :, action[factor]] += (
l_rate
* maths.spm_cross(qs[factor].values, qs_prev[factor].values)
* (B[factor][:, :, action[factor]].values > 0)
)
self.assertTrue(np.all(pB_updated[factor] == validation_pB.values))
def test_update_pB_multi_factor_with_actions_all_factors(self):
"""
Test for updating prior Dirichlet parameters over transition likelihood (pB)
in the case that there are mulitple hidden state factors, and there
are actions. All factors are updated
"""
n_states = [3, 4, 5]
n_control = [3, 4, 5]
qs_prev = Categorical(values=construct_init_qs(n_states))
qs = Categorical(values=construct_init_qs(n_states))
l_rate = 1.0
B = Categorical(values=construct_generic_B(n_states, n_control))
B.normalize()
pB = Dirichlet(values=construct_pB(n_states, n_control))
action = np.array([np.random.randint(nc) for nc in n_control])
pB_updated = learning.update_transition_dirichlet(
pB, B, action, qs, qs_prev, lr=l_rate, factors="all", return_numpy=True
)
validation_pB = pB.copy()
for factor, _ in enumerate(n_control):
validation_pB = pB[factor].copy()
validation_pB[:, :, action[factor]] += (
l_rate
* maths.spm_cross(qs[factor].values, qs_prev[factor].values)
* (B[factor][:, :, action[factor]].values > 0)
)
self.assertTrue(np.all(pB_updated[factor] == validation_pB.values))
def test_update_pB_multi_factor_with_actions_one_factor(self):
    """
    Test for updating prior Dirichlet parameters over transition likelihood (pB)
    in the case that there are multiple hidden state factors, and there
    are actions. One (randomly chosen) factor is updated.
    """
    n_states = [3, 4, 5]
    n_control = [3, 4, 5]
    qs_prev = Categorical(values=construct_init_qs(n_states))
    qs = Categorical(values=construct_init_qs(n_states))
    l_rate = 1.0
    factors_to_update = [np.random.randint(len(n_states))]
    B = Categorical(values=construct_generic_B(n_states, n_control))
    B.normalize()
    pB = Dirichlet(values=construct_pB(n_states, n_control))
    action = np.array([np.random.randint(nc) for nc in n_control])
    pB_updated = learning.update_transition_dirichlet(
        pB, B, action, qs, qs_prev, lr=l_rate, factors=factors_to_update, return_numpy=True
    )
    # Factors not in `factors_to_update` must come back unchanged, so the
    # manual increment is applied only inside the guard.
    # (Removed a dead `validation_pB = pB.copy()` that was always
    # overwritten on the first loop iteration before being read.)
    for factor, _ in enumerate(n_control):
        validation_pB = pB[factor].copy()
        if factor in factors_to_update:
            validation_pB[:, :, action[factor]] += (
                l_rate
                * maths.spm_cross(qs[factor].values, qs_prev[factor].values)
                * (B[factor][:, :, action[factor]].values > 0)
            )
        self.assertTrue(np.all(pB_updated[factor] == validation_pB.values))
def test_update_pB_multi_factor_with_actions_some_factors(self):
    """
    Test for updating prior Dirichlet parameters over transition likelihood (pB)
    in the case that there are multiple hidden state factors, and there
    are actions. Some factors are updated.
    """
    n_states = [3, 4, 5]
    n_control = [3, 4, 5]
    qs_prev = Categorical(values=construct_init_qs(n_states))
    qs = Categorical(values=construct_init_qs(n_states))
    l_rate = 1.0
    factors_to_update = [0, 1]
    B = Categorical(values=construct_generic_B(n_states, n_control))
    B.normalize()
    pB = Dirichlet(values=construct_pB(n_states, n_control))
    action = np.array([np.random.randint(nc) for nc in n_control])
    pB_updated = learning.update_transition_dirichlet(
        pB, B, action, qs, qs_prev, lr=l_rate, factors=factors_to_update, return_numpy=True
    )
    # Only factors 0 and 1 get the Dirichlet increment; factor 2 must be
    # returned untouched.
    # (Removed a dead `validation_pB = pB.copy()` that was always
    # overwritten on the first loop iteration before being read.)
    for factor, _ in enumerate(n_control):
        validation_pB = pB[factor].copy()
        if factor in factors_to_update:
            validation_pB[:, :, action[factor]] += (
                l_rate
                * maths.spm_cross(qs[factor].values, qs_prev[factor].values)
                * (B[factor][:, :, action[factor]].values > 0)
            )
        self.assertTrue(np.all(pB_updated[factor] == validation_pB.values))
def test_update_pB_multi_factor_some_controllable(self):
    """
    Test for updating prior Dirichlet parameters over transition likelihood (pB)
    in the case that there are multiple hidden state factors, and some of them
    are controllable (n_control > 1). All factors are updated.
    """
    n_states = [3, 4, 5]
    n_control = [3, 1, 1]  # only factor 0 is controllable
    qs_prev = Categorical(values=construct_init_qs(n_states))
    qs = Categorical(values=construct_init_qs(n_states))
    l_rate = 1.0
    # Build B/pB by hand (object arrays) because the per-factor control
    # dimensions differ here.
    B_values = np.empty(len(n_states), dtype=object)
    pB_values = np.empty(len(n_states), dtype=object)
    for factor, ns in enumerate(n_states):
        B_values[factor] = np.random.rand(ns, ns, n_control[factor])
        pB_values[factor] = np.ones((ns, ns, n_control[factor]))
    B = Categorical(values=B_values)
    B.normalize()
    pB = Dirichlet(values=pB_values)
    action = np.array([np.random.randint(nc) for nc in n_control])
    pB_updated = learning.update_transition_dirichlet(
        pB, B, action, qs, qs_prev, lr=l_rate, factors="all", return_numpy=True
    )
    # (Removed a dead `validation_pB = pB.copy()` that was always
    # overwritten on the first loop iteration before being read.)
    for factor, _ in enumerate(n_control):
        validation_pB = pB[factor].copy()
        validation_pB[:, :, action[factor]] += (
            l_rate
            * maths.spm_cross(qs[factor].values, qs_prev[factor].values)
            * (B[factor][:, :, action[factor]].values > 0)
        )
        self.assertTrue(np.all(pB_updated[factor] == validation_pB.values))
def test_update_pB_multi_factor_some_controllable_some_factors(self):
    """
    Test for updating prior Dirichlet parameters over transition likelihood (pB)
    in the case that there are multiple hidden state factors, and some of them
    are controllable. Some factors are updated.
    """
    n_states = [3, 4, 5]
    n_control = [1, 3, 1]  # only factor 1 is controllable
    qs_prev = Categorical(values=construct_init_qs(n_states))
    qs = Categorical(values=construct_init_qs(n_states))
    l_rate = 1.0
    factors_to_update = [0, 1]
    # Build B/pB by hand (object arrays) because the per-factor control
    # dimensions differ here.
    B_values = np.empty(len(n_states), dtype=object)
    pB_values = np.empty(len(n_states), dtype=object)
    for factor, ns in enumerate(n_states):
        B_values[factor] = np.random.rand(ns, ns, n_control[factor])
        pB_values[factor] = np.ones((ns, ns, n_control[factor]))
    B = Categorical(values=B_values)
    B.normalize()
    pB = Dirichlet(values=pB_values)
    action = np.array([np.random.randint(nc) for nc in n_control])
    pB_updated = learning.update_transition_dirichlet(
        pB, B, action, qs, qs_prev, lr=l_rate, factors=factors_to_update, return_numpy=True
    )
    # (Removed a dead `validation_pB = pB.copy()` that was always
    # overwritten on the first loop iteration before being read.)
    for factor, _ in enumerate(n_control):
        validation_pB = pB[factor].copy()
        if factor in factors_to_update:
            validation_pB[:, :, action[factor]] += (
                l_rate
                * maths.spm_cross(qs[factor].values, qs_prev[factor].values)
                * (B[factor][:, :, action[factor]].values > 0)
            )
        self.assertTrue(np.all(pB_updated[factor] == validation_pB.values))
# Allow running this test module directly (e.g. `python this_file.py`).
if __name__ == "__main__":
    unittest.main()
| 40.62037 | 111 | 0.617924 | 3,412 | 26,322 | 4.543669 | 0.048359 | 0.04425 | 0.06373 | 0.027866 | 0.941366 | 0.932529 | 0.919048 | 0.911566 | 0.910469 | 0.88886 | 0 | 0.008944 | 0.277866 | 26,322 | 647 | 112 | 40.683153 | 0.80666 | 0.013335 | 0 | 0.750562 | 0 | 0 | 0.001613 | 0 | 0 | 0 | 0 | 0 | 0.040449 | 0 | null | null | 0 | 0.008989 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
818bd7e7f0deb48d5d18ebc5d0ec14197d54dde6 | 61,748 | py | Python | tests/test_detourbackend.py | DennyDai/patcherex | 510f63c68cfe6a386b09f109569bd143d939adbc | [
"BSD-2-Clause"
] | 80 | 2018-09-27T00:05:55.000Z | 2022-03-18T04:09:34.000Z | tests/test_detourbackend.py | DennyDai/patcherex | 510f63c68cfe6a386b09f109569bd143d939adbc | [
"BSD-2-Clause"
] | 18 | 2018-10-03T03:27:27.000Z | 2022-03-31T20:51:27.000Z | tests/test_detourbackend.py | DennyDai/patcherex | 510f63c68cfe6a386b09f109569bd143d939adbc | [
"BSD-2-Clause"
] | 20 | 2018-10-01T23:11:24.000Z | 2021-11-27T16:22:25.000Z | #!/usr/bin/env python
import os
import nose
import struct
import subprocess
import logging
from functools import wraps
import patcherex
import shellphish_qemu
from patcherex.backends.detourbackend import DetourBackend
from patcherex.patches import *
from tracer import QEMURunner

# Module-level test fixtures.
l = logging.getLogger("patcherex.test.test_detourbackend")
# Directory holding the CGC test binaries, relative to this file.
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../test_binaries'))
# Path to the shellphish CGC-tracer QEMU used to execute patched binaries.
qemu_location = shellphish_qemu.qemu_path('cgc-tracer')
# Backend strategy knobs; the `add_fallback_strategy` decorator cycles a
# decorated test through every (data_fallback, try_pdf_removal) combination.
global_data_fallback = None
global_try_pdf_removal = True
def add_fallback_strategy(f):
    """Decorator: run test *f* once per backend strategy combination.

    Cycles the module globals ``global_data_fallback`` over (None, True) and
    ``global_try_pdf_removal`` over (True, False), invoking *f* once for each
    of the four pairings, in the same order as the previous hand-unrolled
    version. The globals are left at their last combination (True, False).
    """
    @wraps(f)
    def wrapper():
        global global_data_fallback
        global global_try_pdf_removal
        # Replaces four copy-pasted "set globals, call f()" stanzas.
        for global_data_fallback, global_try_pdf_removal in [
            (None, True),
            (True, True),
            (None, False),
            (True, False),
        ]:
            f()
    return wrapper
@add_fallback_strategy
def test_simple_inline():
    """An InlinePatch overwriting one instruction changes the binary's behavior.

    The unpatched palindrome binary crashes on the 100-'A' input; after the
    inline patch it prints the expected transcript and exits cleanly.
    """
    filepath = os.path.join(bin_location, "0b32aa01_01_2")
    pipe = subprocess.PIPE
    # Baseline: unpatched binary must NOT exit with 0 on this input.
    p = subprocess.Popen([qemu_location, filepath], stdin=pipe, stdout=pipe, stderr=pipe)
    res = p.communicate(b"A" * 100)
    print(res, p.returncode)
    nose.tools.assert_equal((p.returncode != 0), True)
    expected = b"\nWelcome to Palindrome Finder\n\n\tPlease enter a possible palindrome: \t\tYes, that's a palindrome!\n\n\tPlease enter a possible palindrome: "
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath, data_fallback=global_data_fallback, try_pdf_removal=global_try_pdf_removal)
        # Replace the instruction at 0x8048291 in place.
        p = InlinePatch(0x8048291, "mov DWORD [esp+8], 0x40;", name="asdf")
        backend.apply_patches([p])
        backend.save(tmp_file)
        # backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A" * 100)
        print(res, p.returncode)
        nose.tools.assert_equal((res[0] == expected and p.returncode == 0), True)
def test_added_code():
    """AddCodePatch + set_oep: the new entry point exits immediately with 0x32."""
    filepath = os.path.join(bin_location, "0b32aa01_01_2")
    pipe = subprocess.PIPE
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        # CGC terminate(0x32) syscall stub.
        added_code = '''
        mov eax, 1
        mov ebx, 0x32
        int 80h
        '''
        p = AddCodePatch(added_code, "aaa")
        backend.apply_patches([p])
        # Redirect the original entry point to the injected code.
        backend.set_oep(backend.name_map["aaa"])
        backend.save(tmp_file)
        # backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A"*10 + b"\n")
        print(res, p.returncode)
        nose.tools.assert_equal(p.returncode == 0x32, True)
def test_added_code_and_data():
    """Injected code can reference injected read-only data via {name} labels."""
    filepath = os.path.join(bin_location, "0b32aa01_01_2")
    pipe = subprocess.PIPE
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        test_str = b"testtesttest\n\x00"
        # CGC transmit(added_data) then terminate(0x33).
        added_code = '''
        mov eax, 2
        mov ebx, 0
        mov ecx, {added_data}
        mov edx, %d
        mov esi, 0
        int 80h
        mov eax, 1
        mov ebx, 0x33
        int 80h
        ''' % (len(test_str))
        p1 = AddCodePatch(added_code, "aaa")
        p2 = AddRODataPatch(test_str, "added_data")
        backend.apply_patches([p1,p2])
        backend.set_oep(backend.name_map["aaa"])
        backend.save(tmp_file)
        # backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A"*10 + b"\n")
        print(res, p.returncode)
        nose.tools.assert_equal(test_str in res[0] and p.returncode == 0x33, True)
@add_fallback_strategy
def test_rw_memory():
    """AddRWDataPatch regions of several sizes are zero-initialized and writable.

    Injected entry-point code sums the region's bytes (expected 0, printed as
    "00000000") and writes to every byte to prove the region is writable.
    """
    filepath = os.path.join(bin_location, "CROMU_00070")
    pipe = subprocess.PIPE
    tlen=1
    lenlist = []
    lenlist.append(0)
    lenlist.append(1)
    #lenlist.append(4)
    #lenlist.append(5)
    #lenlist.append(0x501)
    #lenlist.append(0x1000)
    #lenlist.append(0x1000-1)
    #lenlist.append(0x1000+1)
    lenlist.append(0x2000+1)
    for tlen in lenlist:
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        patches = []
        patches.append(AddRWDataPatch(tlen, "added_data_rw"))
        patches.append(AddRODataPatch(b"0123456789abcdef", "hex_array"))
        # transmit helper: eax=buf, ebx=len.
        added_code = '''
        ; eax=buf,ebx=len
        pusha
        mov ecx,eax
        mov edx,ebx
        mov eax,0x2
        mov ebx,0x1
        mov esi,0x0
        int 0x80
        popa
        ret
        '''
        patches.append(AddCodePatch(added_code,"print"))
        # Print eax as 8 hex digits, one nibble at a time via hex_array.
        added_code = '''
        ; print eax as hex
        pusha
        mov ecx,32
        mov ebx,eax
        _print_reg_loop:
        rol ebx,4
        mov edi,ebx
        and edi,0x0000000f
        lea eax,[{hex_array}+edi]
        mov ebp,ebx
        mov ebx,0x1
        call {print}
        mov ebx,ebp
        sub ecx,4
        jnz _print_reg_loop
        popa
        ret
        '''
        patches.append(AddCodePatch(added_code,"print_hex_eax"))
        # Sum the RW region's bytes into eax, writing 0x3 into each byte
        # along the way (write access check), then print the sum.
        added_code = '''
        xor eax, eax
        mov edx, {added_data_rw}
        mov ecx, edx
        add ecx, %d
        _loop:
        cmp edx,ecx
        je _exit
        xor ebx, ebx
        mov bl, BYTE [edx]
        add eax, ebx
        mov BYTE [edx], 0x3
        add edx, 1
        jmp _loop
        _exit
        call {print_hex_eax}
        ''' % tlen
        patches.append(AddEntryPointPatch(added_code,"sum"))
        with patcherex.utils.tempdir() as td:
            tmp_file = os.path.join(td, "patched")
            backend.apply_patches(patches)
            backend.save(tmp_file)
            #backend.save("../../vm/shared/patched")
            p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
            res = p.communicate(b"\x00\x01\x01" + b"A"*1000 + b"\n")
            print(str(tlen) + ":")
            print(res, p.returncode)
            nose.tools.assert_true(p.returncode==255)
            # Fresh RW memory must be all zeroes, so the sum prints 00000000.
            nose.tools.assert_true(res[0].startswith(b"00000000"))
def test_ro_memory():
    """AddRODataPatch regions of several sizes are readable with correct content.

    The region is filled with 0x01 bytes, so summing it yields exactly `tlen`;
    unlike test_rw_memory, the write instruction is commented out.
    """
    filepath = os.path.join(bin_location, "CROMU_00070")
    pipe = subprocess.PIPE
    tlen=1
    lenlist = []
    lenlist.append(0)
    lenlist.append(1)
    #lenlist.append(4)
    #lenlist.append(5)
    #lenlist.append(0x501)
    #lenlist.append(0x1000)
    #lenlist.append(0x1000-1)
    #lenlist.append(0x1000+1)
    lenlist.append(0x2000+1)
    for tlen in lenlist:
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        patches = []
        patches.append(AddRODataPatch(b"\x01"*tlen, "added_data_rw"))
        patches.append(AddRODataPatch(b"0123456789abcdef", "hex_array"))
        # transmit helper: eax=buf, ebx=len.
        added_code = '''
        ; eax=buf,ebx=len
        pusha
        mov ecx,eax
        mov edx,ebx
        mov eax,0x2
        mov ebx,0x1
        mov esi,0x0
        int 0x80
        popa
        ret
        '''
        patches.append(AddCodePatch(added_code,"print"))
        # Print eax as 8 hex digits.
        added_code = '''
        ; print eax as hex
        pusha
        mov ecx,32
        mov ebx,eax
        _print_reg_loop:
        rol ebx,4
        mov edi,ebx
        and edi,0x0000000f
        lea eax,[{hex_array}+edi]
        mov ebp,ebx
        mov ebx,0x1
        call {print}
        mov ebx,ebp
        sub ecx,4
        jnz _print_reg_loop
        popa
        ret
        '''
        patches.append(AddCodePatch(added_code,"print_hex_eax"))
        # Sum the RO region's bytes (no write — region is read-only).
        added_code = '''
        xor eax, eax
        mov edx, {added_data_rw}
        mov ecx, edx
        add ecx, %d
        _loop:
        cmp edx,ecx
        je _exit
        xor ebx, ebx
        mov bl, BYTE [edx]
        add eax, ebx
        ; mov BYTE [edx], 0x3
        add edx, 1
        jmp _loop
        _exit
        call {print_hex_eax}
        ''' % tlen
        patches.append(AddEntryPointPatch(added_code,"sum"))
        with patcherex.utils.tempdir() as td:
            tmp_file = os.path.join(td, "patched")
            backend.apply_patches(patches)
            backend.save(tmp_file)
            #backend.save("../../vm/shared/patched")
            p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
            res = p.communicate(b"\x00\x01\x01" + b"A"*1000 + b"\n")
            print(str(tlen) + ":")
            print(res, p.returncode)
            nose.tools.assert_true(p.returncode==255)
            # Sum of tlen bytes of 0x01 == tlen, printed big-endian in hex.
            expected = bytes(struct.pack(">I", tlen).hex(), "utf-8")
            print(expected)
            nose.tools.assert_true(res[0].startswith(expected))
def test_rwinit_memory():
    """AddRWInitDataPatch regions keep their initial content and stay writable.

    The region is initialized with 0x02 bytes, so summing yields 2*tlen; every
    byte is also overwritten to prove writability.
    """
    filepath = os.path.join(bin_location, "CROMU_00070")
    pipe = subprocess.PIPE
    tlen=1
    lenlist = []
    lenlist.append(0)
    lenlist.append(1)
    #lenlist.append(4)
    #lenlist.append(5)
    #lenlist.append(0x501)
    #lenlist.append(0x1000)
    #lenlist.append(0x1000-1)
    #lenlist.append(0x1000+1)
    lenlist.append(0x2000+1)
    for tlen in lenlist:
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        patches = []
        patches.append(AddRWInitDataPatch(b"\x02"*tlen, "added_data_rwinit"))
        patches.append(AddRODataPatch(b"0123456789abcdef", "hex_array"))
        # transmit helper: eax=buf, ebx=len.
        added_code = '''
        ; eax=buf,ebx=len
        pusha
        mov ecx,eax
        mov edx,ebx
        mov eax,0x2
        mov ebx,0x1
        mov esi,0x0
        int 0x80
        popa
        ret
        '''
        patches.append(AddCodePatch(added_code,"print"))
        # Print eax as 8 hex digits.
        added_code = '''
        ; print eax as hex
        pusha
        mov ecx,32
        mov ebx,eax
        _print_reg_loop:
        rol ebx,4
        mov edi,ebx
        and edi,0x0000000f
        lea eax,[{hex_array}+edi]
        mov ebp,ebx
        mov ebx,0x1
        call {print}
        mov ebx,ebp
        sub ecx,4
        jnz _print_reg_loop
        popa
        ret
        '''
        patches.append(AddCodePatch(added_code,"print_hex_eax"))
        # Sum the bytes while also writing into the region (write check).
        added_code = '''
        xor eax, eax
        mov edx, {added_data_rwinit}
        mov ecx, edx
        add ecx, %d
        _loop:
        cmp edx,ecx
        je _exit
        xor ebx, ebx
        mov bl, BYTE [edx]
        add eax, ebx
        mov BYTE [edx], 0x3
        add edx, 1
        jmp _loop
        _exit
        call {print_hex_eax}
        ''' % tlen
        patches.append(AddEntryPointPatch(added_code,"sum"))
        with patcherex.utils.tempdir() as td:
            tmp_file = os.path.join(td, "patched")
            backend.apply_patches(patches)
            backend.save(tmp_file)
            #backend.save("../../vm/shared/patched")
            p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
            res = p.communicate(b"\x00\x01\x01" + b"A"*1000 + b"\n")
            print(str(tlen) + ":")
            print(res, p.returncode)
            nose.tools.assert_true(p.returncode==255)
            # Sum of tlen bytes of 0x02 == 2*tlen, printed big-endian in hex.
            expected = bytes(struct.pack(">I", tlen * 2).hex(), "utf-8")
            print(expected)
            nose.tools.assert_true(res[0].startswith(expected))
def test_added_code_and_data_complex():
    """Interleaved RO / RW / RW-init data patches keep correct placement and
    permissions.

    A `dump` routine prints all nine 10-byte regions; the test writes into the
    RW and RW-init regions between dumps and checks the exact transcript. A
    second binary writes into an RO region, which must segfault (returncode
    -11) unless data_fallback is in use (where the write succeeds).
    """
    filepath = os.path.join(bin_location, "CROMU_00070")
    pipe = subprocess.PIPE
    common_patches = []
    patches = []
    # Three groups, each: read-only, zeroed RW, and initialized RW data.
    common_patches.append(AddRODataPatch(b"ro1ro1ro1\n\x00", "added_data_ro1"))
    common_patches.append(AddRWDataPatch(10, "added_data_rw1"))
    common_patches.append(AddRWInitDataPatch(b"ri1ri1ri1\n\x00", "added_data_rwinit1"))
    common_patches.append(AddRODataPatch(b"ro2ro2ro2\n\x00", "added_data_ro2"))
    common_patches.append(AddRWDataPatch(10, "added_data_rw2"))
    common_patches.append(AddRWInitDataPatch(b"ri2ri2ri2\n\x00", "added_data_rwinit2"))
    common_patches.append(AddRODataPatch(b"ro3ro3ro3\n\x00", "added_data_ro3"))
    common_patches.append(AddRWDataPatch(10, "added_data_rw3"))
    common_patches.append(AddRWInitDataPatch(b"ri3ri3ri3\n\x00", "added_data_rwinit3"))
    # transmit helper: eax=buf, ebx=len.
    added_code = '''
    ; eax=buf,ebx=len
    pusha
    mov ecx,eax
    mov edx,ebx
    mov eax,0x2
    mov ebx,0x1
    mov esi,0x0
    int 0x80
    popa
    ret
    '''
    common_patches.append(AddCodePatch(added_code,"print"))
    # Dump all nine regions (10 bytes each) in declaration order.
    added_code='''
    mov eax, {added_data_ro1}
    mov ebx, 10
    call {print}
    mov eax, {added_data_rw1}
    mov ebx, 10
    call {print}
    mov eax, {added_data_rwinit1}
    mov ebx, 10
    call {print}
    mov eax, {added_data_ro2}
    mov ebx, 10
    call {print}
    mov eax, {added_data_rw2}
    mov ebx, 10
    call {print}
    mov eax, {added_data_rwinit2}
    mov ebx, 10
    call {print}
    mov eax, {added_data_ro3}
    mov ebx, 10
    call {print}
    mov eax, {added_data_rw3}
    mov ebx, 10
    call {print}
    mov eax, {added_data_rwinit3}
    mov ebx, 10
    call {print}
    ret
    '''
    common_patches.append(AddCodePatch(added_code,"dump"))
    with patcherex.utils.tempdir() as td:
        # Exact transcript of three dumps: pristine, after RW writes,
        # after RW-init writes.
        expected = b"ro1ro1ro1\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ri1ri1ri1\nro2ro2ro2\n\x00" \
                   b"\x00\x00\x00\x00\x00\x00\x00\x00\x00ri2ri2ri2\nro3ro3ro3\n\x00\x00\x00\x00" \
                   b"\x00\x00\x00\x00\x00\x00ri3ri3ri3\nro1ro1ro1\nDCBA\x00\x00\x00\x00\x00\x00" \
                   b"ri1ri1ri1\nro2ro2ro2\nHGFE\x00\x00\x00\x00\x00\x00ri2ri2ri2\nro3ro3ro3\nLKJI" \
                   b"\x00\x00\x00\x00\x00\x00ri3ri3ri3\nro1ro1ro1\nDCBA\x00\x00\x00\x00\x00\x00DCBA" \
                   b"i1ri1\nro2ro2ro2\nHGFE\x00\x00\x00\x00\x00\x00HGFEi2ri2\nro3ro3ro3\nLKJI\x00\x00" \
                   b"\x00\x00\x00\x00LKJIi3ri3\n\x00\x02\x00\x00\x02"
        tmp_file = os.path.join(td, "patched1")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        patches = [p for p in common_patches]
        added_code = '''
        call {dump}
        mov DWORD [{added_data_rw1}], 0x41424344
        mov DWORD [{added_data_rw2}], 0x45464748
        mov DWORD [{added_data_rw3}], 0x494a4b4c
        call {dump}
        mov DWORD [{added_data_rwinit1}], 0x41424344
        mov DWORD [{added_data_rwinit2}], 0x45464748
        mov DWORD [{added_data_rwinit3}], 0x494a4b4c
        call {dump}
        '''
        patches.append(AddEntryPointPatch(added_code))
        backend.apply_patches(patches)
        backend.save(tmp_file)
        # backend.save("../../vm/shared/patched")
        #for k,v in backend.name_map.iteritems():
        #print k,hex(v)
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"\x00\x01\x01" + b"A"*1000 + b"\n")
        print(res, p.returncode)
        nose.tools.assert_equal(expected == res[0] and p.returncode == 255, True)
        # Second binary: two dumps, then an intentional write to RO data.
        expected = b"ro1ro1ro1\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ri1ri1ri1\nro2ro2ro2\n\x00\x00\x00" \
                   b"\x00\x00\x00\x00\x00\x00\x00ri2ri2ri2\nro3ro3ro3\n\x00\x00\x00\x00\x00\x00\x00" \
                   b"\x00\x00\x00ri3ri3ri3\nro1ro1ro1\nDCBA\x00\x00\x00\x00\x00\x00ri1ri1ri1\nro2ro2ro2" \
                   b"\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00HGFEi2ri2\nro3ro3ro3\nLKJI\x00\x00\x00\x00" \
                   b"\x00\x00ri3ri3ri3\n"
        tmp_file = os.path.join(td, "patched2")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        patches = [p for p in common_patches]
        added_code = '''
        call {dump}
        mov DWORD [{added_data_rw1}], 0x41424344
        mov DWORD [{added_data_rwinit2}], 0x45464748
        mov DWORD [{added_data_rw3}], 0x494a4b4c
        call {dump}
        mov DWORD [{added_data_ro2}], 0x41424344 ;segfault with no fallback
        '''
        patches.append(AddEntryPointPatch(added_code))
        backend.apply_patches(patches)
        backend.save(tmp_file)
        # backend.save("../../vm/shared/patched")
        #for k,v in backend.name_map.iteritems():
        # print k,hex(v)
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"\x00\x01\x01" + b"A" * 1000 + b"\n")
        print(res, p.returncode)
        # this is a special case in which fallback we get different results if data_fallback is used!
        if global_data_fallback==True:
            nose.tools.assert_equal(res[0].startswith(expected) and p.returncode == 255, True)
        else:
            nose.tools.assert_equal(expected == res[0] and p.returncode == -11, True)
def test_added_code_and_data_big():
    """Like test_added_code_and_data, but with a large (10 KiB) data blob."""
    filepath = os.path.join(bin_location, "0b32aa01_01_2")
    pipe = subprocess.PIPE
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        # All 256 byte values, repeated 40 times.
        test_str = bytes(range(256)) * 40
        # CGC transmit(added_data) then terminate(0x33).
        added_code = '''
        mov eax, 2
        mov ebx, 0
        mov ecx, {added_data}
        mov edx, %d
        mov esi, 0
        int 80h
        mov eax, 1
        mov ebx, 0x33
        int 80h
        ''' % (len(test_str))
        p1 = AddCodePatch(added_code, "aaa")
        p2 = AddRODataPatch(test_str, "added_data")
        backend.apply_patches([p1,p2])
        backend.set_oep(backend.name_map["aaa"])
        backend.save(tmp_file)
        # backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A" * 10 + b"\n")
        #print res, p.returncode
        nose.tools.assert_equal(test_str in res[0] and p.returncode == 0x33, True)
def test_detour():
    """InsertCodePatch detours: injected code runs, then original code resumes.

    The marker string printed by the detour must appear before the binary's
    normal transcript.
    """
    filepath = os.path.join(bin_location, "0b32aa01_01_2")
    pipe = subprocess.PIPE
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        test_str = b"qwertyuiop\n\x00"
        # CGC transmit({qq}); inserted at 0x80480A6, execution then falls
        # through to the displaced original instructions.
        added_code = '''
        mov eax, 2
        mov ebx, 0
        mov ecx, {qq}
        mov edx, %d
        mov esi, 0
        int 80h
        ''' % (len(test_str))
        p1 = InsertCodePatch(0x80480A6, added_code)
        p2 = AddRODataPatch(test_str, "qq")
        backend.apply_patches([p1,p2])
        backend.save(tmp_file)
        # backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A" * 10 + b"\n")
        #print res, p.returncode
        expected = b"qwertyuiop\n\x00\nWelcome to Palindrome Finder\n\n\tPlease enter a possible palindrome: \t\tYes, " \
                   b"that's a palindrome!\n\n\tPlease enter a possible palindrome: "
        nose.tools.assert_equal(res[0], expected)
def test_single_entry_point_patch():
    """AddEntryPointPatch code runs before main; it transmits an in-binary
    string (the "EASTER EGG" message at 0x08048786)."""
    filepath = os.path.join(bin_location, "0b32aa01_01_2")
    pipe = subprocess.PIPE
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        # CGC transmit(0x08048786, 15) — prints 15 bytes of existing rodata.
        added_code = '''
        mov eax, 2
        mov ebx, 0
        mov ecx, 0x08048786
        mov edx, 15
        mov esi, 0
        int 80h
        '''
        p = AddEntryPointPatch(added_code)
        backend.apply_patches([p])
        backend.save(tmp_file)
        # backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A" * 10 + b"\n")
        print(res, p.returncode)
        nose.tools.assert_equal(b"\n\nEASTER EGG!\n\n" in res[0] and p.returncode == 0, True)
def test_complex1():
    """Two entry-point patches plus an added function and data compose.

    Entry-point patches appear to run in application order: the first prints
    the easter egg and calls {added_function} (which prints test_str); the
    second terminates with 0x34.
    """
    filepath = os.path.join(bin_location, "0b32aa01_01_2")
    pipe = subprocess.PIPE
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        patches = []
        # First entry-point patch: print easter egg, then call added code.
        added_code = '''
        mov eax, 2
        mov ebx, 0
        mov ecx, 0x08048786
        mov edx, 15
        mov esi, 0
        int 80h
        call {added_function}
        '''
        patches.append(AddEntryPointPatch(added_code))
        # Second entry-point patch: terminate(0x34).
        added_code = '''
        mov eax, 1
        mov ebx, 0x34
        int 80h
        '''
        patches.append(AddEntryPointPatch(added_code))
        test_str = b"testtesttest\n\x00"
        # Callable injected function: transmit(added_data) and return.
        added_code = '''
        mov eax, 2
        mov ebx, 0
        mov ecx, {added_data}
        mov edx, %d
        mov esi, 0
        int 80h
        ret
        ''' % (len(test_str))
        patches.append(AddCodePatch(added_code, "added_function"))
        patches.append(AddRODataPatch(test_str, "added_data"))
        backend.apply_patches(patches)
        backend.save(tmp_file)
        # backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A" * 10 + b"\n")
        print(res, p.returncode)
        nose.tools.assert_equal((b"\n\nEASTER EGG!\n\n" + test_str) in res[0] and p.returncode == 52, True)
def test_double_patch_collision():
    """Colliding InsertCodePatches are resolved by priority.

    Four scenarios at/near 0x080480A0:
      1. exact overlap, p1 higher priority  -> only p1 applied;
      2. exact overlap, p2 higher priority  -> only p2 applied;
      3. partial overlap (+3), p2 higher    -> only p2 applied;
      4. no overlap (+0x11)                 -> both applied, p1's output first.
    """
    filepath = os.path.join(bin_location, "CADET_00003")
    pipe = subprocess.PIPE
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        test_str1 = b"1111111111\n\x00"
        test_str2 = b"2222222222\n\x00"
        # Each candidate patch transmits its own marker string.
        added_code1 = '''
        pusha
        mov eax, 2
        mov ebx, 0
        mov ecx, {str1}
        mov edx, %d
        mov esi, 0
        int 80h
        popa
        ''' % (len(test_str1))
        added_code2 = '''
        pusha
        mov eax, 2
        mov ebx, 0
        mov ecx, {str2}
        mov edx, %d
        mov esi, 0
        int 80h
        popa
        ''' % (len(test_str2))

        # Scenario 1: exact overlap, p1 wins on priority.
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        p1 = InsertCodePatch(0x080480A0, added_code1, name="p1", priority=100)
        p2 = InsertCodePatch(0x080480A0, added_code2, name="p2", priority=1)
        p3 = AddRODataPatch(test_str1, "str1")
        p4 = AddRODataPatch(test_str2, "str2")
        backend.apply_patches([p1,p2,p3,p4])
        backend.save(tmp_file)
        nose.tools.assert_equal(p1 in backend.added_patches, True)
        nose.tools.assert_equal(p2 in backend.added_patches, False)
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A" * 10 + b"\n")
        print(res, p.returncode)
        # BUG FIX: on Python 3, `print(map(...))` prints "<map object ...>";
        # materialize the map so the debug output is actually useful.
        print(list(map(hex, backend.touched_bytes)))
        expected = test_str1 + b"\nWelcome to Palindrome Finder\n\n\tPlease enter a possible palindrome: \t\tYes, " \
                   b"that's a palindrome!\n\n\tPlease enter a possible palindrome: "
        nose.tools.assert_equal(res[0], expected)

        # Scenario 2: exact overlap, p2 wins on priority.
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        p1 = InsertCodePatch(0x080480A0, added_code1, name="p1", priority=1)
        p2 = InsertCodePatch(0x080480A0, added_code2, name="p2", priority=100)
        p3 = AddRODataPatch(test_str1, "str1")
        p4 = AddRODataPatch(test_str2, "str2")
        backend.apply_patches([p1,p2,p3,p4])
        backend.save(tmp_file)
        nose.tools.assert_equal(p1 in backend.added_patches, False)
        nose.tools.assert_equal(p2 in backend.added_patches, True)
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A" * 10 + b"\n")
        print(res, p.returncode)
        print(list(map(hex, backend.touched_bytes)))  # BUG FIX: was print(map(...))
        expected = test_str2 + b"\nWelcome to Palindrome Finder\n\n\tPlease enter a possible palindrome: \t\tYes, " \
                   b"that's a palindrome!\n\n\tPlease enter a possible palindrome: "
        nose.tools.assert_equal(res[0], expected)

        # Scenario 3: partial overlap, higher-priority p2 evicts p1.
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        p1 = InsertCodePatch(0x080480A0, added_code1, name="p1", priority=1)
        #partial overlap
        p2 = InsertCodePatch(0x080480A0+3, added_code2, name="p2", priority=100)
        p3 = AddRODataPatch(test_str1, "str1")
        p4 = AddRODataPatch(test_str2, "str2")
        backend.apply_patches([p1,p2,p3,p4])
        backend.save(tmp_file)
        nose.tools.assert_equal(p1 in backend.added_patches, False)
        nose.tools.assert_equal(p2 in backend.added_patches, True)
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A" * 10 + b"\n")
        print(res, p.returncode)
        print(list(map(hex, backend.touched_bytes)))  # BUG FIX: was print(map(...))
        expected = test_str2 + b"\nWelcome to Palindrome Finder\n\n\tPlease enter a possible palindrome: \t\tYes, " \
                   b"that's a palindrome!\n\n\tPlease enter a possible palindrome: "
        nose.tools.assert_equal(res[0], expected)

        # Scenario 4: no overlap — both patches survive and both run.
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        p1 = InsertCodePatch(0x080480A0, added_code1, name="p1", priority=1)
        #no overlap
        p2 = InsertCodePatch(0x080480A0+0x11, added_code2, name="p2", priority=100)
        p3 = AddRODataPatch(test_str1, "str1")
        p4 = AddRODataPatch(test_str2, "str2")
        backend.apply_patches([p1,p2,p3,p4])
        backend.save(tmp_file)
        nose.tools.assert_equal(p1 in backend.added_patches, True)
        nose.tools.assert_equal(p2 in backend.added_patches, True)
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A"*10 + b"\n")
        print(res, p.returncode)
        print(list(map(hex, backend.touched_bytes)))  # BUG FIX: was print(map(...))
        expected = test_str1 + test_str2 + b"\nWelcome to Palindrome Finder\n\n\tPlease enter a possible palindrome: \t\tYes, " \
                   b"that's a palindrome!\n\n\tPlease enter a possible palindrome: "
        nose.tools.assert_equal(res[0], expected)
def test_conflicting_symbols():
    """Two patches declaring the same symbol name must be rejected.

    apply_patches is expected to raise ValueError both for a data/data name
    collision and for a data/code name collision.
    """
    filepath = os.path.join(bin_location, "0b32aa01_01_2")

    def _expect_name_collision(patch_list):
        # A fresh backend per attempt, mirroring the two original stanzas.
        backend = DetourBackend(filepath, data_fallback=global_data_fallback,
                                try_pdf_removal=global_try_pdf_removal)
        raised = False
        try:
            backend.apply_patches(patch_list)
        except ValueError:
            raised = True
        nose.tools.assert_true(raised)

    # Case 1: two read-only data patches both named "aaa".
    _expect_name_collision([
        AddRODataPatch(b"0123456789abcdef", "aaa"),
        AddRODataPatch(b"\n", "aaa"),
    ])

    added_code = '''
    ; put 4 random bytes in eax
    pusha
    mov ebx, eax
    mov eax,7
    mov ecx,4
    mov edx, {aaa}
    int 0x80
    popa
    ret
    '''
    # Case 2: a data patch and a code patch both named "aaa".
    _expect_name_collision([
        AddRODataPatch(b"0123456789abcdef", "aaa"),
        AddCodePatch(added_code, "aaa"),
    ])
def test_random_canary():
    """End-to-end stack-canary instrumentation built from many patch types.

    A random canary is generated at entry, pushed at 0x08048230 and checked at
    0x080483FF; the input ends with 100 NULs to smash the protected value, so
    the failure handler must fire and exit with 0x44.
    """
    def check_output(tstr):
        # Output must start with the base-canary banner, contain the failure
        # message (clobbered value prints as 00000000), and end with the same
        # canary value that was announced at startup.
        expected = b"\nWelcome to Palindrome Finder\n\n\tPlease enter a possible palindrome: \t\tYes, that's a palindrome!\n\n\tPlease enter a possible palindrome: canary failure: 00000000 vs "
        init = b"base canary value:"
        if not tstr.startswith(init):
            return False
        canary = tstr.split(init)[1].split()[0].strip()
        if expected not in tstr:
            return False
        if not tstr.endswith(canary):
            return False
        return True
    filepath = os.path.join(bin_location, "0b32aa01_01_2")
    pipe = subprocess.PIPE
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        patches = []
        patches.append(AddRODataPatch(b"0123456789abcdef", "hex_array"))
        patches.append(AddRODataPatch(b"\n", "new_line"))
        # NOTE(review): the canary is stored via AddRODataPatch but written
        # at runtime by {random} — presumably relies on the backend placing
        # it in writable memory; confirm against DetourBackend semantics.
        patches.append(AddRODataPatch(b"X"*4, "saved_canary"))
        patches.append(AddRODataPatch(b"base canary value: \x00","str_bcanary"))
        patches.append(AddRODataPatch(b"canary failure: \x00","str_fcanary"))
        patches.append(AddRODataPatch(b" vs \x00","str_vs"))
        # Print eax as 8 hex digits via hex_array.
        added_code = '''
        ; print eax as hex
        pusha
        mov ecx,32
        mov ebx,eax
        _print_reg_loop:
        rol ebx,4
        mov edi,ebx
        and edi,0x0000000f
        lea eax,[{hex_array}+edi]
        mov ebp,ebx
        mov ebx,0x1
        call {print}
        mov ebx,ebp
        sub ecx,4
        jnz _print_reg_loop
        popa
        ret
        '''
        patches.append(AddCodePatch(added_code,"print_hex_eax"))
        # transmit helper: eax=buf, ebx=len.
        added_code = '''
        ; eax=buf,ebx=len
        pusha
        mov ecx,eax
        mov edx,ebx
        mov eax,0x2
        mov ebx,0x1
        mov esi,0x0
        int 0x80
        popa
        ret
        '''
        patches.append(AddCodePatch(added_code,"print"))
        # terminate(eax).
        added_code = '''
        mov ebx, eax
        mov eax, 0x1
        int 80h
        '''
        patches.append(AddCodePatch(added_code,"exit_eax"))
        # CGC random syscall: write 4 random bytes to the buffer in eax.
        added_code = '''
        ; put 4 random bytes in eax
        pusha
        mov ebx, eax
        mov eax,7
        mov ecx,4
        mov edx,0
        int 0x80
        popa
        ret
        '''
        patches.append(AddCodePatch(added_code,"random"))
        # Print the NUL-terminated string pointed to by eax, byte by byte.
        added_code = '''
        ; print a null terminated string pointed by eax
        pusha
        mov ecx, eax
        _loop:
        cmp BYTE [ecx],0
        je _out
        mov edx,1
        mov eax,0x2
        mov ebx,0x1
        mov esi,0x0
        int 0x80
        inc ecx
        jmp _loop
        _out:
        popa
        ret
        '''
        patches.append(AddCodePatch(added_code,"print_str"))
        # Failure handler: report clobbered vs saved canary, exit(0x44).
        added_code = '''
        ; print a null terminated string pointed by eax
        push eax
        mov eax, {str_fcanary}
        call {print_str}
        pop eax
        call {print_hex_eax}
        mov eax, {str_vs}
        call {print_str}
        mov eax, [{saved_canary}]
        call {print_hex_eax}
        mov eax, 0x44
        call {exit_eax}
        '''
        patches.append(AddCodePatch(added_code,"canary_check_fail"))
        # Entry point: randomize the canary and announce it.
        added_code = '''
        mov eax, {saved_canary}
        call {random}
        xor eax, eax
        mov eax, {str_bcanary}
        call {print_str}
        mov eax, [{saved_canary}]
        call {print_hex_eax}
        '''
        patches.append(AddEntryPointPatch(added_code))
        # Prologue detour: push the canary onto the stack.
        added_code = '''
        push DWORD [{saved_canary}]
        '''
        patches.append(InsertCodePatch(0x08048230, added_code, "canary_push1"))
        # Epilogue detour: verify the pushed canary, jump to failure handler
        # on mismatch, otherwise pop it and continue.
        added_code = '''
        push eax ; avoid changing eax
        mov eax, dword [esp+4]
        cmp eax, DWORD [{saved_canary}]
        jne {canary_check_fail}
        pop eax
        add esp, 4
        '''
        patches.append(InsertCodePatch(0x080483FF, added_code, "canary_pop1"))
        backend.apply_patches(patches)
        backend.save(tmp_file)
        # backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        # Trailing NULs overwrite the stack slot holding the canary.
        res = p.communicate(b"A"*10 + b"\n" + b"\x00"*100)
        print(res, p.returncode)
        nose.tools.assert_equal(check_output(res[0]) and p.returncode == 0x44, True)
def test_patch_conflicts():
    """Exercise conflict/dependency resolution between overlapping InsertCodePatches.

    Each small patch prints its own name marker before the binary's normal
    output, so the set of patches that actually survived conflict resolution
    can be read back from stdout and compared against the expected survivor
    list for each priority/dependency scenario.
    """
    def create_dpatch(tstr, addr, p):
        # One detour patch named `tstr` at `addr` with priority `p`; it prints
        # its own marker string (RO-data label "s<tstr>") via the {print} helper.
        code = '''
push ecx
mov ecx, {s%s}
call {print}
pop ecx
''' % tstr
        return InsertCodePatch(addr, code, tstr, priority=p)
    def expected_str(plist):
        # Expected stdout prefix: one b"<name>\n\x00" marker per surviving
        # patch (in order), followed by the binary's normal banner.
        tstr = b""
        for p in plist:
            tstr += bytes(p.name, "utf-8") + b"\n\x00"
        return tstr + base_str
    def create_patches():
        # Four conflicting pairs: within each pair the two patches detour the
        # same or overlapping instructions, so at most one can survive.
        p11=create_dpatch("11", 0x08049920, 2)
        p12=create_dpatch("12", 0x08049920+1, 1)
        p21=create_dpatch("21", 0x0804992F, 2)
        p22=create_dpatch("22", 0x0804992F+0, 1)
        p31=create_dpatch("31", 0x08049947, 2)
        p32=create_dpatch("32", 0x08049947+0, 1)
        p41=create_dpatch("41", 0x08049953, 2)
        p42=create_dpatch("42", 0x08049953+3, 1)
        return p11,p12,p21,p22,p31,p32,p41,p42
    filepath = os.path.join(bin_location, "CROMU_00071")
    pipe = subprocess.PIPE
    base_str = b"Database checksum: "
    # Common patches shared by every scenario: the 8 marker strings plus the
    # {print} helper the markers call.
    cpatches = []
    cpatches.append(AddRODataPatch(b"11\n\x00", "s11"))
    cpatches.append(AddRODataPatch(b"12\n\x00", "s12"))
    cpatches.append(AddRODataPatch(b"21\n\x00", "s21"))
    cpatches.append(AddRODataPatch(b"22\n\x00", "s22"))
    cpatches.append(AddRODataPatch(b"31\n\x00", "s31"))
    cpatches.append(AddRODataPatch(b"32\n\x00", "s32"))
    cpatches.append(AddRODataPatch(b"41\n\x00", "s41"))
    cpatches.append(AddRODataPatch(b"42\n\x00", "s42"))
    # Print helper: caller sets ecx to the buffer; length is fixed at 4 bytes
    # (every marker string is exactly 4 bytes long).
    added_code = '''
pusha
mov eax, 2
mov ebx, 0
mov edx, 4
mov esi, 0
int 80h
popa
ret
'''
    cpatches.append(AddCodePatch(added_code,"print"))
    with patcherex.utils.tempdir() as td:
        # Scenario: no marker patches at all -> only the base banner.
        p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        backend.apply_patches(cpatches)
        backend.save(tmp_file)
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A" * 10 + b"\n")
        print(res, p.returncode)
        nose.tools.assert_equal(p.returncode,1)
        estr = expected_str([])
        print(repr(estr))
        nose.tools.assert_true(res[0].startswith(estr))
        # Scenario: a single patch, no conflict possible.
        p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        backend.apply_patches(cpatches+[p11])
        backend.save(tmp_file)
        #backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A"*10 + b"\n")
        print(res, p.returncode)
        nose.tools.assert_equal(p.returncode,1)
        estr = expected_str([p11])
        print(repr(estr))
        nose.tools.assert_true(res[0].startswith(estr))
        # Scenario: four non-conflicting patches (one from each pair).
        p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        backend.apply_patches(cpatches+[p11,p21,p31,p41])
        backend.save(tmp_file)
        #backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A"*10 + b"\n")
        print(res, p.returncode)
        nose.tools.assert_equal(p.returncode,1)
        estr = expected_str([p11,p21,p31,p41])
        print(repr(estr))
        nose.tools.assert_true(res[0].startswith(estr))
        # NOTE(review): the two triple-quoted blocks below are disabled
        # scenarios kept as bare string literals (dead code). They still use
        # Python 2 print statements / str pipes, so they would need porting
        # before being re-enabled. Left byte-identical here.
        '''
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
backend.apply_patches(cpatches+[p12,p22,p32,p42])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p12,p22,p32,p42])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
backend.apply_patches(cpatches+[p11,p21,p31,p41,p12])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p11,p21,p31,p41])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p31.dependencies = [p12]
backend.apply_patches(cpatches+[p11,p21,p31,p41,p12])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p11,p21,p41])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p11.dependencies = [p12]
p21.dependencies = [p12]
p31.dependencies = [p12]
p41.dependencies = [p12]
backend.apply_patches(cpatches+[p11,p21,p31,p41,p12])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p31.dependencies = [p12]
p12.dependencies = [p22]
p22.dependencies = [p31]
backend.apply_patches(cpatches+[p31,p12,p22])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p12,p22,p31])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p31.dependencies = [p12]
p12.dependencies = [p22]
p22.dependencies = [p31]
backend.apply_patches(cpatches+[p31,p12,p22,p11])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p11])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p11.dependencies = [p12]
backend.apply_patches(cpatches+[p11,p21,p31,p41,p12])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p21,p31,p41])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p21.dependencies = [p12]
backend.apply_patches(cpatches+[p11,p21,p31,p41,p12])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p11,p31,p41])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
'''
        '''
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p31.dependencies = [p12]
backend.apply_patches(cpatches+[p11,p21,p31,p41,p12])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p11,p21,p41])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p41.dependencies = [p12]
backend.apply_patches(cpatches+[p11,p21,p31,p41,p12])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p11,p21,p31])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p31.dependencies = [p12]
p21.dependencies = [p42]
backend.apply_patches(cpatches+[p11,p21,p31,p12,p42])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p11,p21,p42])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p31.dependencies = [p12]
p21.dependencies = [p42]
backend.apply_patches(cpatches+[p11,p21,p31,p12,p42,p41])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p11,p41])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
tmp_file = os.path.join(td, "patched")
backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
p11.dependencies = [p42]
p31.dependencies = [p12]
p21.dependencies = [p42]
backend.apply_patches(cpatches+[p11,p21,p31,p12,p42,p41])
backend.save(tmp_file)
#backend.save("../../vm/shared/patched")
p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
res = p.communicate("A"*10+"\n")
print res, p.returncode
nose.tools.assert_equal(p.returncode,1)
estr = expected_str([p41])
print repr(estr)
nose.tools.assert_true(res[0].startswith(estr))
'''
        # Scenario: p11 depends on p21 and p32; p32 loses its own conflict,
        # which drags p11 down with it, so only p21 and p31 survive.
        p11,p12,p21,p22,p31,p32,p41,p42 = create_patches()
        tmp_file = os.path.join(td, "patched")
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        p11.dependencies = [p21,p32]
        backend.apply_patches(cpatches+[p11,p21,p31,p32])
        backend.save(tmp_file)
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate(b"A" * 10 + b"\n")
        print(res, p.returncode)
        nose.tools.assert_equal(p.returncode,1)
        estr = expected_str([p21,p31])
        print(repr(estr))
        nose.tools.assert_true(res[0].startswith(estr))
def test_c_compilation():
    """Test AddCodePatch(is_c=True): compile a small C function into the binary
    and call it from asm via patcherex's nasm<->C calling-convention wrapper.

    Three rounds are run against CADET_00003:
      1. fastcall C function returning a value, wrapper with get_return=True;
      2. void C function writing through a pointer, get_return=False;
      3. the same void function but with get_return=True (eax then holds
         whatever the wrapper captured, printed alongside the memory value).
    Each round checks the hex dump printed before the binary's normal banner.
    """
    filepath = os.path.join(bin_location, "CADET_00003")
    pipe = subprocess.PIPE
    # Helpers shared by all rounds: print_hex_eax (dump eax as 8 hex digits)
    # and the raw print syscall stub it uses.
    common_patches = []
    added_code = '''
; print eax as hex
pusha
mov ecx,32
mov ebx,eax
_print_reg_loop:
rol ebx,4
mov edi,ebx
and edi,0x0000000f
lea eax,[{hex_array}+edi]
mov ebp,ebx
mov ebx,0x1
call {print}
mov ebx,ebp
sub ecx,4
jnz _print_reg_loop
popa
ret
'''
    common_patches.append(AddCodePatch(added_code,"print_hex_eax"))
    added_code = '''
; eax=buf,ebx=len
pusha
mov ecx,eax
mov edx,ebx
mov eax,0x2
mov ebx,0x1
mov esi,0x0
int 0x80
popa
ret
'''
    common_patches.append(AddCodePatch(added_code,"print"))
    common_patches.append(AddRODataPatch(b"0123456789abcdef", "hex_array"))
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        # Round 1: fastcall args in ecx/edx (0x10, 0x20); sub1 returns
        # 0x10*0x20+37 = 549 = 0x225, printed as "00000225".
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        patches = []
        patches.extend(common_patches)
        added_code = '''
push ecx
push edx
mov ecx, 0x10
mov edx, 0x20
%s
call {print_hex_eax}
pop ecx
pop edx
''' % patcherex.utils.get_nasm_c_wrapper_code("c_function",get_return=True)
        patches.append(InsertCodePatch(0x080480a0, added_code, name="p1", priority=1))
        added_code = '''
__attribute__((fastcall)) int sub1(int a, int b){
int c = a*b + 37;
return c;
}
'''
        patches.append(AddCodePatch(added_code,"c_function",is_c=True))
        backend.apply_patches(patches)
        backend.save(tmp_file)
        #backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate()
        nose.tools.assert_equal(p.returncode,0)
        expected = b"00000225\nWelcome to Palindrome Finder\n\n\tPlease enter a possible palindrome: "
        nose.tools.assert_equal(res[0],expected)
        # Round 2: void C function stores 0x10*3+2 = 0x32 into {memory_area};
        # get_return=False, so eax keeps its pre-call value 0x100.
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        patches = []
        patches.append(AddRWDataPatch(10, "memory_area"))
        patches.extend(common_patches)
        added_code = '''
push ecx
push edx
mov ecx, 0x10
mov edx, {memory_area}
mov eax, 0x100
%s
call {print_hex_eax}
mov eax, DWORD [{memory_area}]
call {print_hex_eax}
pop ecx
pop edx
''' % patcherex.utils.get_nasm_c_wrapper_code("c_function",get_return=False)
        patches.append(InsertCodePatch(0x080480a0, added_code, name="p1", priority=1))
        added_code = '''
__attribute__((fastcall)) void sub1(int a, unsigned int* b){
*b = a*3+2;
return;
}
'''
        patches.append(AddCodePatch(added_code,"c_function",is_c=True))
        backend.apply_patches(patches)
        backend.save(tmp_file)
        #backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate()
        nose.tools.assert_equal(p.returncode,0)
        expected = b"0000010000000032\nWelcome to Palindrome Finder\n\n\tPlease enter a possible palindrome: "
        nose.tools.assert_equal(res[0],expected)
        # Round 3: same void function but get_return=True, so the wrapper
        # overwrites eax; both prints then show 0x32.
        backend = DetourBackend(filepath,data_fallback=global_data_fallback,try_pdf_removal=global_try_pdf_removal)
        patches = []
        patches.append(AddRWDataPatch(10, "memory_area"))
        patches.extend(common_patches)
        added_code = '''
push ecx
push edx
mov ecx, 0x10
mov edx, {memory_area}
mov eax, 0x100
%s
call {print_hex_eax}
mov eax, DWORD [{memory_area}]
call {print_hex_eax}
pop ecx
pop edx
''' % patcherex.utils.get_nasm_c_wrapper_code("c_function",get_return=True)
        patches.append(InsertCodePatch(0x080480a0, added_code, name="p1", priority=1))
        added_code = '''
__attribute__((fastcall)) void sub1(int a, unsigned int* b){
*b = a*3+2;
return;
}
'''
        patches.append(AddCodePatch(added_code,"c_function",is_c=True))
        backend.apply_patches(patches)
        backend.save(tmp_file)
        #backend.save("../../vm/shared/patched")
        p = subprocess.Popen([qemu_location, tmp_file], stdin=pipe, stdout=pipe, stderr=pipe)
        res = p.communicate()
        nose.tools.assert_equal(p.returncode,0)
        expected = b"0000003200000032\nWelcome to Palindrome Finder\n\n\tPlease enter a possible palindrome: "
        nose.tools.assert_equal(res[0],expected)
def test_entrypointpatch_restore():
    """Verify AddEntryPointPatch register handling around the original entry.

    A deliberate crash (`jmp 0x4567890`) captures the register state reaching
    the original code. An entry-point patch that clobbers eax must leave that
    state untouched (registers restored), unless after_restore=True is given,
    in which case the eax change must be visible.
    """
    filepath = os.path.join(bin_location, "0b32aa01_01_2")
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")

        def run_with(extra_patches):
            # Apply the crash patch plus any extras, save, run under QEMU,
            # and return the register dump recorded at the crash.
            backend = DetourBackend(filepath, data_fallback=global_data_fallback,
                                    try_pdf_removal=global_try_pdf_removal)
            crash_patch = InsertCodePatch(0x80480a0, "jmp 0x4567890", "goto_crash")
            backend.apply_patches([crash_patch] + extra_patches)
            backend.save(tmp_file)
            runner = QEMURunner(tmp_file, b"00000001\n", record_stdout=True, record_core=True)
            return runner.reg_vals

        # Baseline: crash with no entry-point patch; eip confirms the jump hit.
        original_reg_value = run_with([])
        nose.tools.assert_equal(original_reg_value['eip'], 0x4567890)

        # Entry-point patch before the restore: its eax write must not leak.
        regs = run_with([AddEntryPointPatch("mov eax, 0x34567890", name="entry_patch1")])
        nose.tools.assert_equal(original_reg_value, regs)

        # after_restore=True: the eax write happens after registers are
        # restored, so it must be visible at the crash.
        regs = run_with([AddEntryPointPatch("mov eax, 0x34567890", after_restore=True,
                                            name="entry_patch2")])
        original_reg_value_mod = dict(original_reg_value)
        original_reg_value_mod['eax'] = 0x34567890
        nose.tools.assert_equal(original_reg_value_mod, regs)
def test_piling():
    """Verify patch "piling": two InsertCodePatches at the same address.

    The second patch is created with stackable=True, so instead of conflicting
    both detours execute and both strings are printed before the binary's
    normal output.
    """
    filepath = os.path.join(bin_location, "0b32aa01_01_2")
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        # Consistency fix: pass try_pdf_removal like every other test in this
        # module, so the global pdf-removal toggle also applies here.
        backend = DetourBackend(filepath, data_fallback=global_data_fallback,
                                try_pdf_removal=global_try_pdf_removal)
        patches = []
        # Syscall stub printing the_first_string (13 bytes) on fd 1.
        code_print_a = "mov eax, 2; \n" \
                       "mov ebx, 1; \n" \
                       "mov ecx, {the_first_string}; \n" \
                       "mov edx, 13; \n" \
                       "mov esi, 0; \n" \
                       "int 0x80;"
        # Same stub for the_second_string (8 bytes).
        code_print_b = "mov eax, 2; \n" \
                       "mov ebx, 1; \n" \
                       "mov ecx, {the_second_string}; \n" \
                       "mov edx, 8; \n" \
                       "mov esi, 0; \n" \
                       "int 0x80;"
        patches.append(AddRODataPatch(b"does it work\n", "the_first_string"))
        patches.append(AddRODataPatch(b"nope no\n", name="the_second_string"))
        # Both detours target 0x80480a0; stackable=True lets the second pile
        # onto the first rather than conflict with it.
        patches.append(InsertCodePatch(0x80480a0, code_print_a, "test_code"))
        patches.append(InsertCodePatch(0x80480a0, code_print_b, name="second_add_code_patch", stackable=True))
        backend.apply_patches(patches)
        backend.save(tmp_file)
        res = QEMURunner(tmp_file, b"abcdefg\n", record_stdout=True)
        # Both injected strings must precede the program's usual banner.
        expected = \
            b"""does it work
nope no
Welcome to Palindrome Finder
\tPlease enter a possible palindrome: Nope, that's not a palindrome
\tPlease enter a possible palindrome: """
        nose.tools.assert_true(res.stdout.startswith(expected))
def test_pdf_removal():
    """Check that pdf (CGC "poll data format") removal shrinks the binary
    without changing the data the program sees.

    For each binary, an entry-point patch dumps the RO data range [ro_start,
    ro_end] and the RW range [rw_start, rw_end] (zeroing RW words as it goes)
    as hex. The dump from an unmodified backend (try_pdf_removal=False) is
    the reference; the pdf-removed variants must produce an identical dump
    while being at least 0x10000 bytes smaller on disk.
    """
    # I am not using the decorator since I want to test for diffs between pdf removal or not
    # also, this test will obviously fail with any backend moving things in memory
    # I try to print all ro and rw data and compare between pdf and not pdf
    tests = [
        (os.path.join(bin_location, "CROMU_00071"),
         0x0804D790, 0x0804D9B8, 0x08062BD8, 0x08062BEC),
        (os.path.join(bin_location, "KPRCA_00046"),
         0x0804F868, 0x0805007C, 0x08064298, 0x0806891C)
    ]
    with patcherex.utils.tempdir() as td:
        tmp_file = os.path.join(td, "patched")
        for filepath, ro_start, ro_end, rw_start, rw_end in tests:
            patches = []
            # Original on-disk size, to measure how much pdf removal saves.
            osize = os.path.getsize(filepath)
            patches.append(AddRODataPatch(b"0123456789abcdef", "hex_array"))
            added_code = '''
; eax=buf,ebx=len
pusha
mov ecx,eax
mov edx,ebx
mov eax,0x2
mov ebx,0x1
mov esi,0x0
int 0x80
popa
ret
'''
            patches.append(AddCodePatch(added_code,"print"))
            added_code = '''
; print eax as hex
pusha
mov ecx,32
mov ebx,eax
_print_reg_loop:
rol ebx,4
mov edi,ebx
and edi,0x0000000f
lea eax,[{hex_array}+edi]
mov ebp,ebx
mov ebx,0x1
call {print}
mov ebx,ebp
sub ecx,4
jnz _print_reg_loop
popa
ret
'''
            patches.append(AddCodePatch(added_code,"print_hex_eax"))
            # Entry-point dumper: loop1 prints RO words [ebx..ecx]; loop2
            # prints RW words [edx..esi], zeroing each word after reading it.
            code = '''
mov ebx, 0x%08x
mov ecx, 0x%08x
mov edx, 0x%08x
mov esi, 0x%08x
mov edi, ebx
_loop1:
mov eax, DWORD [edi]
call {print_hex_eax}
cmp edi, ecx
jg _exit1
add edi, 4
jmp _loop1
_exit1:
mov edi, edx
_loop2:
mov eax, DWORD [edi]
mov DWORD [edi], 0
call {print_hex_eax}
cmp edi, esi
jg _exit2
add edi, 4
jmp _loop2
_exit2:
''' %(ro_start, ro_end, rw_start, rw_end)
            patches.append(AddEntryPointPatch(code))
            # Reference run: no pdf removal, no data fallback.
            data_fallback = False
            backend = DetourBackend(filepath,data_fallback,try_pdf_removal=False)
            backend.apply_patches(patches)
            backend.save(tmp_file)
            # backend.save("../../vm/shared/patched")
            res = QEMURunner(tmp_file, b"\n", record_stdout=True)
            nose.tools.assert_equal(res.reg_vals, None)
            original = res.stdout
            print(filepath)
            print(original)
            # pdf removal without data fallback: same dump, smaller file.
            backend = DetourBackend(filepath,data_fallback,try_pdf_removal=True)
            backend.apply_patches(patches)
            backend.save(tmp_file)
            res = QEMURunner(tmp_file, b"\n", record_stdout=True)
            nose.tools.assert_equal(res.reg_vals, None)
            mod = res.stdout
            fsize = os.path.getsize(tmp_file)
            print(hex(fsize), hex(osize))
            nose.tools.assert_true((osize - fsize) > 0x10000)
            nose.tools.assert_true(backend.pdf_removed)
            nose.tools.assert_equal(original,mod)
            # pdf removal with data fallback enabled: same guarantees.
            data_fallback = True
            backend = DetourBackend(filepath,data_fallback,try_pdf_removal=True)
            backend.apply_patches(patches)
            backend.save(tmp_file)
            res = QEMURunner(tmp_file, b"\n", record_stdout=True)
            nose.tools.assert_equal(res.reg_vals, None)
            mod = res.stdout
            fsize = os.path.getsize(tmp_file)
            print(hex(fsize), hex(osize))
            nose.tools.assert_true((osize - fsize) > 0x10000)
            nose.tools.assert_true(backend.pdf_removed)
            nose.tools.assert_equal(original,mod)
def run_all():
    """Run every module-level ``test_*`` function, in alphabetical order."""
    # Snapshot into a dict comprehension (clearer than filter+lambda) so the
    # module namespace can't change size while we iterate.
    all_functions = {name: obj for name, obj in globals().items()
                     if name.startswith('test_')}
    for name in sorted(all_functions):
        func = all_functions[name]
        if callable(func):  # idiomatic replacement for hasattr(f, '__call__')
            l.info("testing %s", name)  # lazy logging args, not eager %-format
            func()
if __name__ == "__main__":
    import sys

    # Surface backend/test progress when run as a script.
    logging.getLogger("patcherex.backends.DetourBackend").setLevel("INFO")
    logging.getLogger("patcherex.test.test_detourbackend").setLevel("INFO")

    args = sys.argv[1:]
    if args:
        # e.g. "python test_detourbackend.py piling" runs test_piling only.
        globals()["test_" + args[0]]()
    else:
        run_all()
| 38.305211 | 193 | 0.594254 | 7,660 | 61,748 | 4.614099 | 0.061488 | 0.024757 | 0.034207 | 0.031123 | 0.861702 | 0.840482 | 0.825515 | 0.801154 | 0.788903 | 0.768843 | 0 | 0.061329 | 0.289402 | 61,748 | 1,611 | 194 | 38.328988 | 0.744177 | 0.026819 | 0 | 0.759639 | 0 | 0.013946 | 0.347267 | 0.029661 | 0 | 0 | 0.01702 | 0 | 0.0484 | 1 | 0.021329 | false | 0 | 0.009844 | 0 | 0.038556 | 0.088597 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
81af01e50c1212d1c6f1f39bb08cae6bc92ef596 | 521 | py | Python | python/testData/inspections/PyCompatibilityInspection/equalitySignInFStrings.py | Tasemo/intellij-community | 50aeaf729b7073e91c7c77487a1f155e0dfe3fcd | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/inspections/PyCompatibilityInspection/equalitySignInFStrings.py | Tasemo/intellij-community | 50aeaf729b7073e91c7c77487a1f155e0dfe3fcd | [
"Apache-2.0"
] | null | null | null | python/testData/inspections/PyCompatibilityInspection/equalitySignInFStrings.py | Tasemo/intellij-community | 50aeaf729b7073e91c7c77487a1f155e0dfe3fcd | [
"Apache-2.0"
] | null | null | null | import math
print(<warning descr="Python versions 2.7, 3.5 do not support a 'F' prefix">f</warning>'{math.pi <warning descr="Python versions 2.7, 3.5, 3.6, 3.7 do not support equality signs in f-strings">=</warning>:.2f}')
print(<warning descr="Python versions 2.7, 3.5 do not support a 'F' prefix">f</warning>'{<warning descr="Python versions 2.7, 3.5 do not support a 'F' prefix">f</warning>"{3.1415<warning descr="Python versions 2.7, 3.5, 3.6, 3.7 do not support equality signs in f-strings">=</warning>:.1f}":*^20}') | 173.666667 | 298 | 0.696737 | 101 | 521 | 3.594059 | 0.267327 | 0.165289 | 0.247934 | 0.358127 | 0.92562 | 0.92562 | 0.92562 | 0.92562 | 0.92562 | 0.92562 | 0 | 0.08026 | 0.115163 | 521 | 3 | 298 | 173.666667 | 0.707158 | 0 | 0 | 0 | 0 | 0.666667 | 0.821839 | 0.183908 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.333333 | null | null | 0.666667 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 15 |
81c5d43fbd8fc52e61f3a8d909f4468a501cb92c | 7,196 | py | Python | toolchain/riscv/MSYS/python/Lib/test/test_int_literal.py | zhiqiang-hu/bl_iot_sdk | 154ee677a8cc6a73e6a42a5ff12a8edc71e6d15d | [
"Apache-2.0"
] | 207 | 2018-10-01T08:53:01.000Z | 2022-03-14T12:15:54.000Z | toolchain/riscv/MSYS/python/Lib/test/test_int_literal.py | zhiqiang-hu/bl_iot_sdk | 154ee677a8cc6a73e6a42a5ff12a8edc71e6d15d | [
"Apache-2.0"
] | 8 | 2019-06-29T14:18:51.000Z | 2022-02-19T07:30:27.000Z | toolchain/riscv/MSYS/python/Lib/test/test_int_literal.py | zhiqiang-hu/bl_iot_sdk | 154ee677a8cc6a73e6a42a5ff12a8edc71e6d15d | [
"Apache-2.0"
] | 76 | 2020-03-16T01:47:46.000Z | 2022-03-21T16:37:07.000Z | """Test correct treatment of hex/oct constants.
This is complex because of changes due to PEP 237.
"""
import unittest
class TestHexOctBin(unittest.TestCase):
def test_hex_baseline(self):
# A few upper/lowercase tests
self.assertEqual(0x0, 0X0)
self.assertEqual(0x1, 0X1)
self.assertEqual(0x123456789abcdef, 0X123456789abcdef)
# Baseline tests
self.assertEqual(0x0, 0)
self.assertEqual(0x10, 16)
self.assertEqual(0x7fffffff, 2147483647)
self.assertEqual(0x7fffffffffffffff, 9223372036854775807)
# Ditto with a minus sign and parentheses
self.assertEqual(-(0x0), 0)
self.assertEqual(-(0x10), -16)
self.assertEqual(-(0x7fffffff), -2147483647)
self.assertEqual(-(0x7fffffffffffffff), -9223372036854775807)
# Ditto with a minus sign and NO parentheses
self.assertEqual(-0x0, 0)
self.assertEqual(-0x10, -16)
self.assertEqual(-0x7fffffff, -2147483647)
self.assertEqual(-0x7fffffffffffffff, -9223372036854775807)
def test_hex_unsigned(self):
# Positive constants
self.assertEqual(0x80000000, 2147483648)
self.assertEqual(0xffffffff, 4294967295)
# Ditto with a minus sign and parentheses
self.assertEqual(-(0x80000000), -2147483648)
self.assertEqual(-(0xffffffff), -4294967295)
# Ditto with a minus sign and NO parentheses
# This failed in Python 2.2 through 2.2.2 and in 2.3a1
self.assertEqual(-0x80000000, -2147483648)
self.assertEqual(-0xffffffff, -4294967295)
# Positive constants
self.assertEqual(0x8000000000000000, 9223372036854775808)
self.assertEqual(0xffffffffffffffff, 18446744073709551615)
# Ditto with a minus sign and parentheses
self.assertEqual(-(0x8000000000000000), -9223372036854775808)
self.assertEqual(-(0xffffffffffffffff), -18446744073709551615)
# Ditto with a minus sign and NO parentheses
# This failed in Python 2.2 through 2.2.2 and in 2.3a1
self.assertEqual(-0x8000000000000000, -9223372036854775808)
self.assertEqual(-0xffffffffffffffff, -18446744073709551615)
def test_oct_baseline(self):
# A few upper/lowercase tests
self.assertEqual(0o0, 0O0)
self.assertEqual(0o1, 0O1)
self.assertEqual(0o1234567, 0O1234567)
# Baseline tests
self.assertEqual(0o0, 0)
self.assertEqual(0o20, 16)
self.assertEqual(0o17777777777, 2147483647)
self.assertEqual(0o777777777777777777777, 9223372036854775807)
# Ditto with a minus sign and parentheses
self.assertEqual(-(0o0), 0)
self.assertEqual(-(0o20), -16)
self.assertEqual(-(0o17777777777), -2147483647)
self.assertEqual(-(0o777777777777777777777), -9223372036854775807)
# Ditto with a minus sign and NO parentheses
self.assertEqual(-0o0, 0)
self.assertEqual(-0o20, -16)
self.assertEqual(-0o17777777777, -2147483647)
self.assertEqual(-0o777777777777777777777, -9223372036854775807)
def test_oct_unsigned(self):
# Positive constants
self.assertEqual(0o20000000000, 2147483648)
self.assertEqual(0o37777777777, 4294967295)
# Ditto with a minus sign and parentheses
self.assertEqual(-(0o20000000000), -2147483648)
self.assertEqual(-(0o37777777777), -4294967295)
# Ditto with a minus sign and NO parentheses
# This failed in Python 2.2 through 2.2.2 and in 2.3a1
self.assertEqual(-0o20000000000, -2147483648)
self.assertEqual(-0o37777777777, -4294967295)
# Positive constants
self.assertEqual(0o1000000000000000000000, 9223372036854775808)
self.assertEqual(0o1777777777777777777777, 18446744073709551615)
# Ditto with a minus sign and parentheses
self.assertEqual(-(0o1000000000000000000000), -9223372036854775808)
self.assertEqual(-(0o1777777777777777777777), -18446744073709551615)
# Ditto with a minus sign and NO parentheses
# This failed in Python 2.2 through 2.2.2 and in 2.3a1
self.assertEqual(-0o1000000000000000000000, -9223372036854775808)
self.assertEqual(-0o1777777777777777777777, -18446744073709551615)
def test_bin_baseline(self):
# A few upper/lowercase tests
self.assertEqual(0b0, 0B0)
self.assertEqual(0b1, 0B1)
self.assertEqual(0b10101010101, 0B10101010101)
# Baseline tests
self.assertEqual(0b0, 0)
self.assertEqual(0b10000, 16)
self.assertEqual(0b1111111111111111111111111111111, 2147483647)
self.assertEqual(0b111111111111111111111111111111111111111111111111111111111111111, 9223372036854775807)
# Ditto with a minus sign and parentheses
self.assertEqual(-(0b0), 0)
self.assertEqual(-(0b10000), -16)
self.assertEqual(-(0b1111111111111111111111111111111), -2147483647)
self.assertEqual(-(0b111111111111111111111111111111111111111111111111111111111111111), -9223372036854775807)
# Ditto with a minus sign and NO parentheses
self.assertEqual(-0b0, 0)
self.assertEqual(-0b10000, -16)
self.assertEqual(-0b1111111111111111111111111111111, -2147483647)
self.assertEqual(-0b111111111111111111111111111111111111111111111111111111111111111, -9223372036854775807)
def test_bin_unsigned(self):
# Positive constants
self.assertEqual(0b10000000000000000000000000000000, 2147483648)
self.assertEqual(0b11111111111111111111111111111111, 4294967295)
# Ditto with a minus sign and parentheses
self.assertEqual(-(0b10000000000000000000000000000000), -2147483648)
self.assertEqual(-(0b11111111111111111111111111111111), -4294967295)
# Ditto with a minus sign and NO parentheses
# This failed in Python 2.2 through 2.2.2 and in 2.3a1
self.assertEqual(-0b10000000000000000000000000000000, -2147483648)
self.assertEqual(-0b11111111111111111111111111111111, -4294967295)
# Positive constants
self.assertEqual(0b1000000000000000000000000000000000000000000000000000000000000000, 9223372036854775808)
self.assertEqual(0b1111111111111111111111111111111111111111111111111111111111111111, 18446744073709551615)
# Ditto with a minus sign and parentheses
self.assertEqual(-(0b1000000000000000000000000000000000000000000000000000000000000000), -9223372036854775808)
self.assertEqual(-(0b1111111111111111111111111111111111111111111111111111111111111111), -18446744073709551615)
# Ditto with a minus sign and NO parentheses
# This failed in Python 2.2 through 2.2.2 and in 2.3a1
self.assertEqual(-0b1000000000000000000000000000000000000000000000000000000000000000, -9223372036854775808)
self.assertEqual(-0b1111111111111111111111111111111111111111111111111111111111111111, -18446744073709551615)
if __name__ == "__main__":
unittest.main()
| 49.972222 | 119 | 0.701918 | 634 | 7,196 | 7.935331 | 0.137224 | 0.241503 | 0.035778 | 0.053667 | 0.910555 | 0.900417 | 0.883125 | 0.883125 | 0.76446 | 0.734645 | 0 | 0.408192 | 0.219705 | 7,196 | 143 | 120 | 50.321678 | 0.487801 | 0.195247 | 0 | 0 | 0 | 0 | 0.001427 | 0 | 0 | 0 | 0.056903 | 0 | 0.89011 | 1 | 0.065934 | false | 0 | 0.010989 | 0 | 0.087912 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
c4924c130a28dbdc9672bb5492f3be341392c159 | 20,779 | py | Python | python/mxnet/gluon/contrib/cnn/conv_layers.py | mchoi8739/incubator-mxnet | cff583250479b31c394f568ffb835b720cb84dc4 | [
"Apache-2.0"
] | 13 | 2016-04-01T03:19:44.000Z | 2019-10-17T13:30:09.000Z | python/mxnet/gluon/contrib/cnn/conv_layers.py | mchoi8739/incubator-mxnet | cff583250479b31c394f568ffb835b720cb84dc4 | [
"Apache-2.0"
] | 82 | 2016-03-29T02:40:02.000Z | 2021-02-06T22:20:40.000Z | python/mxnet/gluon/contrib/cnn/conv_layers.py | mchoi8739/incubator-mxnet | cff583250479b31c394f568ffb835b720cb84dc4 | [
"Apache-2.0"
] | 3 | 2021-07-20T07:40:15.000Z | 2021-08-03T08:39:17.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable= arguments-differ
"""Custom convolutional neural network layers in model_zoo."""
__all__ = ['DeformableConvolution', 'ModulatedDeformableConvolution']
from .... import symbol
from ...block import HybridBlock
from ....base import numeric_types
from ...nn import Activation
class DeformableConvolution(HybridBlock):
    """2-D Deformable Convolution v_1 (Dai, 2017).

    Normal Convolution uses sampling points in a regular grid, while the sampling
    points of Deformable Convolution can be offset. The offset is learned with a
    separate convolution layer during the training. Both the convolution layer for
    generating the output features and the offsets are included in this gluon layer.

    Parameters
    ----------
    channels : int,
        The dimensionality of the output space
        i.e. the number of output channels in the convolution.
    kernel_size : int or tuple/list of 2 ints, (Default value = (1,1))
        Specifies the dimensions of the convolution window.
    strides : int or tuple/list of 2 ints, (Default value = (1,1))
        Specifies the strides of the convolution.
    padding : int or tuple/list of 2 ints, (Default value = (0,0))
        If padding is non-zero, then the input is implicitly zero-padded
        on both sides for padding number of points.
    dilation : int or tuple/list of 2 ints, (Default value = (1,1))
        Specifies the dilation rate to use for dilated convolution.
    groups : int, (Default value = 1)
        Controls the connections between inputs and outputs.
        At groups=1, all inputs are convolved to all outputs.
        At groups=2, the operation becomes equivalent to having two convolution
        layers side by side, each seeing half the input channels, and producing
        half the output channels, and both subsequently concatenated.
    num_deformable_group : int, (Default value = 1)
        Number of deformable group partitions.
    layout : str, (Default value = NCHW)
        Dimension ordering of data and weight. Can be 'NCW', 'NWC', 'NCHW',
        'NHWC', 'NCDHW', 'NDHWC', etc. 'N', 'C', 'H', 'W', 'D' stands for
        batch, channel, height, width and depth dimensions respectively.
        Convolution is performed over 'D', 'H', and 'W' dimensions.
    use_bias : bool, (Default value = True)
        Whether the layer for generating the output features uses a bias vector.
    in_channels : int, (Default value = 0)
        The number of input channels to this layer. If not specified,
        initialization will be deferred to the first time `forward` is called
        and input channels will be inferred from the shape of input data.
    activation : str, (Default value = None)
        Activation function to use. See :func:`~mxnet.ndarray.Activation`.
        If you don't specify anything, no activation is applied
        (ie. "linear" activation: `a(x) = x`).
    weight_initializer : str or `Initializer`, (Default value = None)
        Initializer for the `weight` weights matrix for the convolution layer
        for generating the output features.
    bias_initializer : str or `Initializer`, (Default value = zeros)
        Initializer for the bias vector for the convolution layer
        for generating the output features.
    offset_weight_initializer : str or `Initializer`, (Default value = zeros)
        Initializer for the `weight` weights matrix for the convolution layer
        for generating the offset.
    offset_bias_initializer : str or `Initializer`, (Default value = zeros),
        Initializer for the bias vector for the convolution layer
        for generating the offset.
    offset_use_bias: bool, (Default value = True)
        Whether the layer for generating the offset uses a bias vector.

    Inputs:
        - **data**: 4D input tensor with shape
          `(batch_size, in_channels, height, width)` when `layout` is `NCHW`.
          For other layouts shape is permuted accordingly.

    Outputs:
        - **out**: 4D output tensor with shape
          `(batch_size, channels, out_height, out_width)` when `layout` is `NCHW`.
          out_height and out_width are calculated as::
              out_height = floor((height+2*padding[0]-dilation[0]*(kernel_size[0]-1)-1)/stride[0])+1
              out_width = floor((width+2*padding[1]-dilation[1]*(kernel_size[1]-1)-1)/stride[1])+1
    """

    def __init__(self, channels, kernel_size=(1, 1), strides=(1, 1), padding=(0, 0), dilation=(1, 1), groups=1,
                 num_deformable_group=1, layout='NCHW', use_bias=True, in_channels=0, activation=None,
                 weight_initializer=None, bias_initializer='zeros',
                 offset_weight_initializer='zeros', offset_bias_initializer='zeros', offset_use_bias=True,
                 op_name='DeformableConvolution', adj=None, prefix=None, params=None):
        super(DeformableConvolution, self).__init__(prefix=prefix, params=params)
        with self.name_scope():
            self._channels = channels
            self._in_channels = in_channels
            assert layout in ('NCHW', 'NHWC'), "Only supports 'NCHW' and 'NHWC' layout for now"
            # Normalize scalar hyper-parameters to per-spatial-dimension tuples.
            if isinstance(kernel_size, numeric_types):
                kernel_size = (kernel_size,) * 2
            if isinstance(strides, numeric_types):
                strides = (strides,) * len(kernel_size)
            if isinstance(padding, numeric_types):
                padding = (padding,) * len(kernel_size)
            if isinstance(dilation, numeric_types):
                dilation = (dilation,) * len(kernel_size)
            self._op_name = op_name
            # Two offset values (shift along each spatial axis) per kernel
            # sampling point, per deformable group.
            offset_channels = 2 * kernel_size[0] * kernel_size[1] * num_deformable_group
            self._kwargs_offset = {
                'kernel': kernel_size, 'stride': strides, 'dilate': dilation,
                'pad': padding, 'num_filter': offset_channels, 'num_group': groups,
                'no_bias': not offset_use_bias, 'layout': layout}
            self._kwargs_deformable_conv = {
                'kernel': kernel_size, 'stride': strides, 'dilate': dilation,
                'pad': padding, 'num_filter': channels, 'num_group': groups,
                'num_deformable_group': num_deformable_group,
                'no_bias': not use_bias, 'layout': layout}
            if adj:
                self._kwargs_offset['adj'] = adj
                self._kwargs_deformable_conv['adj'] = adj
            # Dummy input shape (batch=1, channel=in_channels, spatial dims 0 =
            # unknown) used to symbolically infer the shapes of the offset
            # convolution's weight and bias below.
            dshape = [0] * (len(kernel_size) + 2)
            dshape[layout.find('N')] = 1
            dshape[layout.find('C')] = in_channels
            op = getattr(symbol, 'Convolution')
            offset = op(symbol.var('data', shape=dshape), **self._kwargs_offset)
            # infer_shape_partial tolerates the unknown (0) dimensions;
            # offsetshapes[1]/[2] are the inferred weight/bias shapes.
            offsetshapes = offset.infer_shape_partial()[0]
            self.offset_weight = self.params.get('offset_weight', shape=offsetshapes[1],
                                                 init=offset_weight_initializer,
                                                 allow_deferred_init=True)
            if offset_use_bias:
                self.offset_bias = self.params.get('offset_bias', shape=offsetshapes[2],
                                                   init=offset_bias_initializer,
                                                   allow_deferred_init=True)
            else:
                self.offset_bias = None
            # Weight shape of the deformable conv itself; index 1 (input
            # channels) is left 0 so it is deferred until the first forward.
            deformable_conv_weight_shape = [0] * (len(kernel_size) + 2)
            deformable_conv_weight_shape[0] = channels
            deformable_conv_weight_shape[2] = kernel_size[0]
            deformable_conv_weight_shape[3] = kernel_size[1]
            self.deformable_conv_weight = self.params.get('deformable_conv_weight',
                                                          shape=deformable_conv_weight_shape,
                                                          init=weight_initializer,
                                                          allow_deferred_init=True)
            if use_bias:
                self.deformable_conv_bias = self.params.get('deformable_conv_bias', shape=(channels,),
                                                            init=bias_initializer,
                                                            allow_deferred_init=True)
            else:
                self.deformable_conv_bias = None
            if activation:
                self.act = Activation(activation, prefix=activation + '_')
            else:
                self.act = None

    def hybrid_forward(self, F, x, offset_weight, deformable_conv_weight, offset_bias=None, deformable_conv_bias=None):
        """Compute sampling offsets with a plain convolution, then apply the
        deformable convolution using those offsets (plus optional activation)."""
        # cuDNN is disabled for the offset-generating convolution.
        # NOTE(review): presumably required/chosen for this op combination — confirm.
        if offset_bias is None:
            offset = F.Convolution(x, offset_weight, cudnn_off=True, **self._kwargs_offset)
        else:
            offset = F.Convolution(x, offset_weight, offset_bias, cudnn_off=True, **self._kwargs_offset)
        if deformable_conv_bias is None:
            act = F.contrib.DeformableConvolution(data=x, offset=offset, weight=deformable_conv_weight,
                                                  name='fwd', **self._kwargs_deformable_conv)
        else:
            act = F.contrib.DeformableConvolution(data=x, offset=offset, weight=deformable_conv_weight,
                                                  bias=deformable_conv_bias, name='fwd',
                                                  **self._kwargs_deformable_conv)
        if self.act:
            act = self.act(act)
        return act

    def _alias(self):
        # Prefix used by gluon when auto-generating block names.
        return 'deformable_conv'

    def __repr__(self):
        s = '{name}({mapping}, kernel_size={kernel}, stride={stride}'
        len_kernel_size = len(self._kwargs_deformable_conv['kernel'])
        # Only display hyper-parameters that differ from their defaults.
        if self._kwargs_deformable_conv['pad'] != (0,) * len_kernel_size:
            s += ', padding={pad}'
        if self._kwargs_deformable_conv['dilate'] != (1,) * len_kernel_size:
            s += ', dilation={dilate}'
        if hasattr(self, 'out_pad') and self.out_pad != (0,) * len_kernel_size:
            s += ', output_padding={out_pad}'.format(out_pad=self.out_pad)
        if self._kwargs_deformable_conv['num_group'] != 1:
            s += ', groups={num_group}'
        if self.deformable_conv_bias is None:
            s += ', bias=False'
        if self.act:
            s += ', {}'.format(self.act)
        s += ')'
        # shape[1] is 0 until the input-channel count has been inferred.
        shape = self.deformable_conv_weight.shape
        return s.format(name=self.__class__.__name__,
                        mapping='{0} -> {1}'.format(shape[1] if shape[1] else None, shape[0]),
                        **self._kwargs_deformable_conv)
class ModulatedDeformableConvolution(HybridBlock):
    """2-D Deformable Convolution v2 (Dai, 2018).

    The modulated deformable convolution operation is described in
    https://arxiv.org/abs/1811.11168

    Parameters
    ----------
    channels : int,
        The dimensionality of the output space
        i.e. the number of output channels in the convolution.
    kernel_size : int or tuple/list of 2 ints, (Default value = (1,1))
        Specifies the dimensions of the convolution window.
    strides : int or tuple/list of 2 ints, (Default value = (1,1))
        Specifies the strides of the convolution.
    padding : int or tuple/list of 2 ints, (Default value = (0,0))
        If padding is non-zero, then the input is implicitly zero-padded
        on both sides for padding number of points.
    dilation : int or tuple/list of 2 ints, (Default value = (1,1))
        Specifies the dilation rate to use for dilated convolution.
    groups : int, (Default value = 1)
        Controls the connections between inputs and outputs.
        At groups=1, all inputs are convolved to all outputs.
        At groups=2, the operation becomes equivalent to having two convolution
        layers side by side, each seeing half the input channels, and producing
        half the output channels, and both subsequently concatenated.
    num_deformable_group : int, (Default value = 1)
        Number of deformable group partitions.
    layout : str, (Default value = NCHW)
        Dimension ordering of data and weight. Can be 'NCW', 'NWC', 'NCHW',
        'NHWC', 'NCDHW', 'NDHWC', etc. 'N', 'C', 'H', 'W', 'D' stands for
        batch, channel, height, width and depth dimensions respectively.
        Convolution is performed over 'D', 'H', and 'W' dimensions.
    use_bias : bool, (Default value = True)
        Whether the layer for generating the output features uses a bias vector.
    in_channels : int, (Default value = 0)
        The number of input channels to this layer. If not specified,
        initialization will be deferred to the first time `forward` is called
        and input channels will be inferred from the shape of input data.
    activation : str, (Default value = None)
        Activation function to use. See :func:`~mxnet.ndarray.Activation`.
        If you don't specify anything, no activation is applied
        (ie. "linear" activation: `a(x) = x`).
    weight_initializer : str or `Initializer`, (Default value = None)
        Initializer for the `weight` weights matrix for the convolution layer
        for generating the output features.
    bias_initializer : str or `Initializer`, (Default value = zeros)
        Initializer for the bias vector for the convolution layer
        for generating the output features.
    offset_weight_initializer : str or `Initializer`, (Default value = zeros)
        Initializer for the `weight` weights matrix for the convolution layer
        for generating the offset.
    offset_bias_initializer : str or `Initializer`, (Default value = zeros),
        Initializer for the bias vector for the convolution layer
        for generating the offset.
    offset_use_bias: bool, (Default value = True)
        Whether the layer for generating the offset uses a bias vector.

    Inputs:
        - **data**: 4D input tensor with shape
          `(batch_size, in_channels, height, width)` when `layout` is `NCHW`.
          For other layouts shape is permuted accordingly.

    Outputs:
        - **out**: 4D output tensor with shape
          `(batch_size, channels, out_height, out_width)` when `layout` is `NCHW`.
          out_height and out_width are calculated as::
              out_height = floor((height+2*padding[0]-dilation[0]*(kernel_size[0]-1)-1)/stride[0])+1
              out_width = floor((width+2*padding[1]-dilation[1]*(kernel_size[1]-1)-1)/stride[1])+1
    """

    def __init__(self, channels, kernel_size=(1, 1), strides=(1, 1), padding=(0, 0), dilation=(1, 1), groups=1,
                 num_deformable_group=1, layout='NCHW', use_bias=True, in_channels=0, activation=None,
                 weight_initializer=None, bias_initializer='zeros',
                 offset_weight_initializer='zeros', offset_bias_initializer='zeros', offset_use_bias=True,
                 op_name='ModulatedDeformableConvolution', adj=None, prefix=None, params=None):
        super(ModulatedDeformableConvolution, self).__init__(prefix=prefix, params=params)
        with self.name_scope():
            self._channels = channels
            self._in_channels = in_channels
            assert layout in ('NCHW', 'NHWC'), "Only supports 'NCHW' and 'NHWC' layout for now"
            # Normalize scalar hyper-parameters to per-spatial-dimension tuples.
            if isinstance(kernel_size, numeric_types):
                kernel_size = (kernel_size,) * 2
            if isinstance(strides, numeric_types):
                strides = (strides,) * len(kernel_size)
            if isinstance(padding, numeric_types):
                padding = (padding,) * len(kernel_size)
            if isinstance(dilation, numeric_types):
                dilation = (dilation,) * len(kernel_size)
            self._op_name = op_name
            # v2 predicts 3 values per kernel sampling point per deformable
            # group: 2 spatial offsets plus 1 modulation (mask) scalar.
            offset_channels = num_deformable_group * 3 * kernel_size[0] * kernel_size[1]
            # Channel index at which the offsets end and the mask begins.
            self.offset_split_index = num_deformable_group * 2 * kernel_size[0] * kernel_size[1]
            self._kwargs_offset = {
                'kernel': kernel_size, 'stride': strides, 'dilate': dilation,
                'pad': padding, 'num_filter': offset_channels, 'num_group': groups,
                'no_bias': not offset_use_bias, 'layout': layout}
            self._kwargs_deformable_conv = {
                'kernel': kernel_size, 'stride': strides, 'dilate': dilation,
                'pad': padding, 'num_filter': channels, 'num_group': groups,
                'num_deformable_group': num_deformable_group,
                'no_bias': not use_bias, 'layout': layout}
            if adj:
                self._kwargs_offset['adj'] = adj
                self._kwargs_deformable_conv['adj'] = adj
            # Weight shape of the deformable conv itself; index 1 (input
            # channels) is left 0 so it is deferred until the first forward.
            deformable_conv_weight_shape = [0] * (len(kernel_size) + 2)
            deformable_conv_weight_shape[0] = channels
            deformable_conv_weight_shape[2] = kernel_size[0]
            deformable_conv_weight_shape[3] = kernel_size[1]
            self.deformable_conv_weight = self.params.get('deformable_conv_weight',
                                                          shape=deformable_conv_weight_shape,
                                                          init=weight_initializer,
                                                          allow_deferred_init=True)
            if use_bias:
                self.deformable_conv_bias = self.params.get('deformable_conv_bias', shape=(channels,),
                                                            init=bias_initializer,
                                                            allow_deferred_init=True)
            else:
                self.deformable_conv_bias = None
            # Dummy input shape (batch=1, channel=in_channels, spatial dims 0 =
            # unknown) used to symbolically infer the offset conv's param shapes.
            dshape = [0] * (len(kernel_size) + 2)
            dshape[layout.find('N')] = 1
            dshape[layout.find('C')] = in_channels
            op = getattr(symbol, 'Convolution')
            offset = op(symbol.var('data', shape=dshape), **self._kwargs_offset)
            offsetshapes = offset.infer_shape_partial()[0]
            self.offset_weight = self.params.get('offset_weight', shape=offsetshapes[1],
                                                 init=offset_weight_initializer,
                                                 allow_deferred_init=True)
            if offset_use_bias:
                self.offset_bias = self.params.get('offset_bias', shape=offsetshapes[2],
                                                   init=offset_bias_initializer,
                                                   allow_deferred_init=True)
            else:
                self.offset_bias = None
            if activation:
                self.act = Activation(activation, prefix=activation + '_')
            else:
                self.act = None

    def hybrid_forward(self, F, x, offset_weight, deformable_conv_weight, offset_bias=None, deformable_conv_bias=None):
        """Predict offsets+mask with a plain convolution, split them, and run
        the modulated deformable convolution (plus optional activation)."""
        if offset_bias is None:
            offset = F.Convolution(x, offset_weight, cudnn_off=True, **self._kwargs_offset)
        else:
            offset = F.Convolution(x, offset_weight, offset_bias, cudnn_off=True, **self._kwargs_offset)
        # First offset_split_index channels are the (H, W) offsets; the rest
        # is the per-sampling-point modulation mask.
        offset_t = F.slice_axis(offset, axis=1, begin=0, end=self.offset_split_index)
        mask = F.slice_axis(offset, axis=1, begin=self.offset_split_index, end=None)
        # Squash the mask into (0, 2): a zero pre-activation (the initializer
        # default) yields sigmoid(0)*2 == 1, i.e. identity modulation.
        mask = F.sigmoid(mask) * 2
        if deformable_conv_bias is None:
            act = F.contrib.ModulatedDeformableConvolution(data=x, offset=offset_t, mask=mask,
                                                           weight=deformable_conv_weight,
                                                           name='fwd', **self._kwargs_deformable_conv)
        else:
            act = F.contrib.ModulatedDeformableConvolution(data=x, offset=offset_t, mask=mask,
                                                           weight=deformable_conv_weight,
                                                           bias=deformable_conv_bias, name='fwd',
                                                           **self._kwargs_deformable_conv)
        if self.act:
            act = self.act(act)
        return act

    def _alias(self):
        # Prefix used by gluon when auto-generating block names.
        return 'modulated_deformable_conv'
| 51.817955 | 119 | 0.608306 | 2,445 | 20,779 | 4.989366 | 0.130061 | 0.056234 | 0.034429 | 0.022379 | 0.846709 | 0.834577 | 0.826871 | 0.81146 | 0.807443 | 0.803181 | 0 | 0.01156 | 0.300592 | 20,779 | 400 | 120 | 51.9475 | 0.82784 | 0.392127 | 0 | 0.812183 | 0 | 0 | 0.076573 | 0.017881 | 0 | 0 | 0 | 0 | 0.010152 | 1 | 0.035533 | false | 0 | 0.020305 | 0.010152 | 0.091371 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c49454ed75f1b5ca4a15695d871daa124092a6f3 | 112 | py | Python | testtool/same.py | szstonelee/bunnyredis | 9c5b6c084e4bb1c705e670897d24d203e1039762 | [
"BSD-3-Clause"
] | 12 | 2021-07-24T12:26:24.000Z | 2022-03-03T13:55:41.000Z | testtool/same.py | szstonelee/bunnyredis | 9c5b6c084e4bb1c705e670897d24d203e1039762 | [
"BSD-3-Clause"
] | null | null | null | testtool/same.py | szstonelee/bunnyredis | 9c5b6c084e4bb1c705e670897d24d203e1039762 | [
"BSD-3-Clause"
] | null | null | null | from test_common import *
# Connect to the three nodes and compare their datasets; both helpers come
# from test_common (star import on the line above).
# NOTE(review): node IPs are hard-coded — confirm they match the test cluster.
init_common_redis("192.168.0.11", "192.168.0.22", "192.168.0.33")
compare_all()
| 11.2 | 65 | 0.678571 | 21 | 112 | 3.428571 | 0.666667 | 0.25 | 0.291667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.27551 | 0.125 | 112 | 9 | 66 | 12.444444 | 0.459184 | 0 | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 8 |
c4d697a712ac946788cfde017a2f614dc0afcc12 | 23,779 | py | Python | pygsti/modelmembers/errorgencontainer.py | pyGSTi-Developers/pyGSTi | bfedc1de4d604f14b0f958615776fb80ddb59e33 | [
"Apache-2.0"
] | 73 | 2016-01-28T05:02:05.000Z | 2022-03-30T07:46:33.000Z | pygsti/modelmembers/errorgencontainer.py | pyGSTi-Developers/pyGSTi | bfedc1de4d604f14b0f958615776fb80ddb59e33 | [
"Apache-2.0"
] | 113 | 2016-02-25T15:32:18.000Z | 2022-03-31T13:18:13.000Z | pygsti/modelmembers/errorgencontainer.py | pyGSTi-Developers/pyGSTi | bfedc1de4d604f14b0f958615776fb80ddb59e33 | [
"Apache-2.0"
] | 41 | 2016-03-15T19:32:07.000Z | 2022-02-16T10:22:05.000Z | """
Defines the ErrorGeneratorContainer helper class.
"""
#***************************************************************************************************
# Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights
# in this software.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory.
#***************************************************************************************************
import numpy as _np
from pygsti.baseobjs.basis import BuiltinBasis as _BuiltinBasis
class ErrorGeneratorContainer(object):
    """
    Mix-in providing error-generator accessors for objects built around an
    `errorgen` member.

    Every method simply forwards to `self.errorgen`, translating between the
    container-level API (`errorgen_*`) and the generator's own methods.
    """

    def __init__(self, errorgen):
        # The underlying error generator whose coefficients we expose.
        self.errorgen = errorgen

    def errorgen_coefficients(self, return_basis=False, logscale_nonham=False):
        """
        Build a dictionary of this operation's Lindblad error-generator coefficients.

        These coefficients are generally *functions* of the underlying
        parameters (e.g. to enforce positivity), not the raw parameter values.

        Parameters
        ----------
        return_basis : bool, optional
            If True, also return a :class:`Basis` whose elements were used to
            construct the error-generator terms.

        logscale_nonham : bool, optional
            If True, rescale each non-Hamiltonian coefficient `coeff` to
            `(1 - exp(-d^2 * coeff)) / d^2`, converting it into the error rate
            it would contribute within a depolarizing channel (the convention
            used by :method:`error_rates`).

        Returns
        -------
        lindblad_term_dict : dict
            Keyed by `(termType, basisLabel1, <basisLabel2>)` tuples, where
            `termType` is `"H"`, `"S"`, or `"A"`.  Hamiltonian/Affine keys hold
            one basis label; Stochastic keys hold one (diagonal) or two
            (off-diagonal) labels.  Values are complex coefficients.

        basis : Basis
            Only when `return_basis=True`: maps the basis labels used in
            `lindblad_term_dict` to basis matrices.
        """
        return self.errorgen.coefficients(return_basis, logscale_nonham)

    def errorgen_coefficient_labels(self):
        """
        Elementary error-generator labels matching :method:`errorgen_coefficients_array`.

        Returns
        -------
        tuple
            `(<type>, <basisEl1> [,<basisEl2])` entries identifying this
            gate's elementary error generators.
        """
        return self.errorgen.coefficient_labels()

    def errorgen_coefficients_array(self):
        """
        Weighted coefficients of this operation's error generator, as a 1D array.

        Flattens everything returned by :method:`errorgen_coefficients`,
        weighted so that generators can be penalized differently when an
        `errorgen_penalty_factor` appears in an objective function.

        Returns
        -------
        numpy.ndarray
            1D array, one entry per coefficient in the linear combination of
            standard error generators forming this operation's error generator.
        """
        return self.errorgen.coefficients_array()

    def errorgen_coefficients_array_deriv_wrt_params(self):
        """
        Jacobian of :method:`errorgen_coefficients_array` w.r.t. this operation's parameters.

        Returns
        -------
        numpy.ndarray
            Shape `(num_coeffs, num_params)`.
        """
        return self.errorgen.coefficients_array_deriv_wrt_params()

    def error_rates(self):
        """
        Dictionary of error rates associated with this operation.

        A Hamiltonian term's rate is the rotation angle `theta` in
        `U = exp(i * theta/2 * BasisElement)`.  A Stochastic term's rate is
        the contribution its basis element would make to the error rate of a
        depolarizing channel — e.g. a rate of 0.01 for `('S','X')` means the
        `rho -> X*rho*X - rho` coefficient is set so that using it for all
        three of X, Y, Z would give a depolarizing error rate of 0.03.

        Because error-generator terms need not commute, the sum of these
        rates is *not* necessarily the overall channel's error rate.

        Returns
        -------
        lindblad_term_dict : dict
            Same key convention as :method:`errorgen_coefficients`.  Values
            are real rates, except in the 2-basis-label (off-diagonal) case.
        """
        return self.errorgen_coefficients(return_basis=False, logscale_nonham=True)

    def set_errorgen_coefficients(self, lindblad_term_dict, action="update", logscale_nonham=False, truncate=False):
        """
        Set coefficients of terms in this operation's error generator.

        Keys of `lindblad_term_dict` are tuples naming the term type and the
        basis element(s) used to build it, e.g. `('H','X')`.

        Parameters
        ----------
        lindblad_term_dict : dict
            Same key convention as :method:`errorgen_coefficients`.  Values
            are the coefficients to apply; real except in the 2-basis-label
            case.

        action : {"update","add","reset"}
            How the supplied values combine with existing coefficients.

        logscale_nonham : bool, optional
            If True, interpret non-Hamiltonian values as error *rates* of an
            equivalent depolarizing channel and store
            `-log(1 - d^2*rate)/d^2` instead (what :method:`set_error_rates`
            does).

        truncate : bool, optional
            If True, allow coefficients to be adjusted to satisfy constraints
            (e.g. CPTP); if False, raise when they cannot be set exactly.

        Returns
        -------
        None
        """
        self.errorgen.set_coefficients(lindblad_term_dict, action, logscale_nonham, truncate)
        # Keep the forward-sim representation in sync and flag for re-caching.
        self._update_rep()
        self.dirty = True

    def set_error_rates(self, lindblad_term_dict, action="update"):
        """
        Set error-generator coefficients so the channel's error *rates* match
        the given values (see :method:`error_rates`).

        Parameters
        ----------
        lindblad_term_dict : dict
            Same key convention as :method:`errorgen_coefficients`.  Values
            are real rates, except possibly in the 2-basis-label case.

        action : {"update","add","reset"}
            How the supplied values combine with existing rates.

        Returns
        -------
        None
        """
        self.set_errorgen_coefficients(lindblad_term_dict, action, logscale_nonham=True)
class ErrorMapContainer(object):
    """
    Add-on class that implements a number of error-generator access functions
    for objects whose errors are held in an `error_map` member.  All methods
    delegate to `self.error_map`.
    """

    def __init__(self, error_map):
        # The error map this container delegates all errorgen queries to.
        self.error_map = error_map

    def errorgen_coefficients(self, return_basis=False, logscale_nonham=False):
        """
        Constructs a dictionary of the Lindblad-error-generator coefficients of this operation.

        Note that these are not necessarily the parameter values, as these
        coefficients are generally functions of the parameters (so as to keep
        the coefficients positive, for instance).

        Parameters
        ----------
        return_basis : bool, optional
            Whether to also return a :class:`Basis` containing the elements
            with which the error generator terms were constructed.

        logscale_nonham : bool, optional
            Whether or not the non-hamiltonian error generator coefficients
            should be scaled so that the returned dict contains:
            `(1 - exp(-d^2 * coeff)) / d^2` instead of `coeff`. This
            essentially converts the coefficient into a rate that is
            the contribution this term would have within a depolarizing
            channel where all stochastic generators had this same coefficient.
            This is the value returned by :method:`error_rates`.

        Returns
        -------
        lindblad_term_dict : dict
            Keys are `(termType, basisLabel1, <basisLabel2>)`
            tuples, where `termType` is `"H"` (Hamiltonian), `"S"` (Stochastic),
            or `"A"` (Affine).  Hamiltonian and Affine terms always have a
            single basis label (so key is a 2-tuple) whereas Stochastic tuples
            have 1 basis label to indicate a *diagonal* term and otherwise have
            2 basis labels to specify off-diagonal non-Hamiltonian Lindblad
            terms.  Basis labels are integers starting at 0.  Values are complex
            coefficients.

        basis : Basis
            A Basis mapping the basis labels used in the
            keys of `lindblad_term_dict` to basis matrices.
        """
        return self.error_map.errorgen_coefficients(return_basis, logscale_nonham)

    def errorgen_coefficient_labels(self):
        """
        The elementary error-generator labels corresponding to the elements of :method:`errorgen_coefficients_array`.

        Returns
        -------
        tuple
            A tuple of (<type>, <basisEl1> [,<basisEl2]) elements identifying the elementary error
            generators of this gate.
        """
        # BUGFIX: this previously read `self.errormap`, an attribute that is
        # never set (__init__ stores `self.error_map`), so every call raised
        # AttributeError.
        return self.error_map.errorgen_coefficient_labels()

    def errorgen_coefficients_array(self):
        """
        The weighted coefficients of this operation's error generator in terms of "standard" error generators.

        Constructs a 1D array of all the coefficients returned by :method:`errorgen_coefficients`,
        weighted so that different error generators can be weighted differently when a
        `errorgen_penalty_factor` is used in an objective function.

        Returns
        -------
        numpy.ndarray
            A 1D array of length equal to the number of coefficients in the linear combination
            of standard error generators that is this operation's error generator.
        """
        return self.error_map.errorgen_coefficients_array()

    def errorgen_coefficients_array_deriv_wrt_params(self):
        """
        The jacobian of :method:`errorgen_coefficients_array` with respect to this operation's parameters.

        Returns
        -------
        numpy.ndarray
            A 2D array of shape `(num_coeffs, num_params)` where `num_coeffs` is the number of
            coefficients of this operation's error generator and `num_params` is this operation's
            number of parameters.
        """
        return self.error_map.errorgen_coefficients_array_deriv_wrt_params()

    def error_rates(self):
        """
        Constructs a dictionary of the error rates associated with this operation.

        The "error rate" for an individual Hamiltonian error is the angle
        about the "axis" (generalized in the multi-qubit case)
        corresponding to a particular basis element, i.e. `theta` in
        the unitary channel `U = exp(i * theta/2 * BasisElement)`.

        The "error rate" for an individual Stochastic error is the
        contribution that basis element's term would have to the
        error rate of a depolarization channel.  For example, if
        the rate corresponding to the term ('S','X') is 0.01 this
        means that the coefficient of the rho -> X*rho*X-rho error
        generator is set such that if this coefficient were used
        for all 3 (X,Y, and Z) terms the resulting depolarizing
        channel would have error rate 3*0.01 = 0.03.

        Note that because error generator terms do not necessarily
        commute with one another, the sum of the returned error
        rates is not necessarily the error rate of the overall
        channel.

        Returns
        -------
        lindblad_term_dict : dict
            Keys are `(termType, basisLabel1, <basisLabel2>)`
            tuples, where `termType` is `"H"` (Hamiltonian), `"S"` (Stochastic),
            or `"A"` (Affine).  Hamiltonian and Affine terms always have a
            single basis label (so key is a 2-tuple) whereas Stochastic tuples
            have 1 basis label to indicate a *diagonal* term and otherwise have
            2 basis labels to specify off-diagonal non-Hamiltonian Lindblad
            terms.  Values are real error rates except for the 2-basis-label
            case.
        """
        return self.errorgen_coefficients(return_basis=False, logscale_nonham=True)
class NoErrorGeneratorInterface(object):
    """
    Add-on class implementing the error-generator access API for an operation
    that has *no* error generator.

    Every accessor reports an empty set of coefficients/labels, and attempting
    to set any coefficient raises an error.
    """

    def errorgen_coefficients(self, return_basis=False, logscale_nonham=False):
        """
        Constructs a dictionary of the Lindblad-error-generator coefficients of this operation.

        Parameters
        ----------
        return_basis : bool, optional
            Whether to also return a :class:`Basis` containing the elements
            with which the error generator terms were constructed.

        logscale_nonham : bool, optional
            Whether the non-Hamiltonian coefficients should be rescaled into
            depolarizing-channel rates.  Irrelevant here, since there are no
            coefficients at all.

        Returns
        -------
        lindblad_term_dict : dict
            Always empty for this interface.

        basis : Basis
            Only returned when `return_basis=True`: a trivial 1-dimensional
            Pauli-product basis.
        """
        if not return_basis:
            return {}
        # A minimal basis, since there are no error-generator terms to map.
        return {}, _BuiltinBasis('pp', 1)

    def set_errorgen_coefficients(self, lindblad_term_dict, action="update", logscale_nonham=False, truncate=True):
        """
        Sets the coefficients of terms in the error generator of this operation.

        Since this operation has no error generator, the only valid input is
        an *empty* `lindblad_term_dict`; anything else raises.

        Parameters
        ----------
        lindblad_term_dict : dict
            Keys are `(termType, basisLabel1, <basisLabel2>)` tuples; values
            are the desired error-generator coefficients.  Must be empty.

        action : {"update","add","reset"}
            How the values would be combined with existing coefficients
            (unused here).

        logscale_nonham : bool, optional
            Whether the given values are error *rates* rather than raw
            coefficients (unused here).

        truncate : bool, optional
            Whether adjustment of the coefficients is allowed in order to meet
            constraints (unused here).

        Returns
        -------
        None

        Raises
        ------
        ValueError
            If `lindblad_term_dict` is non-empty.
        """
        if lindblad_term_dict:
            raise ValueError("Cannot set any error generator coefficients on an op with no error generator!")

    def errorgen_coefficient_labels(self):
        """
        The elementary error-generator labels corresponding to the elements of :method:`errorgen_coefficients_array`.

        Returns
        -------
        tuple
            Always empty for this interface.
        """
        return ()

    def errorgen_coefficients_array(self):
        """
        The weighted coefficients of this operation's error generator in terms of "standard" error generators.

        Returns
        -------
        numpy.ndarray
            A length-0 1D array, since there are no coefficients.
        """
        return _np.zeros(0, 'd')

    def errorgen_coefficients_array_deriv_wrt_params(self):
        """
        The jacobian of :method:`errorgen_coefficients_array` with respect to this operation's parameters.

        Returns
        -------
        numpy.ndarray
            A `(0, num_params)`-shaped array (zero coefficient rows).
        """
        return _np.zeros((0, self.num_params), 'd')

    def error_rates(self):
        """
        Constructs a dictionary of the error rates associated with this operation.

        Returns
        -------
        lindblad_term_dict : dict
            Always empty for this interface.
        """
        return dict()
| 45.905405 | 117 | 0.645149 | 2,974 | 23,779 | 5.088433 | 0.10121 | 0.035155 | 0.029604 | 0.016058 | 0.930219 | 0.928104 | 0.923677 | 0.916276 | 0.916276 | 0.913566 | 0 | 0.008342 | 0.279112 | 23,779 | 517 | 118 | 45.994197 | 0.87446 | 0.752765 | 0 | 0.372549 | 0 | 0 | 0.034032 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.392157 | false | 0 | 0.039216 | 0 | 0.803922 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
c4f2f6d8e4c5260a1197accb6511038f5f55ad37 | 30,368 | py | Python | tests/unit/requests/test_download.py | q-logic/google-resumable-media-python | 90bd0c1a6a88b53c2049cd75cf73129fcecde5de | [
"Apache-2.0"
] | null | null | null | tests/unit/requests/test_download.py | q-logic/google-resumable-media-python | 90bd0c1a6a88b53c2049cd75cf73129fcecde5de | [
"Apache-2.0"
] | 1 | 2019-07-23T04:05:26.000Z | 2019-08-07T16:10:00.000Z | tests/unit/requests/test_download.py | q-logic/google-resumable-media-python | 90bd0c1a6a88b53c2049cd75cf73129fcecde5de | [
"Apache-2.0"
] | 4 | 2019-07-08T13:13:08.000Z | 2019-10-09T05:01:16.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import mock
import pytest
from six.moves import http_client
from google.resumable_media import common
from google.resumable_media import _helpers
from google.resumable_media.requests import download as download_mod
from google.resumable_media.requests import _request_helpers
# Template media-download URL used by all tests in this module.
EXAMPLE_URL = (
    u"https://www.googleapis.com/download/storage/v1/b/"
    u"{BUCKET}/o/{OBJECT}?alt=media"
)
# The (connect, read) timeout pair the transport should receive when the
# caller does not supply an explicit timeout.
EXPECTED_TIMEOUT = (61, 60)
class TestDownload(object):
    """Unit tests for :class:`download_mod.Download` (decoded-content downloads)."""

    def test__write_to_stream_no_hash_check(self):
        # With no checksum configured, chunks are written through verbatim.
        stream = io.BytesIO()
        download = download_mod.Download(EXAMPLE_URL, stream=stream)
        chunk1 = b"right now, "
        chunk2 = b"but a little later"
        response = _mock_response(chunks=[chunk1, chunk2], headers={})
        ret_val = download._write_to_stream(response)
        assert ret_val is None
        assert stream.getvalue() == chunk1 + chunk2
        # Check mocks.
        response.__enter__.assert_called_once_with()
        response.__exit__.assert_called_once_with(None, None, None)
        response.iter_content.assert_called_once_with(
            chunk_size=_request_helpers._SINGLE_GET_CHUNK_SIZE, decode_unicode=False
        )

    @pytest.mark.parametrize("checksum", ["md5", "crc32c", None])
    def test__write_to_stream_with_hash_check_success(self, checksum):
        # Matching checksums in the response header: streaming succeeds.
        stream = io.BytesIO()
        download = download_mod.Download(EXAMPLE_URL, stream=stream, checksum=checksum)
        chunk1 = b"first chunk, count starting at 0. "
        chunk2 = b"second chunk, or chunk 1, which is better? "
        chunk3 = b"ordinals and numerals and stuff."
        # Checksums of chunk1 + chunk2 + chunk3.
        header_value = u"crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w=="
        headers = {_helpers._HASH_HEADER: header_value}
        response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers)
        ret_val = download._write_to_stream(response)
        assert ret_val is None
        assert stream.getvalue() == chunk1 + chunk2 + chunk3
        # Check mocks.
        response.__enter__.assert_called_once_with()
        response.__exit__.assert_called_once_with(None, None, None)
        response.iter_content.assert_called_once_with(
            chunk_size=_request_helpers._SINGLE_GET_CHUNK_SIZE, decode_unicode=False
        )

    @pytest.mark.parametrize("checksum", ["md5", "crc32c"])
    def test__write_to_stream_with_hash_check_fail(self, checksum):
        # A mismatched checksum must raise DataCorruption after streaming.
        stream = io.BytesIO()
        download = download_mod.Download(EXAMPLE_URL, stream=stream, checksum=checksum)
        chunk1 = b"first chunk, count starting at 0. "
        chunk2 = b"second chunk, or chunk 1, which is better? "
        chunk3 = b"ordinals and numerals and stuff."
        bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ=="
        header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum)
        headers = {_helpers._HASH_HEADER: header_value}
        response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers)
        with pytest.raises(common.DataCorruption) as exc_info:
            download._write_to_stream(response)
        assert not download.finished
        error = exc_info.value
        assert error.response is response
        assert len(error.args) == 1
        if checksum == u"md5":
            good_checksum = u"fPAJHnnoi/+NadyNxT2c2w=="
        else:
            good_checksum = u"qmNCyg=="
        msg = download_mod._CHECKSUM_MISMATCH.format(
            EXAMPLE_URL, bad_checksum, good_checksum, checksum_type=checksum.upper()
        )
        assert error.args[0] == msg
        # Check mocks.
        response.__enter__.assert_called_once_with()
        response.__exit__.assert_called_once_with(None, None, None)
        response.iter_content.assert_called_once_with(
            chunk_size=_request_helpers._SINGLE_GET_CHUNK_SIZE, decode_unicode=False
        )

    def test__write_to_stream_with_invalid_checksum_type(self):
        # An unsupported checksum name is rejected with ValueError.
        BAD_CHECKSUM_TYPE = "badsum"
        stream = io.BytesIO()
        download = download_mod.Download(
            EXAMPLE_URL, stream=stream, checksum=BAD_CHECKSUM_TYPE
        )
        chunk1 = b"first chunk, count starting at 0. "
        chunk2 = b"second chunk, or chunk 1, which is better? "
        chunk3 = b"ordinals and numerals and stuff."
        bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ=="
        header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum)
        headers = {_helpers._HASH_HEADER: header_value}
        response = _mock_response(chunks=[chunk1, chunk2, chunk3], headers=headers)
        with pytest.raises(ValueError) as exc_info:
            download._write_to_stream(response)
        assert not download.finished
        error = exc_info.value
        assert error.args[0] == "checksum must be ``'md5'``, ``'crc32c'`` or ``None``"

    def _consume_helper(
        self,
        stream=None,
        end=65536,
        headers=None,
        chunks=(),
        response_headers=None,
        checksum="md5",
        timeout=None,
    ):
        # Shared driver: build a Download, consume it through a mocked
        # transport, and verify the issued request and range bookkeeping.
        download = download_mod.Download(
            EXAMPLE_URL, stream=stream, end=end, headers=headers, checksum=checksum
        )
        transport = mock.Mock(spec=["request"])
        transport.request.return_value = _mock_response(
            chunks=chunks, headers=response_headers
        )
        assert not download.finished
        if timeout is not None:
            ret_val = download.consume(transport, timeout=timeout)
        else:
            ret_val = download.consume(transport)
        assert ret_val is transport.request.return_value
        called_kwargs = {
            u"data": None,
            u"headers": download._headers,
            u"timeout": EXPECTED_TIMEOUT if timeout is None else timeout,
        }
        if chunks:
            # Streaming is only requested when there is a stream to fill.
            assert stream is not None
            called_kwargs[u"stream"] = True
        transport.request.assert_called_once_with(u"GET", EXAMPLE_URL, **called_kwargs)
        range_bytes = u"bytes={:d}-{:d}".format(0, end)
        assert download._headers[u"range"] == range_bytes
        assert download.finished
        return transport

    def test_consume(self):
        self._consume_helper()

    def test_consume_with_custom_timeout(self):
        self._consume_helper(timeout=14.7)

    @pytest.mark.parametrize("checksum", ["md5", "crc32c", None])
    def test_consume_with_stream(self, checksum):
        stream = io.BytesIO()
        chunks = (b"up down ", b"charlie ", b"brown")
        transport = self._consume_helper(
            stream=stream, chunks=chunks, checksum=checksum
        )
        assert stream.getvalue() == b"".join(chunks)
        # Check mocks.
        response = transport.request.return_value
        response.__enter__.assert_called_once_with()
        response.__exit__.assert_called_once_with(None, None, None)
        response.iter_content.assert_called_once_with(
            chunk_size=_request_helpers._SINGLE_GET_CHUNK_SIZE, decode_unicode=False
        )

    @pytest.mark.parametrize("checksum", ["md5", "crc32c"])
    def test_consume_with_stream_hash_check_success(self, checksum):
        stream = io.BytesIO()
        chunks = (b"up down ", b"charlie ", b"brown")
        # Checksums of the concatenated chunks.
        header_value = u"crc32c=UNIQxg==,md5=JvS1wjMvfbCXgEGeaJJLDQ=="
        headers = {_helpers._HASH_HEADER: header_value}
        transport = self._consume_helper(
            stream=stream, chunks=chunks, response_headers=headers, checksum=checksum
        )
        assert stream.getvalue() == b"".join(chunks)
        # Check mocks.
        response = transport.request.return_value
        response.__enter__.assert_called_once_with()
        response.__exit__.assert_called_once_with(None, None, None)
        response.iter_content.assert_called_once_with(
            chunk_size=_request_helpers._SINGLE_GET_CHUNK_SIZE, decode_unicode=False
        )

    @pytest.mark.parametrize("checksum", ["md5", "crc32c"])
    def test_consume_with_stream_hash_check_fail(self, checksum):
        # consume() streams everything first, then raises on the mismatch.
        stream = io.BytesIO()
        download = download_mod.Download(EXAMPLE_URL, stream=stream, checksum=checksum)
        chunks = (b"zero zero", b"niner tango")
        bad_checksum = u"anVzdCBub3QgdGhpcyAxLA=="
        header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum)
        headers = {_helpers._HASH_HEADER: header_value}
        transport = mock.Mock(spec=["request"])
        transport.request.return_value = _mock_response(chunks=chunks, headers=headers)
        assert not download.finished
        with pytest.raises(common.DataCorruption) as exc_info:
            download.consume(transport)
        assert stream.getvalue() == b"".join(chunks)
        assert download.finished
        assert download._headers == {}
        error = exc_info.value
        assert error.response is transport.request.return_value
        assert len(error.args) == 1
        if checksum == u"md5":
            good_checksum = u"1A/dxEpys717C6FH7FIWDw=="
        else:
            good_checksum = u"GvNZlg=="
        msg = download_mod._CHECKSUM_MISMATCH.format(
            EXAMPLE_URL, bad_checksum, good_checksum, checksum_type=checksum.upper()
        )
        assert error.args[0] == msg
        # Check mocks.
        transport.request.assert_called_once_with(
            u"GET",
            EXAMPLE_URL,
            data=None,
            headers={},
            stream=True,
            timeout=EXPECTED_TIMEOUT,
        )

    def test_consume_with_headers(self):
        # User-supplied headers get the computed range header added in place.
        headers = {}  # Empty headers
        end = 16383
        self._consume_helper(end=end, headers=headers)
        range_bytes = u"bytes={:d}-{:d}".format(0, end)
        # Make sure the headers have been modified.
        assert headers == {u"range": range_bytes}
class TestRawDownload(object):
    """Unit tests for :class:`download_mod.RawDownload` (raw-content downloads).

    Mirrors :class:`TestDownload`, but raw downloads read from
    ``response.raw.stream(...)`` instead of ``response.iter_content(...)``,
    so every test here builds its response with ``_mock_raw_response``.
    """

    def test__write_to_stream_no_hash_check(self):
        # With no checksum configured, chunks are written through verbatim.
        stream = io.BytesIO()
        download = download_mod.RawDownload(EXAMPLE_URL, stream=stream)
        chunk1 = b"right now, "
        chunk2 = b"but a little later"
        response = _mock_raw_response(chunks=[chunk1, chunk2], headers={})
        ret_val = download._write_to_stream(response)
        assert ret_val is None
        assert stream.getvalue() == chunk1 + chunk2
        # Check mocks.
        response.__enter__.assert_called_once_with()
        response.__exit__.assert_called_once_with(None, None, None)
        response.raw.stream.assert_called_once_with(
            _request_helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False
        )

    @pytest.mark.parametrize("checksum", ["md5", "crc32c", None])
    def test__write_to_stream_with_hash_check_success(self, checksum):
        # Matching checksums in the response header: streaming succeeds.
        stream = io.BytesIO()
        download = download_mod.RawDownload(
            EXAMPLE_URL, stream=stream, checksum=checksum
        )
        chunk1 = b"first chunk, count starting at 0. "
        chunk2 = b"second chunk, or chunk 1, which is better? "
        chunk3 = b"ordinals and numerals and stuff."
        # Checksums of chunk1 + chunk2 + chunk3.
        header_value = u"crc32c=qmNCyg==,md5=fPAJHnnoi/+NadyNxT2c2w=="
        headers = {_helpers._HASH_HEADER: header_value}
        response = _mock_raw_response(chunks=[chunk1, chunk2, chunk3], headers=headers)
        ret_val = download._write_to_stream(response)
        assert ret_val is None
        assert stream.getvalue() == chunk1 + chunk2 + chunk3
        # Check mocks.
        response.__enter__.assert_called_once_with()
        response.__exit__.assert_called_once_with(None, None, None)
        response.raw.stream.assert_called_once_with(
            _request_helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False
        )

    @pytest.mark.parametrize("checksum", ["md5", "crc32c"])
    def test__write_to_stream_with_hash_check_fail(self, checksum):
        # A mismatched checksum must raise DataCorruption after streaming.
        stream = io.BytesIO()
        download = download_mod.RawDownload(
            EXAMPLE_URL, stream=stream, checksum=checksum
        )
        chunk1 = b"first chunk, count starting at 0. "
        chunk2 = b"second chunk, or chunk 1, which is better? "
        chunk3 = b"ordinals and numerals and stuff."
        bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ=="
        header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum)
        headers = {_helpers._HASH_HEADER: header_value}
        response = _mock_raw_response(chunks=[chunk1, chunk2, chunk3], headers=headers)
        with pytest.raises(common.DataCorruption) as exc_info:
            download._write_to_stream(response)
        assert not download.finished
        error = exc_info.value
        assert error.response is response
        assert len(error.args) == 1
        if checksum == u"md5":
            good_checksum = u"fPAJHnnoi/+NadyNxT2c2w=="
        else:
            good_checksum = u"qmNCyg=="
        msg = download_mod._CHECKSUM_MISMATCH.format(
            EXAMPLE_URL, bad_checksum, good_checksum, checksum_type=checksum.upper()
        )
        assert error.args[0] == msg
        # Check mocks.
        response.__enter__.assert_called_once_with()
        response.__exit__.assert_called_once_with(None, None, None)
        response.raw.stream.assert_called_once_with(
            _request_helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False
        )

    def test__write_to_stream_with_invalid_checksum_type(self):
        # An unsupported checksum name is rejected with ValueError before any
        # data is streamed.
        BAD_CHECKSUM_TYPE = "badsum"
        stream = io.BytesIO()
        download = download_mod.RawDownload(
            EXAMPLE_URL, stream=stream, checksum=BAD_CHECKSUM_TYPE
        )
        chunk1 = b"first chunk, count starting at 0. "
        chunk2 = b"second chunk, or chunk 1, which is better? "
        chunk3 = b"ordinals and numerals and stuff."
        bad_checksum = u"d3JvbmcgbiBtYWRlIHVwIQ=="
        header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum)
        headers = {_helpers._HASH_HEADER: header_value}
        # Fixed for consistency: this raw-download test previously built its
        # response with the decoded-content ``_mock_response`` helper.
        response = _mock_raw_response(chunks=[chunk1, chunk2, chunk3], headers=headers)
        with pytest.raises(ValueError) as exc_info:
            download._write_to_stream(response)
        assert not download.finished
        error = exc_info.value
        assert error.args[0] == "checksum must be ``'md5'``, ``'crc32c'`` or ``None``"

    def _consume_helper(
        self,
        stream=None,
        end=65536,
        headers=None,
        chunks=(),
        response_headers=None,
        checksum=None,
        timeout=None,
    ):
        # Shared driver: build a RawDownload, consume it through a mocked
        # transport, and verify the issued request and range bookkeeping.
        download = download_mod.RawDownload(
            EXAMPLE_URL, stream=stream, end=end, headers=headers, checksum=checksum
        )
        transport = mock.Mock(spec=["request"])
        transport.request.return_value = _mock_raw_response(
            chunks=chunks, headers=response_headers
        )
        assert not download.finished
        if timeout is not None:
            ret_val = download.consume(transport, timeout=timeout)
        else:
            ret_val = download.consume(transport)
        assert ret_val is transport.request.return_value
        if chunks:
            assert stream is not None
        # Raw downloads always request streaming.
        transport.request.assert_called_once_with(
            u"GET",
            EXAMPLE_URL,
            data=None,
            headers=download._headers,
            stream=True,
            timeout=EXPECTED_TIMEOUT if timeout is None else timeout,
        )
        range_bytes = u"bytes={:d}-{:d}".format(0, end)
        assert download._headers[u"range"] == range_bytes
        assert download.finished
        return transport

    def test_consume(self):
        self._consume_helper()

    def test_consume_with_custom_timeout(self):
        self._consume_helper(timeout=14.7)

    @pytest.mark.parametrize("checksum", ["md5", "crc32c", None])
    def test_consume_with_stream(self, checksum):
        stream = io.BytesIO()
        chunks = (b"up down ", b"charlie ", b"brown")
        transport = self._consume_helper(
            stream=stream, chunks=chunks, checksum=checksum
        )
        assert stream.getvalue() == b"".join(chunks)
        # Check mocks.
        response = transport.request.return_value
        response.__enter__.assert_called_once_with()
        response.__exit__.assert_called_once_with(None, None, None)
        response.raw.stream.assert_called_once_with(
            _request_helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False
        )

    @pytest.mark.parametrize("checksum", ["md5", "crc32c"])
    def test_consume_with_stream_hash_check_success(self, checksum):
        stream = io.BytesIO()
        chunks = (b"up down ", b"charlie ", b"brown")
        # Checksums of the concatenated chunks.
        header_value = u"crc32c=UNIQxg==,md5=JvS1wjMvfbCXgEGeaJJLDQ=="
        headers = {_helpers._HASH_HEADER: header_value}
        transport = self._consume_helper(
            stream=stream, chunks=chunks, response_headers=headers, checksum=checksum
        )
        assert stream.getvalue() == b"".join(chunks)
        # Check mocks.
        response = transport.request.return_value
        response.__enter__.assert_called_once_with()
        response.__exit__.assert_called_once_with(None, None, None)
        response.raw.stream.assert_called_once_with(
            _request_helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False
        )

    @pytest.mark.parametrize("checksum", ["md5", "crc32c"])
    def test_consume_with_stream_hash_check_fail(self, checksum):
        # consume() streams everything first, then raises on the mismatch.
        stream = io.BytesIO()
        download = download_mod.RawDownload(
            EXAMPLE_URL, stream=stream, checksum=checksum
        )
        chunks = (b"zero zero", b"niner tango")
        bad_checksum = u"anVzdCBub3QgdGhpcyAxLA=="
        header_value = u"crc32c={bad},md5={bad}".format(bad=bad_checksum)
        headers = {_helpers._HASH_HEADER: header_value}
        transport = mock.Mock(spec=["request"])
        transport.request.return_value = _mock_raw_response(
            chunks=chunks, headers=headers
        )
        assert not download.finished
        with pytest.raises(common.DataCorruption) as exc_info:
            download.consume(transport)
        assert stream.getvalue() == b"".join(chunks)
        assert download.finished
        assert download._headers == {}
        error = exc_info.value
        assert error.response is transport.request.return_value
        assert len(error.args) == 1
        if checksum == u"md5":
            good_checksum = u"1A/dxEpys717C6FH7FIWDw=="
        else:
            good_checksum = u"GvNZlg=="
        msg = download_mod._CHECKSUM_MISMATCH.format(
            EXAMPLE_URL, bad_checksum, good_checksum, checksum_type=checksum.upper()
        )
        assert error.args[0] == msg
        # Check mocks.
        transport.request.assert_called_once_with(
            u"GET",
            EXAMPLE_URL,
            data=None,
            headers={},
            stream=True,
            timeout=EXPECTED_TIMEOUT,
        )

    def test_consume_with_headers(self):
        # User-supplied headers get the computed range header added in place.
        headers = {}  # Empty headers
        end = 16383
        self._consume_helper(end=end, headers=headers)
        range_bytes = u"bytes={:d}-{:d}".format(0, end)
        # Make sure the headers have been modified.
        assert headers == {u"range": range_bytes}
class TestChunkedDownload(object):
    """Unit tests for :class:`download_mod.ChunkedDownload`."""

    @staticmethod
    def _response_content_range(start_byte, end_byte, total_bytes):
        # Build a ``Content-Range`` header value, e.g. ``bytes 0-5/11``.
        return u"bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes)

    def _response_headers(self, start_byte, end_byte, total_bytes):
        # Headers a server would attach to a successful ranged response.
        content_length = end_byte - start_byte + 1
        resp_range = self._response_content_range(start_byte, end_byte, total_bytes)
        return {
            u"content-length": u"{:d}".format(content_length),
            u"content-range": resp_range,
        }

    def _mock_response(
        self, start_byte, end_byte, total_bytes, content=None, status_code=None
    ):
        # Mock response exposing ``content`` (decoded-content chunked download).
        response_headers = self._response_headers(start_byte, end_byte, total_bytes)
        return mock.Mock(
            content=content,
            headers=response_headers,
            status_code=status_code,
            spec=["content", "headers", "status_code"],
        )

    def test_consume_next_chunk_already_finished(self):
        # Requesting another chunk of a finished download is an error.
        download = download_mod.ChunkedDownload(EXAMPLE_URL, 512, None)
        download._finished = True
        with pytest.raises(ValueError):
            download.consume_next_chunk(None)

    def _mock_transport(self, start, chunk_size, total_bytes, content=b""):
        # Transport whose single request returns one successful chunk.
        transport = mock.Mock(spec=["request"])
        assert len(content) == chunk_size
        transport.request.return_value = self._mock_response(
            start,
            start + chunk_size - 1,
            total_bytes,
            content=content,
            status_code=int(http_client.OK),
        )
        return transport

    def test_consume_next_chunk(self):
        start = 1536
        stream = io.BytesIO()
        data = b"Just one chunk."
        chunk_size = len(data)
        download = download_mod.ChunkedDownload(
            EXAMPLE_URL, chunk_size, stream, start=start
        )
        total_bytes = 16384
        transport = self._mock_transport(start, chunk_size, total_bytes, content=data)
        # Verify the internal state before consuming a chunk.
        assert not download.finished
        assert download.bytes_downloaded == 0
        assert download.total_bytes is None
        # Actually consume the chunk and check the output.
        ret_val = download.consume_next_chunk(transport)
        assert ret_val is transport.request.return_value
        range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1)
        download_headers = {u"range": range_bytes}
        transport.request.assert_called_once_with(
            u"GET",
            EXAMPLE_URL,
            data=None,
            headers=download_headers,
            timeout=EXPECTED_TIMEOUT,
        )
        assert stream.getvalue() == data
        # Go back and check the internal state after consuming the chunk.
        assert not download.finished
        assert download.bytes_downloaded == chunk_size
        assert download.total_bytes == total_bytes

    def test_consume_next_chunk_with_custom_timeout(self):
        # A caller-supplied timeout must be forwarded to the transport.
        start = 1536
        stream = io.BytesIO()
        data = b"Just one chunk."
        chunk_size = len(data)
        download = download_mod.ChunkedDownload(
            EXAMPLE_URL, chunk_size, stream, start=start
        )
        total_bytes = 16384
        transport = self._mock_transport(start, chunk_size, total_bytes, content=data)
        # Actually consume the chunk and check the output.
        download.consume_next_chunk(transport, timeout=14.7)
        range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1)
        download_headers = {u"range": range_bytes}
        transport.request.assert_called_once_with(
            u"GET", EXAMPLE_URL, data=None, headers=download_headers, timeout=14.7,
        )
class TestRawChunkedDownload(object):
    """Unit tests for :class:`download_mod.RawChunkedDownload`."""

    @staticmethod
    def _response_content_range(start_byte, end_byte, total_bytes):
        # Build a ``Content-Range`` header value, e.g. ``bytes 0-5/11``.
        return u"bytes {:d}-{:d}/{:d}".format(start_byte, end_byte, total_bytes)

    def _response_headers(self, start_byte, end_byte, total_bytes):
        # Headers a server would attach to a successful ranged response.
        content_length = end_byte - start_byte + 1
        resp_range = self._response_content_range(start_byte, end_byte, total_bytes)
        return {
            u"content-length": u"{:d}".format(content_length),
            u"content-range": resp_range,
        }

    def _mock_response(
        self, start_byte, end_byte, total_bytes, content=None, status_code=None
    ):
        # Mock response exposing ``_content`` (raw, not-decoded payload).
        response_headers = self._response_headers(start_byte, end_byte, total_bytes)
        return mock.Mock(
            _content=content,
            headers=response_headers,
            status_code=status_code,
            spec=["_content", "headers", "status_code"],
        )

    def test_consume_next_chunk_already_finished(self):
        # Requesting another chunk of a finished download is an error.
        download = download_mod.RawChunkedDownload(EXAMPLE_URL, 512, None)
        download._finished = True
        with pytest.raises(ValueError):
            download.consume_next_chunk(None)

    def _mock_transport(self, start, chunk_size, total_bytes, content=b""):
        # Transport whose single request returns one successful chunk.
        transport = mock.Mock(spec=["request"])
        assert len(content) == chunk_size
        transport.request.return_value = self._mock_response(
            start,
            start + chunk_size - 1,
            total_bytes,
            content=content,
            status_code=int(http_client.OK),
        )
        return transport

    def test_consume_next_chunk(self):
        start = 1536
        stream = io.BytesIO()
        data = b"Just one chunk."
        chunk_size = len(data)
        download = download_mod.RawChunkedDownload(
            EXAMPLE_URL, chunk_size, stream, start=start
        )
        total_bytes = 16384
        transport = self._mock_transport(start, chunk_size, total_bytes, content=data)
        # Verify the internal state before consuming a chunk.
        assert not download.finished
        assert download.bytes_downloaded == 0
        assert download.total_bytes is None
        # Actually consume the chunk and check the output.
        ret_val = download.consume_next_chunk(transport)
        assert ret_val is transport.request.return_value
        range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1)
        download_headers = {u"range": range_bytes}
        transport.request.assert_called_once_with(
            u"GET",
            EXAMPLE_URL,
            data=None,
            headers=download_headers,
            stream=True,
            timeout=EXPECTED_TIMEOUT,
        )
        assert stream.getvalue() == data
        # Go back and check the internal state after consuming the chunk.
        assert not download.finished
        assert download.bytes_downloaded == chunk_size
        assert download.total_bytes == total_bytes

    def test_consume_next_chunk_with_custom_timeout(self):
        # A caller-supplied timeout must be forwarded to the transport.
        start = 1536
        stream = io.BytesIO()
        data = b"Just one chunk."
        chunk_size = len(data)
        download = download_mod.RawChunkedDownload(
            EXAMPLE_URL, chunk_size, stream, start=start
        )
        total_bytes = 16384
        transport = self._mock_transport(start, chunk_size, total_bytes, content=data)
        # Actually consume the chunk and check the output.
        download.consume_next_chunk(transport, timeout=14.7)
        range_bytes = u"bytes={:d}-{:d}".format(start, start + chunk_size - 1)
        download_headers = {u"range": range_bytes}
        transport.request.assert_called_once_with(
            u"GET",
            EXAMPLE_URL,
            data=None,
            headers=download_headers,
            stream=True,
            timeout=14.7,
        )
        assert stream.getvalue() == data
        # Go back and check the internal state after consuming the chunk.
        assert not download.finished
        assert download.bytes_downloaded == chunk_size
        assert download.total_bytes == total_bytes
class Test__add_decoder(object):
    """Tests for the ``download_mod._add_decoder`` helper."""

    def test_non_gzipped(self):
        # Without a gzip content-encoding header, the hash passes through.
        raw = mock.Mock(headers={}, spec=["headers"])
        result = download_mod._add_decoder(raw, mock.sentinel.md5_hash)
        assert result is mock.sentinel.md5_hash

    def test_gzipped(self):
        # A gzip-encoded response swaps in a checksum-aware decoder and hands
        # back a do-nothing hash (the decoder updates the real hash instead).
        raw = mock.Mock(
            headers={u"content-encoding": u"gzip"}, spec=["headers", "_decoder"]
        )
        result = download_mod._add_decoder(raw, mock.sentinel.md5_hash)
        assert result is not mock.sentinel.md5_hash
        assert isinstance(result, _helpers._DoNothingHash)
        assert isinstance(raw._decoder, download_mod._GzipDecoder)
        assert raw._decoder._checksum is mock.sentinel.md5_hash
class Test_GzipDecoder(object):
    """Tests for the checksum-updating ``download_mod._GzipDecoder``."""

    def test_constructor(self):
        # The constructor simply stores the checksum object.
        checksum = mock.sentinel.md5_hash
        decoder = download_mod._GzipDecoder(checksum)
        assert decoder._checksum is checksum

    def test_decompress(self):
        # Decompressing feeds the *compressed* bytes into the checksum.
        checksum = mock.Mock(spec=["update"])
        decoder = download_mod._GzipDecoder(checksum)
        payload = b"\x1f\x8b\x08\x08"
        assert decoder.decompress(payload) == b""
        checksum.update.assert_called_once_with(payload)
def _mock_response(status_code=http_client.OK, chunks=(), headers=None):
if headers is None:
headers = {}
if chunks:
mock_raw = mock.Mock(headers=headers, spec=["headers"])
response = mock.MagicMock(
headers=headers,
status_code=int(status_code),
raw=mock_raw,
spec=[
u"__enter__",
u"__exit__",
u"iter_content",
u"status_code",
u"headers",
u"raw",
],
)
# i.e. context manager returns ``self``.
response.__enter__.return_value = response
response.__exit__.return_value = None
response.iter_content.return_value = iter(chunks)
return response
else:
return mock.Mock(
headers=headers,
status_code=int(status_code),
spec=["status_code", "headers"],
)
def _mock_raw_response(status_code=http_client.OK, chunks=(), headers=None):
if headers is None:
headers = {}
mock_raw = mock.Mock(headers=headers, spec=["stream"])
mock_raw.stream.return_value = iter(chunks)
response = mock.MagicMock(
headers=headers,
status_code=int(status_code),
raw=mock_raw,
spec=[
u"__enter__",
u"__exit__",
u"iter_content",
u"status_code",
u"headers",
u"raw",
],
)
# i.e. context manager returns ``self``.
response.__enter__.return_value = response
response.__exit__.return_value = None
return response
| 36.765133 | 87 | 0.645021 | 3,551 | 30,368 | 5.227542 | 0.078006 | 0.019393 | 0.033615 | 0.042019 | 0.922426 | 0.913484 | 0.907612 | 0.890804 | 0.887734 | 0.883747 | 0 | 0.013464 | 0.258957 | 30,368 | 825 | 88 | 36.809697 | 0.811411 | 0.04564 | 0 | 0.817204 | 0 | 0 | 0.084759 | 0.019937 | 0 | 0 | 0 | 0 | 0.18126 | 1 | 0.064516 | false | 0 | 0.012289 | 0.003072 | 0.105991 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c4f486637cfa4b5de53ba42401955ca64679b0df | 3,845 | py | Python | seconds-to-pretty-string.py | Lcast15/useful-functions | e8396ec965d1e8244cdc4b45fc4e7c38048a5f4e | [
"MIT"
] | null | null | null | seconds-to-pretty-string.py | Lcast15/useful-functions | e8396ec965d1e8244cdc4b45fc4e7c38048a5f4e | [
"MIT"
] | null | null | null | seconds-to-pretty-string.py | Lcast15/useful-functions | e8396ec965d1e8244cdc4b45fc4e7c38048a5f4e | [
"MIT"
] | null | null | null | def Time2String(Duration, Names = [[' minute', ' minutes'], [' hour', ' hours'], [' day', ' days'], [' week', ' weeks'], [' year', ' years'], ', ']):
Duration = int(Duration)
DurValue = Duration
if Duration < 3600:
if math.floor(DurValue/60) == 1:
Duration = str(math.floor(DurValue/60)) + Names[0][0]
else:
Duration = str(math.floor(DurValue/60)) + Names[0][1]
elif Duration < 86400:
if math.floor(DurValue/60/60) == 1:
Duration = str(math.floor(DurValue/60/60)) + Names[1][0] + Names[5]
else:
Duration = str(math.floor(DurValue/60/60)) + Names[1][1] + Names[5]
if math.floor(DurValue/60%60) == 1:
Duration = Duration + str(math.floor(DurValue/60%60)) + Names[0][0]
else:
Duration = Duration + str(math.floor(DurValue/60%60)) + Names[0][1]
elif Duration < 604800:
if math.floor(DurValue/60/60/24) == 1:
Duration = str(math.floor(DurValue/60/60/24)) + Names[2][0] + Names[5]
else:
Duration = str(math.floor(DurValue/60/60/24)) + Names[2][1] + Names[5]
if math.floor(DurValue/60/60%24) == 1:
Duration = Duration + str(math.floor(DurValue/60/60%24)) + Names[1][0] + Names[5]
else:
Duration = Duration + str(math.floor(DurValue/60/60%24)) + Names[1][1] + Names[5]
if math.floor(DurValue/60%60) == 1:
Duration = Duration + str(math.floor(DurValue/60%60)) + Names[0][0]
else:
Duration = Duration + str(math.floor(DurValue/60%60)) + Names[0][1]
elif Duration < 31449600:
if math.floor(DurValue/60/60/24/7) == 1:
Duration = str(math.floor(DurValue/60/60/24/7)) + Names[3][0] + Names[5]
else:
Duration = str(math.floor(DurValue/60/60/24/7)) + Names[3][1] + Names[5]
if math.floor(DurValue/60/60/24%7) == 1:
Duration = Duration + str(math.floor(DurValue/60/60/24%7)) + Names[2][0] + Names[5]
else:
Duration = Duration + str(math.floor(DurValue/60/60/24%7)) + Names[2][1] + Names[5]
if math.floor(DurValue/60/60%24) == 1:
Duration = Duration + str(math.floor(DurValue/60/60%24)) + Names[1][0] + Names[5]
else:
Duration = Duration + str(math.floor(DurValue/60/60%24)) + Names[1][1] + Names[5]
if math.floor(DurValue/60%60) == 1:
Duration = Duration + str(math.floor(DurValue/60%60)) + Names[0][0]
else:
Duration = Duration + str(math.floor(DurValue/60%60)) + Names[0][1]
else:
if math.floor(DurValue/60/60/24/7/52) == 1:
Duration = str(math.floor(DurValue/60/60/24/7/52)) + Names[4][0] + Names[5]
else:
Duration = str(math.floor(DurValue/60/60/24/7/52)) + Names[4][1] + Names[5]
if math.floor(DurValue/60/60/24/7%52) == 1:
Duration = Duration + str(math.floor(DurValue/60/60/24/7%52)) + Names[3][0] + Names[5]
else:
Duration = Duration + str(math.floor(DurValue/60/60/24/7%52)) + Names[3][1] + Names[5]
if math.floor(DurValue/60/60/24%7) == 1:
Duration = Duration + str(math.floor(DurValue/60/60/24%7)) + Names[2][0] + Names[5]
else:
Duration = Duration + str(math.floor(DurValue/60/60/24%7)) + Names[2][1] + Names[5]
if math.floor(DurValue/60/60%24) == 1:
Duration = Duration + str(math.floor(DurValue/60/60%24)) + Names[1][0] + Names[5]
else:
Duration = Duration + str(math.floor(DurValue/60/60%24)) + Names[1][1] + Names[5]
if math.floor(DurValue/60%60) == 1:
Duration = Duration + str(math.floor(DurValue/60%60)) + Names[0][0]
else:
Duration = Duration + str(math.floor(DurValue/60%60)) + Names[0][1]
return str(Duration)
# You can use this function with custom names and a custom separator by passing a list shaped like this:
# [['single minute', 'multiple minutes'], ['single hour', 'multiple hours'], ['single day', 'multiple days'], ['single week', 'multiple weeks'], ['single year', 'multiple years'], 'separator (", " is the default)']
# Just pass the list after the seconds value, e.g.: Time2String(120, [['m', 'm'], ['h', 'h'], ['d', 'd'], ['w', 'w'], ['y', 'y'], ', '])
| 45.235294 | 217 | 0.627048 | 624 | 3,845 | 3.863782 | 0.115385 | 0.16798 | 0.317296 | 0.354625 | 0.821651 | 0.81377 | 0.807964 | 0.806304 | 0.765243 | 0.738698 | 0 | 0.116754 | 0.160208 | 3,845 | 84 | 218 | 45.77381 | 0.629916 | 0.111834 | 0 | 0.623188 | 0 | 0 | 0.017292 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014493 | false | 0 | 0 | 0 | 0.028986 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
488217040c9c513408b0a028a23fd798ee8749ac | 22,003 | py | Python | release/stubs/Grasshopper/Kernel/Utility.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 182 | 2017-06-27T02:26:15.000Z | 2022-03-30T18:53:43.000Z | release/stubs/Grasshopper/Kernel/Utility.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 28 | 2017-06-27T13:38:23.000Z | 2022-03-15T11:19:44.000Z | release/stubs/Grasshopper/Kernel/Utility.py | htlcnn/ironpython-stubs | 780d829e2104b2789d5f4d6f32b0ec9f2930ca03 | [
"MIT"
] | 67 | 2017-06-28T09:43:59.000Z | 2022-03-20T21:17:10.000Z | # encoding: utf-8
# module Grasshopper.Kernel.Utility calls itself Utility
# from Grasshopper, Version=1.0.0.20, Culture=neutral, PublicKeyToken=dda4f5ec2cd80803
# by generator 1.145
""" NamespaceTracker represent a CLS namespace. """
# no imports
# no functions
# classes
class GH_IntervalWrapperDelegate(MulticastDelegate, ICloneable, ISerializable):
""" GH_IntervalWrapperDelegate(TargetObject: object, TargetMethod: IntPtr) """
def BeginInvoke(self, sender, interval, DelegateCallback, DelegateAsyncState):
""" BeginInvoke(self: GH_IntervalWrapperDelegate, sender: GH_Interval_Wrapper, interval: Interval, DelegateCallback: AsyncCallback, DelegateAsyncState: object) -> IAsyncResult """
pass
def CombineImpl(self, *args): #cannot find CLR method
"""
CombineImpl(self: MulticastDelegate, follow: Delegate) -> Delegate
Combines this System.Delegate with the specified System.Delegate to form a new delegate.
follow: The delegate to combine with this delegate.
Returns: A delegate that is the new root of the System.MulticastDelegate invocation list.
"""
pass
def DynamicInvokeImpl(self, *args): #cannot find CLR method
"""
DynamicInvokeImpl(self: Delegate, args: Array[object]) -> object
Dynamically invokes (late-bound) the method represented by the current delegate.
args: An array of objects that are the arguments to pass to the method represented by the current
delegate.-or- null, if the method represented by the current delegate does not require
arguments.
Returns: The object returned by the method represented by the delegate.
"""
pass
def EndInvoke(self, DelegateAsyncResult):
""" EndInvoke(self: GH_IntervalWrapperDelegate, DelegateAsyncResult: IAsyncResult) """
pass
def GetMethodImpl(self, *args): #cannot find CLR method
"""
GetMethodImpl(self: MulticastDelegate) -> MethodInfo
Returns a static method represented by the current System.MulticastDelegate.
Returns: A static method represented by the current System.MulticastDelegate.
"""
pass
def Invoke(self, sender, interval):
""" Invoke(self: GH_IntervalWrapperDelegate, sender: GH_Interval_Wrapper, interval: Interval) """
pass
def RemoveImpl(self, *args): #cannot find CLR method
"""
RemoveImpl(self: MulticastDelegate, value: Delegate) -> Delegate
Removes an element from the invocation list of this System.MulticastDelegate that is equal to
the specified delegate.
value: The delegate to search for in the invocation list.
Returns: If value is found in the invocation list for this instance, then a new System.Delegate without
value in its invocation list; otherwise, this instance with its original invocation list.
"""
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod # known case of __new__
def __new__(self, TargetObject, TargetMethod):
""" __new__(cls: type, TargetObject: object, TargetMethod: IntPtr) """
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
class GH_Interval_Wrapper(object):
""" GH_Interval_Wrapper(interval: Interval, wrapperdelegate: GH_IntervalWrapperDelegate) -> Interval """
def InternalInterval(self):
""" InternalInterval(self: GH_Interval_Wrapper) -> Interval """
pass
@staticmethod # known case of __new__
def __new__(self, interval, wrapperdelegate):
""" __new__(cls: type, interval: Interval, wrapperdelegate: GH_IntervalWrapperDelegate) -> Interval """
pass
A = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: A(self: GH_Interval_Wrapper) -> float
Set: A(self: GH_Interval_Wrapper) = value
"""
B = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: B(self: GH_Interval_Wrapper) -> float
Set: B(self: GH_Interval_Wrapper) = value
"""
Increasing = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Increasing(self: GH_Interval_Wrapper) -> str
"""
Length = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Length(self: GH_Interval_Wrapper) -> str
"""
class GH_Interval_Wrapper_TypeConverter(ExpandableObjectConverter):
""" GH_Interval_Wrapper_TypeConverter() """
def CanConvertFrom(self, *__args):
""" CanConvertFrom(self: GH_Interval_Wrapper_TypeConverter, context: ITypeDescriptorContext, sourceType: Type) -> bool """
pass
def CanConvertTo(self, *__args):
""" CanConvertTo(self: GH_Interval_Wrapper_TypeConverter, context: ITypeDescriptorContext, destinationType: Type) -> bool """
pass
def ConvertTo(self, *__args):
""" ConvertTo(self: GH_Interval_Wrapper_TypeConverter, context: ITypeDescriptorContext, culture: CultureInfo, value: object, destinationType: Type) -> object """
pass
class GH_PlaneModifier(object):
# no doc
@staticmethod
def Set_X(P, x_axis):
""" Set_X(P: Plane, x_axis: Vector3d) -> Plane """
pass
@staticmethod
def Set_Y(P, y_axis):
""" Set_Y(P: Plane, y_axis: Vector3d) -> Plane """
pass
@staticmethod
def Set_Z(P, z_axis):
""" Set_Z(P: Plane, z_axis: Vector3d) -> Plane """
pass
class GH_Point3dWrapperDelegate(MulticastDelegate, ICloneable, ISerializable):
""" GH_Point3dWrapperDelegate(TargetObject: object, TargetMethod: IntPtr) """
def BeginInvoke(self, sender, point, DelegateCallback, DelegateAsyncState):
""" BeginInvoke(self: GH_Point3dWrapperDelegate, sender: GH_Point3d_Wrapper, point: Point3d, DelegateCallback: AsyncCallback, DelegateAsyncState: object) -> IAsyncResult """
pass
def CombineImpl(self, *args): #cannot find CLR method
"""
CombineImpl(self: MulticastDelegate, follow: Delegate) -> Delegate
Combines this System.Delegate with the specified System.Delegate to form a new delegate.
follow: The delegate to combine with this delegate.
Returns: A delegate that is the new root of the System.MulticastDelegate invocation list.
"""
pass
def DynamicInvokeImpl(self, *args): #cannot find CLR method
"""
DynamicInvokeImpl(self: Delegate, args: Array[object]) -> object
Dynamically invokes (late-bound) the method represented by the current delegate.
args: An array of objects that are the arguments to pass to the method represented by the current
delegate.-or- null, if the method represented by the current delegate does not require
arguments.
Returns: The object returned by the method represented by the delegate.
"""
pass
def EndInvoke(self, DelegateAsyncResult):
""" EndInvoke(self: GH_Point3dWrapperDelegate, DelegateAsyncResult: IAsyncResult) """
pass
def GetMethodImpl(self, *args): #cannot find CLR method
"""
GetMethodImpl(self: MulticastDelegate) -> MethodInfo
Returns a static method represented by the current System.MulticastDelegate.
Returns: A static method represented by the current System.MulticastDelegate.
"""
pass
def Invoke(self, sender, point):
""" Invoke(self: GH_Point3dWrapperDelegate, sender: GH_Point3d_Wrapper, point: Point3d) """
pass
def RemoveImpl(self, *args): #cannot find CLR method
"""
RemoveImpl(self: MulticastDelegate, value: Delegate) -> Delegate
Removes an element from the invocation list of this System.MulticastDelegate that is equal to
the specified delegate.
value: The delegate to search for in the invocation list.
Returns: If value is found in the invocation list for this instance, then a new System.Delegate without
value in its invocation list; otherwise, this instance with its original invocation list.
"""
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod # known case of __new__
def __new__(self, TargetObject, TargetMethod):
""" __new__(cls: type, TargetObject: object, TargetMethod: IntPtr) """
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
class GH_Point3d_Wrapper(object):
""" GH_Point3d_Wrapper(pt: Point3d, wrapperdelegate: GH_Point3dWrapperDelegate) -> Point3d """
def InternalPoint(self):
""" InternalPoint(self: GH_Point3d_Wrapper) -> Point3d """
pass
@staticmethod # known case of __new__
def __new__(self, pt, wrapperdelegate):
""" __new__(cls: type, pt: Point3d, wrapperdelegate: GH_Point3dWrapperDelegate) -> Point3d """
pass
X = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: X(self: GH_Point3d_Wrapper) -> float
Set: X(self: GH_Point3d_Wrapper) = value
"""
Y = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Y(self: GH_Point3d_Wrapper) -> float
Set: Y(self: GH_Point3d_Wrapper) = value
"""
Z = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Z(self: GH_Point3d_Wrapper) -> float
Set: Z(self: GH_Point3d_Wrapper) = value
"""
class GH_Point3d_Wrapper_TypeConverter(ExpandableObjectConverter):
""" GH_Point3d_Wrapper_TypeConverter() """
def CanConvertFrom(self, *__args):
""" CanConvertFrom(self: GH_Point3d_Wrapper_TypeConverter, context: ITypeDescriptorContext, sourceType: Type) -> bool """
pass
def CanConvertTo(self, *__args):
""" CanConvertTo(self: GH_Point3d_Wrapper_TypeConverter, context: ITypeDescriptorContext, destinationType: Type) -> bool """
pass
def ConvertTo(self, *__args):
""" ConvertTo(self: GH_Point3d_Wrapper_TypeConverter, context: ITypeDescriptorContext, culture: CultureInfo, value: object, destinationType: Type) -> object """
pass
class GH_PointRefUVWrapperDelegate(MulticastDelegate, ICloneable, ISerializable):
""" GH_PointRefUVWrapperDelegate(TargetObject: object, TargetMethod: IntPtr) """
def BeginInvoke(self, sender, ref, DelegateCallback, DelegateAsyncState):
""" BeginInvoke(self: GH_PointRefUVWrapperDelegate, sender: GH_PointRefUV_Wrapper, ref: GH_PointRefData, DelegateCallback: AsyncCallback, DelegateAsyncState: object) -> IAsyncResult """
pass
def CombineImpl(self, *args): #cannot find CLR method
"""
CombineImpl(self: MulticastDelegate, follow: Delegate) -> Delegate
Combines this System.Delegate with the specified System.Delegate to form a new delegate.
follow: The delegate to combine with this delegate.
Returns: A delegate that is the new root of the System.MulticastDelegate invocation list.
"""
pass
def DynamicInvokeImpl(self, *args): #cannot find CLR method
"""
DynamicInvokeImpl(self: Delegate, args: Array[object]) -> object
Dynamically invokes (late-bound) the method represented by the current delegate.
args: An array of objects that are the arguments to pass to the method represented by the current
delegate.-or- null, if the method represented by the current delegate does not require
arguments.
Returns: The object returned by the method represented by the delegate.
"""
pass
def EndInvoke(self, DelegateAsyncResult):
""" EndInvoke(self: GH_PointRefUVWrapperDelegate, DelegateAsyncResult: IAsyncResult) """
pass
def GetMethodImpl(self, *args): #cannot find CLR method
"""
GetMethodImpl(self: MulticastDelegate) -> MethodInfo
Returns a static method represented by the current System.MulticastDelegate.
Returns: A static method represented by the current System.MulticastDelegate.
"""
pass
def Invoke(self, sender, ref):
""" Invoke(self: GH_PointRefUVWrapperDelegate, sender: GH_PointRefUV_Wrapper, ref: GH_PointRefData) """
pass
def RemoveImpl(self, *args): #cannot find CLR method
"""
RemoveImpl(self: MulticastDelegate, value: Delegate) -> Delegate
Removes an element from the invocation list of this System.MulticastDelegate that is equal to
the specified delegate.
value: The delegate to search for in the invocation list.
Returns: If value is found in the invocation list for this instance, then a new System.Delegate without
value in its invocation list; otherwise, this instance with its original invocation list.
"""
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod # known case of __new__
def __new__(self, TargetObject, TargetMethod):
""" __new__(cls: type, TargetObject: object, TargetMethod: IntPtr) """
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
class GH_PointRefUV_Wrapper(object):
""" GH_PointRefUV_Wrapper(ref: GH_PointRefData, wrapperdelegate: GH_PointRefUVWrapperDelegate) -> GH_PointRefData """
def InternalRefence(self):
""" InternalRefence(self: GH_PointRefUV_Wrapper) -> GH_PointRefData """
pass
@staticmethod # known case of __new__
def __new__(self, ref, wrapperdelegate):
""" __new__(cls: type, ref: GH_PointRefData, wrapperdelegate: GH_PointRefUVWrapperDelegate) -> GH_PointRefData """
pass
U = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: U(self: GH_PointRefUV_Wrapper) -> float
Set: U(self: GH_PointRefUV_Wrapper) = value
"""
V = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: V(self: GH_PointRefUV_Wrapper) -> float
Set: V(self: GH_PointRefUV_Wrapper) = value
"""
class GH_PointRefUV_Wrapper_TypeConverter(ExpandableObjectConverter):
""" GH_PointRefUV_Wrapper_TypeConverter() """
def CanConvertFrom(self, *__args):
""" CanConvertFrom(self: GH_PointRefUV_Wrapper_TypeConverter, context: ITypeDescriptorContext, sourceType: Type) -> bool """
pass
def CanConvertTo(self, *__args):
""" CanConvertTo(self: GH_PointRefUV_Wrapper_TypeConverter, context: ITypeDescriptorContext, destinationType: Type) -> bool """
pass
def ConvertTo(self, *__args):
""" ConvertTo(self: GH_PointRefUV_Wrapper_TypeConverter, context: ITypeDescriptorContext, culture: CultureInfo, value: object, destinationType: Type) -> object """
pass
class GH_Vector3dWrapperDelegate(MulticastDelegate, ICloneable, ISerializable):
""" GH_Vector3dWrapperDelegate(TargetObject: object, TargetMethod: IntPtr) """
def BeginInvoke(self, sender, vector, DelegateCallback, DelegateAsyncState):
""" BeginInvoke(self: GH_Vector3dWrapperDelegate, sender: GH_Vector3d_Wrapper, vector: Vector3d, DelegateCallback: AsyncCallback, DelegateAsyncState: object) -> IAsyncResult """
pass
def CombineImpl(self, *args): #cannot find CLR method
"""
CombineImpl(self: MulticastDelegate, follow: Delegate) -> Delegate
Combines this System.Delegate with the specified System.Delegate to form a new delegate.
follow: The delegate to combine with this delegate.
Returns: A delegate that is the new root of the System.MulticastDelegate invocation list.
"""
pass
def DynamicInvokeImpl(self, *args): #cannot find CLR method
"""
DynamicInvokeImpl(self: Delegate, args: Array[object]) -> object
Dynamically invokes (late-bound) the method represented by the current delegate.
args: An array of objects that are the arguments to pass to the method represented by the current
delegate.-or- null, if the method represented by the current delegate does not require
arguments.
Returns: The object returned by the method represented by the delegate.
"""
pass
def EndInvoke(self, DelegateAsyncResult):
""" EndInvoke(self: GH_Vector3dWrapperDelegate, DelegateAsyncResult: IAsyncResult) """
pass
def GetMethodImpl(self, *args): #cannot find CLR method
"""
GetMethodImpl(self: MulticastDelegate) -> MethodInfo
Returns a static method represented by the current System.MulticastDelegate.
Returns: A static method represented by the current System.MulticastDelegate.
"""
pass
def Invoke(self, sender, vector):
""" Invoke(self: GH_Vector3dWrapperDelegate, sender: GH_Vector3d_Wrapper, vector: Vector3d) """
pass
def RemoveImpl(self, *args): #cannot find CLR method
"""
RemoveImpl(self: MulticastDelegate, value: Delegate) -> Delegate
Removes an element from the invocation list of this System.MulticastDelegate that is equal to
the specified delegate.
value: The delegate to search for in the invocation list.
Returns: If value is found in the invocation list for this instance, then a new System.Delegate without
value in its invocation list; otherwise, this instance with its original invocation list.
"""
pass
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod # known case of __new__
def __new__(self, TargetObject, TargetMethod):
""" __new__(cls: type, TargetObject: object, TargetMethod: IntPtr) """
pass
def __reduce_ex__(self, *args): #cannot find CLR method
pass
class GH_Vector3d_Wrapper(object):
""" GH_Vector3d_Wrapper(vec: Vector3d, wrapperdelegate: GH_Vector3dWrapperDelegate) -> Vector3d """
def InternalVector(self):
""" InternalVector(self: GH_Vector3d_Wrapper) -> Vector3d """
pass
@staticmethod # known case of __new__
def __new__(self, vec, wrapperdelegate):
""" __new__(cls: type, vec: Vector3d, wrapperdelegate: GH_Vector3dWrapperDelegate) -> Vector3d """
pass
Length = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Length(self: GH_Vector3d_Wrapper) -> str
"""
Tiny = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Tiny(self: GH_Vector3d_Wrapper) -> str
"""
X = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: X(self: GH_Vector3d_Wrapper) -> float
Set: X(self: GH_Vector3d_Wrapper) = value
"""
Y = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Y(self: GH_Vector3d_Wrapper) -> float
Set: Y(self: GH_Vector3d_Wrapper) = value
"""
Z = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Z(self: GH_Vector3d_Wrapper) -> float
Set: Z(self: GH_Vector3d_Wrapper) = value
"""
class GH_Vector3d_Wrapper_TypeConverter(ExpandableObjectConverter):
""" GH_Vector3d_Wrapper_TypeConverter() """
def CanConvertFrom(self, *__args):
""" CanConvertFrom(self: GH_Vector3d_Wrapper_TypeConverter, context: ITypeDescriptorContext, sourceType: Type) -> bool """
pass
def CanConvertTo(self, *__args):
""" CanConvertTo(self: GH_Vector3d_Wrapper_TypeConverter, context: ITypeDescriptorContext, destinationType: Type) -> bool """
pass
def ConvertTo(self, *__args):
""" ConvertTo(self: GH_Vector3d_Wrapper_TypeConverter, context: ITypeDescriptorContext, culture: CultureInfo, value: object, destinationType: Type) -> object """
pass
| 42.232246 | 221 | 0.653047 | 2,294 | 22,003 | 6.047515 | 0.077158 | 0.02249 | 0.02422 | 0.03114 | 0.878397 | 0.844806 | 0.796367 | 0.783608 | 0.755785 | 0.697254 | 0 | 0.005076 | 0.256783 | 22,003 | 520 | 222 | 42.313462 | 0.84327 | 0.567604 | 0 | 0.768293 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.384146 | false | 0.384146 | 0 | 0 | 0.54878 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 10 |
6ff1902baefcc2210499bae14845023be36fa4d0 | 54,744 | py | Python | web2py/applications/rip/modules/lib/python2.7/site-packages/pyVmomi/DataserviceTypes.py | 2spmohanty/vcenter-automation | 1d10b765ef335087902b0194ed12a61e53807987 | [
"Apache-2.0"
] | 1 | 2019-10-02T13:25:03.000Z | 2019-10-02T13:25:03.000Z | web2py/applications/rip/modules/lib/python2.7/site-packages/pyVmomi/DataserviceTypes.py | 2spmohanty/vcenter-automation | 1d10b765ef335087902b0194ed12a61e53807987 | [
"Apache-2.0"
] | null | null | null | web2py/applications/rip/modules/lib/python2.7/site-packages/pyVmomi/DataserviceTypes.py | 2spmohanty/vcenter-automation | 1d10b765ef335087902b0194ed12a61e53807987 | [
"Apache-2.0"
] | 1 | 2021-11-05T09:51:02.000Z | 2021-11-05T09:51:02.000Z | # ******* WARNING - AUTO GENERATED CODE - DO NOT EDIT *******
from .VmomiSupport import CreateDataType, CreateManagedType
from .VmomiSupport import CreateEnumType
from .VmomiSupport import AddVersion, AddVersionParent
from .VmomiSupport import AddBreakingChangesInfo
from .VmomiSupport import F_LINK, F_LINKABLE
from .VmomiSupport import F_OPTIONAL, F_SECRET
from .VmomiSupport import newestVersions, stableVersions
from .VmomiSupport import publicVersions, oldestVersions
# --- VMODL version registry ---------------------------------------------------
# Register the Inventory Service ("dataservice") API versions and the base
# vmodl versions they build on.
# NOTE(review): argument order presumed to be (versionId, namespace,
# versionString, isLegacy, serviceNamespace) — confirm against
# VmomiSupport.AddVersion before relying on it.
AddVersion("dataservice.version.version1", "inventoryservice", "1.0", 0, "inventoryservice")
AddVersion("dataservice.version.version2", "inventoryservice", "2.0", 0, "inventoryservice")
AddVersion("dataservice.version.version3", "inventoryservice", "3.0", 0, "inventoryservice")
AddVersion("vmodl.version.version2", "", "", 0, "vim25")
AddVersion("vmodl.version.version1", "", "", 0, "vim25")
AddVersion("vmodl.version.version0", "", "", 0, "vim25")
# Version ancestry: each version lists every version it is compatible with,
# including itself and all underlying vmodl base versions (so version3
# subsumes version2 and version1, and every version subsumes vmodl 0-2).
AddVersionParent("dataservice.version.version1", "dataservice.version.version1")
AddVersionParent("dataservice.version.version1", "vmodl.version.version2")
AddVersionParent("dataservice.version.version1", "vmodl.version.version1")
AddVersionParent("dataservice.version.version1", "vmodl.version.version0")
AddVersionParent("dataservice.version.version2", "dataservice.version.version1")
AddVersionParent("dataservice.version.version2", "dataservice.version.version2")
AddVersionParent("dataservice.version.version2", "vmodl.version.version2")
AddVersionParent("dataservice.version.version2", "vmodl.version.version1")
AddVersionParent("dataservice.version.version2", "vmodl.version.version0")
AddVersionParent("dataservice.version.version3", "dataservice.version.version1")
AddVersionParent("dataservice.version.version3", "dataservice.version.version2")
AddVersionParent("dataservice.version.version3", "dataservice.version.version3")
AddVersionParent("dataservice.version.version3", "vmodl.version.version2")
AddVersionParent("dataservice.version.version3", "vmodl.version.version1")
AddVersionParent("dataservice.version.version3", "vmodl.version.version0")
AddVersionParent("vmodl.version.version2", "vmodl.version.version2")
AddVersionParent("vmodl.version.version2", "vmodl.version.version1")
AddVersionParent("vmodl.version.version2", "vmodl.version.version0")
AddVersionParent("vmodl.version.version1", "vmodl.version.version1")
AddVersionParent("vmodl.version.version1", "vmodl.version.version0")
AddVersionParent("vmodl.version.version0", "vmodl.version.version0")
# Publish version3 as the newest/stable/public API level; version1 is the
# oldest level still supported.
newestVersions.Add("dataservice.version.version3")
stableVersions.Add("dataservice.version.version3")
publicVersions.Add("dataservice.version.version3")
oldestVersions.Add("dataservice.version.version1")
# --- Core data types and the AuthorizationService managed object --------------
# Each CreateDataType call registers a VMODL data type:
# (vmodlName, wsdlName, parentType, introducedInVersion, [property tuples]).
# Property tuples are (name, type, version, flags) where flags is 0 or
# F_OPTIONAL.

# XML qualified name: optional namespace URI plus a required local part.
CreateDataType("dataservice.QName", "QName", "vmodl.DynamicData", "dataservice.version.version2", [("namespaceURI", "string", "dataservice.version.version2", F_OPTIONAL), ("localPart", "string", "dataservice.version.version2", 0)])
# Root Inventory Service managed object; exposes a single anonymous-readable
# "info" property and no methods.
CreateManagedType("dataservice.Service", "InventoryService", "vmodl.ManagedObject", "dataservice.version.version2", [("info", "dataservice.ServiceInformation", "dataservice.version.version2", 0, "System.Anonymous")], None)
# Static service descriptor: identity, SSO user, TLS certificate, version,
# ports, and the lookup-service endpoint.
CreateDataType("dataservice.ServiceInformation", "InventoryServiceInformation", "vmodl.DynamicData", "dataservice.version.version2", [("instanceUuid", "string", "dataservice.version.version2", 0), ("ssoSolutionUser", "string", "dataservice.version.version2", 0), ("certificate", "string", "dataservice.version.version2", 0), ("version", "string", "dataservice.version.version2", 0), ("httpPort", "int", "dataservice.version.version2", 0), ("httpsPort", "int", "dataservice.version.version2", 0), ("lookupServiceURI", "vmodl.URI", "dataservice.version.version2", 0)])
# A single ACL entry: principal + role IDs + propagation flag + entry version.
CreateDataType("dataservice.accesscontrol.AccessControl", "InventoryServiceAccessControl", "vmodl.DynamicData", "dataservice.version.version2", [("principal", "dataservice.accesscontrol.Principal", "dataservice.version.version2", 0), ("roles", "long[]", "dataservice.version.version2", F_OPTIONAL), ("propagate", "boolean", "dataservice.version.version2", 0), ("version", "long", "dataservice.version.version2", 0)])
# Binds a document URI to its list of ACL entries.
CreateDataType("dataservice.accesscontrol.AclMapping", "InventoryServiceAclMapping", "vmodl.DynamicData", "dataservice.version.version2", [("document", "vmodl.URI", "dataservice.version.version2", 0), ("accessControlList", "dataservice.accesscontrol.AccessControl[]", "dataservice.version.version2", F_OPTIONAL)])
# AuthorizationService managed object: permission CRUD (per-document and
# global), privilege and role management (including per-tenant roles), and
# privilege-check queries.  Method tuples are
# (name, wsdlName, version, (params...), (resultFlags, type, wsdlType),
#  requiredPrivilege, [faults]).
CreateManagedType("dataservice.accesscontrol.AuthorizationService", "AuthorizationService", "vmodl.ManagedObject", "dataservice.version.version2", None, [("addAccessControlList", "AuthorizationService.AddAccessControlList", "dataservice.version.version2", (("docUri", "vmodl.URI", "dataservice.version.version2", 0, "Authorization.ModifyPermissions"),("permissions", "dataservice.accesscontrol.AccessControl[]", "dataservice.version.version2", 0, None),), (0, "void", "void"), None, ["dataservice.fault.NotFoundFault", "dataservice.fault.UserNotFoundFault", ]), ("removeAccess", "AuthorizationService.RemoveAccess", "dataservice.version.version2", (("docUri", "vmodl.URI", "dataservice.version.version2", 0, "Authorization.ModifyPermissions"),("principals", "dataservice.accesscontrol.Principal[]", "dataservice.version.version2", 0, None),), (0, "void", "void"), None, ["dataservice.fault.NotFoundFault", ]), ("resetAccess", "AuthorizationService.ResetAccess", "dataservice.version.version2", (("docUri", "vmodl.URI", "dataservice.version.version2", 0, "Authorization.ModifyPermissions"),("permissions", "dataservice.accesscontrol.AccessControl[]", "dataservice.version.version2", F_OPTIONAL, None),), (0, "void", "void"), None, ["dataservice.fault.NotFoundFault", "dataservice.fault.UserNotFoundFault", ]), ("getAllPermissions", "AuthorizationService.GetAllPermissions", "dataservice.version.version2", (), (F_OPTIONAL, "dataservice.accesscontrol.AclMapping[]", "dataservice.accesscontrol.AclMapping[]"), "System.View", None), ("getPermissions", "AuthorizationService.GetPermissions", "dataservice.version.version2", (("docUri", "vmodl.URI", "dataservice.version.version2", 0, "System.View"),), (F_OPTIONAL, "dataservice.accesscontrol.AclMapping[]", "dataservice.accesscontrol.AclMapping[]"), None, None), ("addGlobalAccessControlList", "AuthorizationService.AddGlobalAccessControlList", "dataservice.version.version2", (("permissions", "dataservice.accesscontrol.AccessControl[]", 
"dataservice.version.version2", 0, None),), (0, "void", "void"), "Authorization.ModifyPermissions", ["dataservice.fault.UserNotFoundFault", ]), ("removeGlobalAccess", "AuthorizationService.RemoveGlobalAccess", "dataservice.version.version2", (("principals", "dataservice.accesscontrol.Principal[]", "dataservice.version.version2", 0, None),), (0, "void", "void"), "Authorization.ModifyPermissions", None), ("getGlobalAccessControlList", "AuthorizationService.GetGlobalAccessControlList", "dataservice.version.version2", (), (F_OPTIONAL, "dataservice.accesscontrol.AccessControl[]", "dataservice.accesscontrol.AccessControl[]"), "System.View", None), ("addOrUpdatePrivilege", "AuthorizationService.AddOrUpdatePrivilege", "dataservice.version.version2", (("privilegeId", "string", "dataservice.version.version2", 0, None),("description", "string", "dataservice.version.version2", 0, None),("group", "string", "dataservice.version.version2", 0, None),("isOnParent", "boolean", "dataservice.version.version2", 0, None),), (0, "dataservice.accesscontrol.Privilege", "dataservice.accesscontrol.Privilege"), "Authorization.ModifyPrivileges", None), ("deletePrivilege", "AuthorizationService.DeletePrivilege", "dataservice.version.version2", (("privilegeId", "string", "dataservice.version.version2", 0, None),), (0, "boolean", "boolean"), "Authorization.ModifyPrivileges", None), ("addRole", "AuthorizationService.AddRole", "dataservice.version.version2", (("name", "string", "dataservice.version.version2", 0, None),("description", "string", "dataservice.version.version2", 0, None),("privilegeIds", "string[]", "dataservice.version.version2", F_OPTIONAL, None),), (0, "dataservice.accesscontrol.Role", "dataservice.accesscontrol.Role"), "Authorization.ModifyRoles", ["dataservice.fault.AlreadyExistsFault", ]), ("mergeRoles", "AuthorizationService.MergeRoles", "dataservice.version.version2", (("to", "long", "dataservice.version.version2", 0, None),("from", "long", "dataservice.version.version2", 0, 
None),), (0, "dataservice.accesscontrol.Role", "dataservice.accesscontrol.Role"), "Authorization.ReassignRolePermissions", ["dataservice.fault.NotFoundFault", ]), ("addTenantRole", "AuthorizationService.AddTenantRole", "dataservice.version.version2", (("tenant", "vmodl.URI", "dataservice.version.version2", 0, "Authorization.ModifyRoles"),("name", "string", "dataservice.version.version2", 0, None),("description", "string", "dataservice.version.version2", 0, None),("privilegeIds", "string[]", "dataservice.version.version2", F_OPTIONAL, None),), (0, "dataservice.accesscontrol.Role", "dataservice.accesscontrol.Role"), None, ["dataservice.fault.AlreadyExistsFault", ]), ("deleteTenantRole", "AuthorizationService.DeleteTenantRole", "dataservice.version.version2", (("tenant", "vmodl.URI", "dataservice.version.version2", 0, "Authorization.ModifyRoles"),("roleId", "long", "dataservice.version.version2", 0, None),), (0, "boolean", "boolean"), None, None), ("updateRole", "AuthorizationService.UpdateRole", "dataservice.version.version2", (("role", "dataservice.accesscontrol.Role", "dataservice.version.version2", 0, None),), (0, "dataservice.accesscontrol.Role", "dataservice.accesscontrol.Role"), "Authorization.ModifyRoles", ["dataservice.fault.NotFoundFault", "dataservice.fault.AlreadyExistsFault", ]), ("updateTenantRole", "AuthorizationService.UpdateTenantRole", "dataservice.version.version2", (("tenant", "vmodl.URI", "dataservice.version.version2", 0, "Authorization.ModifyRoles"),("role", "dataservice.accesscontrol.Role", "dataservice.version.version2", 0, None),), (0, "dataservice.accesscontrol.Role", "dataservice.accesscontrol.Role"), None, ["dataservice.fault.NotFoundFault", "dataservice.fault.AlreadyExistsFault", ]), ("deleteRole", "AuthorizationService.DeleteRole", "dataservice.version.version2", (("roleId", "long", "dataservice.version.version2", 0, None),("forceDelete", "boolean", "dataservice.version.version2", 0, None),), (0, "boolean", "boolean"), 
"Authorization.ModifyRoles", None), ("deleteRoleWithForceOption", "AuthorizationService.DeleteRoleWithForceOption", "dataservice.version.version2", (("roleId", "long", "dataservice.version.version2", 0, None),("forceDelete", "boolean", "dataservice.version.version2", 0, None),), (0, "boolean", "boolean"), "Authorization.ModifyRoles", None), ("getRoles", "AuthorizationService.GetRoles", "dataservice.version.version2", (), (F_OPTIONAL, "dataservice.accesscontrol.Role[]", "dataservice.accesscontrol.Role[]"), "System.View", None), ("getTenantRoles", "AuthorizationService.GetTenantRoles", "dataservice.version.version2", (("tenant", "vmodl.URI", "dataservice.version.version2", 0, "System.View"),), (F_OPTIONAL, "dataservice.accesscontrol.Role[]", "dataservice.accesscontrol.Role[]"), None, None), ("getPrivileges", "AuthorizationService.GetPrivileges", "dataservice.version.version2", (), (F_OPTIONAL, "dataservice.accesscontrol.Privilege[]", "dataservice.accesscontrol.Privilege[]"), "System.View", None), ("hasPrivileges", "AuthorizationService.HasPrivileges", "dataservice.version.version2", (("docUri", "vmodl.URI", "dataservice.version.version2", 0, "System.View"),("userName", "string", "dataservice.version.version2", 0, None),("groups", "string[]", "dataservice.version.version2", F_OPTIONAL, None),("privileges", "string[]", "dataservice.version.version2", 0, None),), (0, "boolean", "boolean"), None, None), ("hasSysViewPrivileges", "AuthorizationService.HasSysViewPrivileges", "dataservice.version.version2", (("docUri", "vmodl.URI", "dataservice.version.version2", 0, "System.View"),("userName", "string", "dataservice.version.version2", 0, None),("groups", "string[]", "dataservice.version.version2", F_OPTIONAL, None),), (0, "boolean", "boolean"), None, None), ("hasPrivilegesBatch", "AuthorizationService.HasPrivilegesBatch", "dataservice.version.version2", (("requests", "dataservice.accesscontrol.AuthorizationService.PrivilegeCheckRequest[]", "dataservice.version.version2", 0, 
None),("userName", "string", "dataservice.version.version2", 0, None),("groups", "string[]", "dataservice.version.version2", F_OPTIONAL, None),), (0, "dataservice.accesscontrol.AuthorizationService.PrivilegeCheckResult[]", "dataservice.accesscontrol.AuthorizationService.PrivilegeCheckResult[]"), None, None), ("hasGlobalPrivileges", "AuthorizationService.HasGlobalPrivileges", "dataservice.version.version2", (("userName", "string", "dataservice.version.version2", 0, None),("groups", "string[]", "dataservice.version.version2", F_OPTIONAL, None),("privileges", "string[]", "dataservice.version.version2", 0, None),), (0, "boolean", "boolean"), "System.View", None), ("getUserPrivileges", "AuthorizationService.GetUserPrivileges", "dataservice.version.version2", (("docUri", "vmodl.URI", "dataservice.version.version2", 0, "System.View"),("userName", "string", "dataservice.version.version2", 0, None),("groups", "string[]", "dataservice.version.version2", F_OPTIONAL, None),), (F_OPTIONAL, "string[]", "string[]"), None, None), ("getUserPrivilegesBatch", "AuthorizationService.GetUserPrivilegesBatch", "dataservice.version.version2", (("docUri", "vmodl.URI[]", "dataservice.version.version2", 0, "System.View"),("userName", "string", "dataservice.version.version2", 0, None),("groups", "string[]", "dataservice.version.version2", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.accesscontrol.AuthorizationService.UserPrivilegeResult[]", "dataservice.accesscontrol.AuthorizationService.UserPrivilegeResult[]"), None, None), ("getUserGlobalPrivileges", "AuthorizationService.GetUserGlobalPrivileges", "dataservice.version.version2", (("userName", "string", "dataservice.version.version2", 0, None),("groups", "string[]", "dataservice.version.version2", F_OPTIONAL, None),), (F_OPTIONAL, "string[]", "string[]"), "System.View", None)])
# Request/result pairs for batched privilege checks, plus the principal,
# privilege, and role data types used throughout the authorization API.
CreateDataType("dataservice.accesscontrol.AuthorizationService.PrivilegeCheckRequest", "PrivilegeCheckRequest", "vmodl.DynamicData", "dataservice.version.version1", [("docUri", "vmodl.URI", "dataservice.version.version1", 0), ("privileges", "string[]", "dataservice.version.version1", 0)])
CreateDataType("dataservice.accesscontrol.AuthorizationService.PrivilegeCheckResult", "PrivilegeCheckResult", "vmodl.DynamicData", "dataservice.version.version1", [("docUri", "vmodl.URI", "dataservice.version.version1", 0), ("docResult", "boolean", "dataservice.version.version1", 0)])
CreateDataType("dataservice.accesscontrol.AuthorizationService.UserPrivilegeResult", "UserPrivilegeResult", "vmodl.DynamicData", "dataservice.version.version1", [("docUri", "vmodl.URI", "dataservice.version.version1", 0), ("privilege", "string[]", "dataservice.version.version1", 0)])
CreateDataType("dataservice.accesscontrol.Principal", "AuthorizationServicePrincipal", "vmodl.DynamicData", "dataservice.version.version2", [("name", "string", "dataservice.version.version2", 0), ("group", "boolean", "dataservice.version.version2", 0)])
CreateDataType("dataservice.accesscontrol.Privilege", "AuthorizationServicePrivilege", "vmodl.DynamicData", "dataservice.version.version2", [("id", "string", "dataservice.version.version2", 0), ("description", "string", "dataservice.version.version2", 0), ("group", "string", "dataservice.version.version2", 0), ("version", "long", "dataservice.version.version2", 0), ("isOnParent", "boolean", "dataservice.version.version2", F_OPTIONAL)])
CreateDataType("dataservice.accesscontrol.Role", "AuthorizationServiceRole", "vmodl.DynamicData", "dataservice.version.version2", [("id", "long", "dataservice.version.version2", 0), ("name", "string", "dataservice.version.version2", 0), ("description", "string", "dataservice.version.version2", 0), ("tenant", "vmodl.URI", "dataservice.version.version2", F_OPTIONAL), ("privilegeId", "string[]", "dataservice.version.version2", F_OPTIONAL), ("version", "long", "dataservice.version.version2", 0)])
# --- Internal access-control types and AuthorizationServiceInternal -----------
# Snapshot/delta structures used for cache synchronization between the
# Inventory Service and its consumers.
CreateDataType("dataservice.accesscontrol.internal.AuthInfoData", "InventoryServiceInternalAuthInfoData", "vmodl.DynamicData", "dataservice.version.version2", [("id", "string", "dataservice.version.version2", 0), ("parent", "string", "dataservice.version.version2", F_OPTIONAL), ("altParent", "string", "dataservice.version.version2", F_OPTIONAL), ("resolvedId", "string", "dataservice.version.version2", F_OPTIONAL), ("isDeleted", "boolean", "dataservice.version.version2", F_OPTIONAL), ("providerUuid", "string", "dataservice.version.version2", F_OPTIONAL), ("product", "string", "dataservice.version.version2", F_OPTIONAL), ("owner", "dataservice.accesscontrol.Principal", "dataservice.version.version2", F_OPTIONAL)])
CreateDataType("dataservice.accesscontrol.internal.AuthInfoDetail", "InventoryServiceInternalAuthInfoDetail", "vmodl.DynamicData", "dataservice.version.version2", [("version", "string", "dataservice.version.version2", 0), ("isDelta", "boolean", "dataservice.version.version2", 0), ("authInfoData", "dataservice.accesscontrol.internal.AuthInfoData[]", "dataservice.version.version2", F_OPTIONAL)])
# Version vectors used by waitForNextUpdate(/V2) to detect authorization
# changes; V2 carries the same fields as V1 in this schema.
CreateDataType("dataservice.accesscontrol.internal.AuthUpdateVersion", "InventoryServiceInternalAuthUpdateVersion", "vmodl.DynamicData", "dataservice.version.version2", [("privilegeVersion", "long", "dataservice.version.version2", 0), ("roleVersion", "long", "dataservice.version.version2", 0), ("permissionVersion", "long", "dataservice.version.version2", 0), ("globalPermissionVersion", "long", "dataservice.version.version2", 0), ("authInfoVersion", "long", "dataservice.version.version2", 0), ("sysViewPrivilegesVersion", "long", "dataservice.version.version2", 0)])
CreateDataType("dataservice.accesscontrol.internal.AuthUpdateVersionV2", "InventoryServiceInternalAuthUpdateVersionV2", "vmodl.DynamicData", "dataservice.version.version2", [("privilegeVersion", "long", "dataservice.version.version2", 0), ("roleVersion", "long", "dataservice.version.version2", 0), ("permissionVersion", "long", "dataservice.version.version2", 0), ("globalPermissionVersion", "long", "dataservice.version.version2", 0), ("authInfoVersion", "long", "dataservice.version.version2", 0), ("sysViewPrivilegesVersion", "long", "dataservice.version.version2", 0)])
# Internal managed object: bulk snapshot getters, solution-role/privilege
# bootstrap, role relocation, and long-poll waits for authorization updates.
CreateManagedType("dataservice.accesscontrol.internal.AuthorizationServiceInternal", "AuthorizationServiceInternal", "vmodl.ManagedObject", "dataservice.version.version2", None, [("getImpliedSystemViewPrivilegeDetail", "GetImpliedSystemViewPrivilegeDetail", "dataservice.version.version2", (("providers", "string[]", "dataservice.version.version2", F_OPTIONAL, None),("products", "string[]", "dataservice.version.version2", F_OPTIONAL, None),("version", "string", "dataservice.version.version2", F_OPTIONAL, None),), (0, "dataservice.accesscontrol.internal.SysViewPrivilegeDetail", "dataservice.accesscontrol.internal.SysViewPrivilegeDetail"), "System.View", None), ("getAuthInfoDetail", "GetAuthInfoDetail", "dataservice.version.version2", (("providers", "string[]", "dataservice.version.version2", F_OPTIONAL, None),("products", "string[]", "dataservice.version.version2", F_OPTIONAL, None),("version", "string", "dataservice.version.version2", F_OPTIONAL, None),), (0, "dataservice.accesscontrol.internal.AuthInfoDetail", "dataservice.accesscontrol.internal.AuthInfoDetail"), "System.View", None), ("getPermissions", "GetPermissions", "dataservice.version.version2", (("providers", "string[]", "dataservice.version.version2", F_OPTIONAL, None),("products", "string[]", "dataservice.version.version2", F_OPTIONAL, None),("version", "string", "dataservice.version.version2", F_OPTIONAL, None),), (0, "dataservice.accesscontrol.internal.PermissionsDetail", "dataservice.accesscontrol.internal.PermissionsDetail"), "System.View", None), ("getCacheSyncPermissions", "GetCacheSyncPermissions", "dataservice.version.version2", (("providers", "string[]", "dataservice.version.version2", F_OPTIONAL, None),("products", "string[]", "dataservice.version.version2", F_OPTIONAL, None),("version", "string", "dataservice.version.version2", F_OPTIONAL, None),), (0, "dataservice.accesscontrol.internal.PermissionsDetail", "dataservice.accesscontrol.internal.PermissionsDetail"), "System.View", None), 
("addSolutionRole", "AddSolutionRole", "dataservice.version.version2", (("id", "long", "dataservice.version.version2", 0, None),("name", "string", "dataservice.version.version2", 0, None),("description", "string", "dataservice.version.version2", 0, None),("privilegeIds", "string[]", "dataservice.version.version2", F_OPTIONAL, None),), (0, "dataservice.accesscontrol.Role", "dataservice.accesscontrol.Role"), "Authorization.ModifyRoles", ["dataservice.fault.AlreadyExistsFault", ]), ("addOrUpdatePrivileges", "AddOrUpdatePrivileges", "dataservice.version.version2", (("privileges", "dataservice.accesscontrol.Privilege[]", "dataservice.version.version2", 0, None),), (0, "void", "void"), "Authorization.ModifyPrivileges", None), ("relocateRole", "RelocateRole", "dataservice.version.version2", (("roleId", "long", "dataservice.version.version2", 0, None),), (0, "void", "void"), "Authorization.ModifyRoles", None), ("getRelocatedRole", "GetRelocatedRole", "dataservice.version.version2", (("roleId", "long", "dataservice.version.version2", 0, None),), (0, "long", "long"), "System.View", None), ("waitForNextUpdate", "WaitForNextUpdate", "dataservice.version.version2", (("currVersion", "dataservice.accesscontrol.internal.AuthUpdateVersion", "dataservice.version.version2", F_OPTIONAL, None),), (0, "dataservice.accesscontrol.internal.AuthUpdateVersion", "dataservice.accesscontrol.internal.AuthUpdateVersion"), "System.View", None), ("waitForNextUpdateV2", "WaitForNextUpdateV2", "dataservice.version.version2", (("currVersion", "dataservice.accesscontrol.internal.AuthUpdateVersionV2", "dataservice.version.version2", F_OPTIONAL, None),), (0, "dataservice.accesscontrol.internal.AuthUpdateVersionV2", "dataservice.accesscontrol.internal.AuthUpdateVersionV2"), "System.View", None), ("removeProviderPermissions", "RemoveProviderPermissions", "dataservice.version.version2", (("provider", "string", "dataservice.version.version2", 0, None),), (0, "void", "void"), 
"Authorization.ModifyPermissions", None)])
CreateDataType("dataservice.accesscontrol.internal.PermissionsDetail", "InventoryServiceInternalPermissionsDetail", "vmodl.DynamicData", "dataservice.version.version2", [("version", "string", "dataservice.version.version2", 0), ("isDelta", "boolean", "dataservice.version.version2", 0), ("permissions", "dataservice.accesscontrol.AclMapping[]", "dataservice.version.version2", F_OPTIONAL)])
# Role subtype that records which role a deleted role was merged into.
CreateDataType("dataservice.accesscontrol.internal.PersistedRole", "InventoryServiceInternalPersistedRole", "dataservice.accesscontrol.Role", "dataservice.version.version2", [("mergedWith", "long", "dataservice.version.version2", F_OPTIONAL)])
CreateDataType("dataservice.accesscontrol.internal.PrincipalDocMapping", "InventoryServicePrincipalDocMapping", "vmodl.DynamicData", "dataservice.version.version2", [("principal", "dataservice.accesscontrol.Principal", "dataservice.version.version2", 0), ("docCount", "int", "dataservice.version.version2", F_OPTIONAL)])
CreateDataType("dataservice.accesscontrol.internal.SysViewPrivilegeData", "InventoryServiceSysViewPrivilegeData", "vmodl.DynamicData", "dataservice.version.version2", [("docId", "vmodl.URI", "dataservice.version.version2", 0), ("childrenWithPermission", "dataservice.accesscontrol.internal.PrincipalDocMapping[]", "dataservice.version.version2", F_OPTIONAL)])
CreateDataType("dataservice.accesscontrol.internal.SysViewPrivilegeDetail", "InventoryServiceSysViewPrivilegeDetail", "vmodl.DynamicData", "dataservice.version.version2", [("version", "string", "dataservice.version.version2", 0), ("isDelta", "boolean", "dataservice.version.version2", 0), ("privilegeData", "dataservice.accesscontrol.internal.SysViewPrivilegeData[]", "dataservice.version.version2", F_OPTIONAL)])
# Session manager: SAML-token login, username/password login, and logout.
# All three methods are marked System.Anonymous (callable pre-authentication).
CreateManagedType("dataservice.authentication.SessionManager", "InventoryServiceSessionManager", "vmodl.ManagedObject", "dataservice.version.version2", None, [("loginBySamlToken", "InventoryServiceSamlLogin", "dataservice.version.version2", (), (0, "void", "void"), "System.Anonymous", None), ("logout", "InventoryServiceLogout", "dataservice.version.version3", (), (0, "boolean", "boolean"), "System.Anonymous", None), ("login", "InventoryServiceLogin", "dataservice.version.version2", (("username", "string", "dataservice.version.version2", 0, None),("password", "string", "dataservice.version.version2", 0, None),), (0, "void", "void"), "System.Anonymous", None)])
# --- Fault types --------------------------------------------------------------
# Exceptions raised by the Inventory Service methods above; most extend
# vmodl.MethodFault (InternalErrorFault is a RuntimeFault, NotAuthenticated
# is a SecurityError).
CreateDataType("dataservice.fault.AlreadyExistsFault", "InventoryServiceAlreadyExistsFault", "vmodl.MethodFault", "dataservice.version.version1", None)
CreateDataType("dataservice.fault.InternalErrorFault", "InventoryServiceInternalErrorFault", "vmodl.RuntimeFault", "dataservice.version.version1", None)
CreateDataType("dataservice.fault.InvalidConfigurationFault", "InventoryServiceInvalidConfigurationFault", "vmodl.MethodFault", "dataservice.version.version1", None)
CreateDataType("dataservice.fault.InvalidTagAssociationFault", "InventoryServiceInvalidTagAssociationFault", "vmodl.MethodFault", "dataservice.version.version2", None)
CreateDataType("dataservice.fault.NotAuthenticatedFault", "InventoryServiceNotAuthenticatedFault", "vmodl.fault.SecurityError", "dataservice.version.version1", None)
CreateDataType("dataservice.fault.NotFoundFault", "InventoryServiceNotFoundFault", "vmodl.MethodFault", "dataservice.version.version1", [("object", "vmodl.URI", "dataservice.version.version1", F_OPTIONAL)])
CreateDataType("dataservice.fault.URINotFoundFault", "InventoryServiceURINotFoundFault", "vmodl.MethodFault", "dataservice.version.version2", [("object", "vmodl.URI", "dataservice.version.version2", 0)])
CreateDataType("dataservice.fault.UserNotFoundFault", "InventoryServiceUserNotFoundFault", "vmodl.MethodFault", "dataservice.version.version1", [("principalName", "string", "dataservice.version.version1", F_OPTIONAL)])
# --- Provider configuration types ---------------------------------------------
# Describes a data provider registered with the Inventory Service: identity,
# feed/schema endpoints, indexing configuration, login plumbing (SOAP or
# vAPI), and replication/size limits.  Note individual properties were added
# across versions 1-3.
CreateDataType("dataservice.provider.ProviderConfiguration", "InventoryServiceProviderConfiguration", "vmodl.DynamicData", "dataservice.version.version1", [("product", "string", "dataservice.version.version1", 0), ("providerUuid", "string", "dataservice.version.version1", 0), ("indexConfiguration", "dataservice.provider.ProviderConfiguration.IndexConfiguration[]", "dataservice.version.version1", F_OPTIONAL), ("createProperty", "dataservice.provider.ProviderConfiguration.CreateProperty[]", "dataservice.version.version2", F_OPTIONAL), ("providerFeed", "vmodl.URI", "dataservice.version.version1", F_OPTIONAL), ("schema", "vmodl.URI", "dataservice.version.version1", F_OPTIONAL), ("providerKind", "string", "dataservice.version.version1", 0), ("permissionParentProperty", "dataservice.QName", "dataservice.version.version2", F_OPTIONAL), ("scope", "vmodl.URI", "dataservice.version.version2", F_OPTIONAL), ("ownerProperty", "dataservice.QName", "dataservice.version.version2", F_OPTIONAL), ("loginURI", "vmodl.URI", "dataservice.version.version3", F_OPTIONAL), ("soapVersionId", "string", "dataservice.version.version3", F_OPTIONAL), ("soapLoginMessage", "string", "dataservice.version.version3", F_OPTIONAL), ("vapiLoginInterfaceIdentifier", "string", "dataservice.version.version3", F_OPTIONAL), ("vapiLoginMethodIdentifier", "string", "dataservice.version.version3", F_OPTIONAL), ("vapiSessionCookieName", "string", "dataservice.version.version3", F_OPTIONAL), ("replicated", "boolean", "dataservice.version.version3", F_OPTIONAL), ("pullPollingFrequencySecs", "long", "dataservice.version.version3", F_OPTIONAL), ("providerMaxSize_MB", "long", "dataservice.version.version3", F_OPTIONAL)])
# Index configuration hierarchy: base type plus element/attribute selectors
# and the value / element-name / full-text index variants.
CreateDataType("dataservice.provider.ProviderConfiguration.IndexConfiguration", "InventoryServiceProviderConfigurationIndexConfiguration", "vmodl.DynamicData", "dataservice.version.version1", [("id", "string", "dataservice.version.version1", 0)])
CreateDataType("dataservice.provider.ProviderConfiguration.IndexConfiguration.Element", "InventoryServiceProviderConfigurationIndexConfigurationElement", "vmodl.DynamicData", "dataservice.version.version1", [("namespace", "string", "dataservice.version.version1", F_OPTIONAL), ("localName", "string", "dataservice.version.version1", 0)])
CreateDataType("dataservice.provider.ProviderConfiguration.IndexConfiguration.Attribute", "InventoryServiceProviderConfigurationIndexConfigurationAttribute", "vmodl.DynamicData", "dataservice.version.version2", [("namespace", "string", "dataservice.version.version2", F_OPTIONAL), ("localName", "string", "dataservice.version.version2", 0)])
CreateDataType("dataservice.provider.ProviderConfiguration.ValueIndexConfiguration", "InventoryServiceProviderConfigurationValueIndexConfiguration", "dataservice.provider.ProviderConfiguration.IndexConfiguration", "dataservice.version.version1", [("element", "dataservice.provider.ProviderConfiguration.IndexConfiguration.Element", "dataservice.version.version1", F_OPTIONAL), ("attribute", "dataservice.provider.ProviderConfiguration.IndexConfiguration.Attribute", "dataservice.version.version2", F_OPTIONAL)])
CreateDataType("dataservice.provider.ProviderConfiguration.ElementNameIndexConfiguration", "InventoryServiceProviderConfigurationElementNameIndexConfiguration", "dataservice.provider.ProviderConfiguration.IndexConfiguration", "dataservice.version.version1", [("element", "dataservice.provider.ProviderConfiguration.IndexConfiguration.Element[]", "dataservice.version.version1", F_OPTIONAL)])
CreateDataType("dataservice.provider.ProviderConfiguration.FullTextIndexConfiguration", "InventoryServiceProviderConfigurationFullTextIndexConfiguration", "dataservice.provider.ProviderConfiguration.IndexConfiguration", "dataservice.version.version1", [("element", "dataservice.provider.ProviderConfiguration.FullTextIndexConfiguration.FullTextElement", "dataservice.version.version1", F_OPTIONAL)])
CreateDataType("dataservice.provider.ProviderConfiguration.FullTextIndexConfiguration.FullTextElement", "InventoryServiceProviderConfigurationFullTextIndexConfigurationFullTextElement", "dataservice.provider.ProviderConfiguration.IndexConfiguration.Element", "dataservice.version.version1", [("analyzerName", "string", "dataservice.version.version1", F_OPTIONAL)])
CreateDataType("dataservice.provider.ProviderConfiguration.CreateProperty", "InventoryServiceProviderConfigurationCreateProperty", "vmodl.DynamicData", "dataservice.version.version2", [("documentType", "dataservice.QName", "dataservice.version.version2", 0), ("properties", "dataservice.QName[]", "dataservice.version.version2", 0)])
# Enumeration of supported provider feed kinds.
CreateEnumType("dataservice.provider.ProviderConfiguration.ProviderKind", "InventoryServiceProviderConfigurationProviderKind", "dataservice.version.version1", ["atom_sub", "atom_post", "vpx"])
# Provider manager: register/unregister/reconfigure providers, retrieve
# provider configurations (by UUID, by product, or all), and reset provider
# content (v3).
CreateManagedType("dataservice.provider.ProviderManager", "InventoryServiceProviderManager", "vmodl.ManagedObject", "dataservice.version.version1", None, [("registerProvider", "RegisterProvider", "dataservice.version.version1", (("config", "dataservice.provider.ProviderConfiguration", "dataservice.version.version1", 0, None),), (0, "void", "void"), None, ["dataservice.fault.AlreadyExistsFault", "dataservice.fault.InvalidConfigurationFault", ]), ("unregisterProvider", "UnregisterProvider", "dataservice.version.version1", (("uuid", "string", "dataservice.version.version1", 0, None),), (0, "void", "void"), None, ["dataservice.fault.NotFoundFault", ]), ("reconfigureProviderIndex", "ReconfigureProviderIndex", "dataservice.version.version1", (("uuid", "string", "dataservice.version.version1", 0, None),("config", "dataservice.provider.ProviderConfiguration.IndexConfiguration[]", "dataservice.version.version1", F_OPTIONAL, None),), (0, "void", "void"), None, ["dataservice.fault.NotFoundFault", "dataservice.fault.InvalidConfigurationFault", ]), ("retrieveProviderConfig", "RetrieveProviderConfig", "dataservice.version.version1", (("uuid", "string", "dataservice.version.version1", 0, None),), (F_OPTIONAL, "dataservice.provider.ProviderConfiguration", "dataservice.provider.ProviderConfiguration"), None, None), ("retrieveProviderConfigsByProduct", "RetrieveProviderConfigsByProduct", "dataservice.version.version1", (("product", "string", "dataservice.version.version1", 0, None),), (F_OPTIONAL, "dataservice.provider.ProviderConfiguration[]", "dataservice.provider.ProviderConfiguration[]"), None, None), ("retrieveAllProviderConfigs", "RetrieveAllProviderConfigs", "dataservice.version.version1", (), (F_OPTIONAL, "dataservice.provider.ProviderConfiguration[]", "dataservice.provider.ProviderConfiguration[]"), None, None), ("reconfigureProvider", "ReconfigureProvider", "dataservice.version.version2", (("config", "dataservice.provider.ProviderConfiguration", 
"dataservice.version.version2", 0, None),), (0, "void", "void"), None, ["dataservice.fault.InvalidConfigurationFault", "dataservice.fault.NotFoundFault", ]), ("resetProviderContent", "ResetProviderContent", "dataservice.version.version3", (("uuid", "string", "dataservice.version.version3", 0, None),), (0, "void", "void"), None, ["dataservice.fault.NotFoundFault", ])])
CreateManagedType("dataservice.tagging.Category", "InventoryServiceCategory", "vmodl.ManagedObject", "dataservice.version.version2", [("info", "dataservice.tagging.CategoryInfo", "dataservice.version.version2", 0, None)], [("updateInfo", "UpdateInventoryServiceCategoryDescription", "dataservice.version.version2", (("newInfo", "dataservice.tagging.CategoryInfo", "dataservice.version.version2", 0, None),), (0, "void", "void"), "InventoryService.Tagging.EditCategory", ["dataservice.fault.AlreadyExistsFault", "dataservice.fault.InvalidConfigurationFault", ]), ("delete", "DeleteInventoryServiceCategory", "dataservice.version.version2", (), (0, "void", "void"), "InventoryService.Tagging.DeleteCategory", None), ("createTag", "CreateInventoryServiceTag", "dataservice.version.version2", (("info", "dataservice.tagging.TagInfo", "dataservice.version.version2", 0, None),), (0, "dataservice.tagging.Tag", "dataservice.tagging.Tag"), "InventoryService.Tagging.CreateTag", ["dataservice.fault.AlreadyExistsFault", ]), ("bulkCreateTag", "BulkCreateInventoryServiceTag", "dataservice.version.version2", (("info", "dataservice.tagging.TagInfo[]", "dataservice.version.version2", 0, None),), (0, "dataservice.tagging.Category.CreateTagResult[]", "dataservice.tagging.Category.CreateTagResult[]"), "InventoryService.Tagging.CreateTag", None), ("addToUsedBy", "AddToUsedByArrayForCategory", "dataservice.version.version2", (("usedByField", "string", "dataservice.version.version2", 0, None),), (0, "void", "void"), "InventoryService.Tagging.ModifyUsedByForCategory", None), ("removeFromUsedBy", "RemoveFromUsedByArrayForCategory", "dataservice.version.version2", (("usedByField", "string", "dataservice.version.version2", 0, None),), (0, "void", "void"), "InventoryService.Tagging.ModifyUsedByForCategory", None)])
CreateDataType("dataservice.tagging.Category.CreateTagResult", "CreateInventoryServiceTagResult", "vmodl.DynamicData", "dataservice.version.version2", [("input", "dataservice.tagging.TagInfo", "dataservice.version.version2", 0), ("result", "dataservice.tagging.Tag", "dataservice.version.version2", F_OPTIONAL), ("error", "vmodl.MethodFault", "dataservice.version.version2", F_OPTIONAL)])
CreateDataType("dataservice.tagging.CategoryInfo", "InventoryServiceCategoryInfo", "vmodl.DynamicData", "dataservice.version.version2", [("name", "string", "dataservice.version.version2", 0), ("description", "string", "dataservice.version.version2", F_OPTIONAL), ("cardinality", "dataservice.tagging.CategoryInfo.Cardinality", "dataservice.version.version2", 0), ("associableEntityType", "dataservice.QName[]", "dataservice.version.version2", F_OPTIONAL), ("scope", "dataservice.tagging.Scope", "dataservice.version.version3", F_OPTIONAL), ("usedBy", "string[]", "dataservice.version.version2", F_OPTIONAL)])
CreateEnumType("dataservice.tagging.CategoryInfo.Cardinality", "InventoryServiceCategoryInfoCardinality", "dataservice.version.version2", ["single", "multiple"])
CreateManagedType("dataservice.tagging.Scope", "InventoryServiceScope", "vmodl.ManagedObject", "dataservice.version.version3", None, [("info", "CheckScopeInfo", "dataservice.version.version3", (), (0, "dataservice.tagging.ScopeInfo", "dataservice.tagging.ScopeInfo"), None, None), ("delete", "DeleteInventoryServiceScope", "dataservice.version.version3", (), (0, "void", "void"), "InventoryService.Tagging.DeleteScope", None)])
CreateManagedType("dataservice.tagging.Tag", "InventoryServiceTag", "vmodl.ManagedObject", "dataservice.version.version2", [("info", "dataservice.tagging.TagInfo", "dataservice.version.version2", 0, None), ("category", "dataservice.tagging.Category", "dataservice.version.version2", 0, None)], [("updateInfo", "UpdateInventoryServiceTagInfo", "dataservice.version.version2", (("newInfo", "dataservice.tagging.TagInfo", "dataservice.version.version2", 0, None),), (0, "void", "void"), "InventoryService.Tagging.EditTag", ["dataservice.fault.AlreadyExistsFault", ]), ("delete", "DeleteInventoryServiceTag", "dataservice.version.version2", (), (0, "void", "void"), "InventoryService.Tagging.DeleteTag", None), ("addToUsedBy", "AddToUsedByArrayForTag", "dataservice.version.version2", (("usedByField", "string", "dataservice.version.version2", 0, None),), (0, "void", "void"), "InventoryService.Tagging.ModifyUsedByForTag", None), ("removeFromUsedBy", "RemoveFromUsedByArrayForTag", "dataservice.version.version2", (("usedByField", "string", "dataservice.version.version2", 0, None),), (0, "void", "void"), "InventoryService.Tagging.ModifyUsedByForTag", None)])
CreateDataType("dataservice.tagging.TagAssociationInfo", "InventoryServiceTagAssociationInfo", "vmodl.DynamicData", "dataservice.version.version2", [("tagId", "dataservice.tagging.Tag", "dataservice.version.version2", 0), ("objectId", "vmodl.URI", "dataservice.version.version2", 0), ("tagInfo", "dataservice.tagging.TagInfo", "dataservice.version.version2", F_OPTIONAL), ("categoryInfo", "dataservice.tagging.CategoryInfo", "dataservice.version.version2", F_OPTIONAL), ("generationNumber", "long", "dataservice.version.version2", 0), ("associationExists", "boolean", "dataservice.version.version2", 0)])
CreateDataType("dataservice.tagging.TagInfo", "InventoryServiceTagInfo", "vmodl.DynamicData", "dataservice.version.version2", [("name", "string", "dataservice.version.version2", 0), ("description", "string", "dataservice.version.version2", F_OPTIONAL), ("usedBy", "string[]", "dataservice.version.version2", F_OPTIONAL)])
CreateManagedType("dataservice.tagging.TagManager", "InventoryServiceTagManager", "vmodl.ManagedObject", "dataservice.version.version2", None, [("queryDefaultScope", "QueryDefaultInventoryServiceScope", "dataservice.version.version3", (), (0, "dataservice.tagging.Scope", "dataservice.tagging.Scope"), "System.Anonymous", None), ("queryDefaultSystemScope", "QueryDefaultInventoryServiceSystemScope", "dataservice.version.version3", (), (0, "dataservice.tagging.Scope", "dataservice.tagging.Scope"), "System.Anonymous", None), ("enumerateScopes", "EnumerateInventoryServiceScopes", "dataservice.version.version3", (), (0, "dataservice.tagging.Scope[]", "dataservice.tagging.Scope[]"), "System.Anonymous", None), ("createScope", "CreateInventoryServiceScope", "dataservice.version.version3", (), (0, "dataservice.tagging.Scope", "dataservice.tagging.Scope"), "InventoryService.Tagging.CreateScope", None), ("createSystemScope", "CreateInventoryServiceSystemScope", "dataservice.version.version3", (("flags", "dataservice.tagging.TagManager.SystemScopeFlags", "dataservice.version.version3", 0, None),), (0, "dataservice.tagging.Scope", "dataservice.tagging.Scope"), None, None), ("createCategory", "CreateInventoryServiceCategory", "dataservice.version.version2", (("info", "dataservice.tagging.CategoryInfo", "dataservice.version.version2", 0, None),("scope", "dataservice.tagging.Scope", "dataservice.version.version3", F_OPTIONAL, None),), (0, "dataservice.tagging.Category", "dataservice.tagging.Category"), "InventoryService.Tagging.CreateCategory", ["dataservice.fault.AlreadyExistsFault", "vmodl.fault.InvalidArgument", ]), ("createDefaultSystemCategory", "CreateInventoryServiceDefaultSystemCategory", "dataservice.version.version2", (("info", "dataservice.tagging.CategoryInfo", "dataservice.version.version2", 0, None),), (0, "dataservice.tagging.Category", "dataservice.tagging.Category"), "InventoryService.Tagging.CreateCategory", ["dataservice.fault.AlreadyExistsFault", 
"vmodl.fault.InvalidArgument", ]), ("revokePropagatingPrivOnCategories", "RevokePropagatingPrivilegesOnCategories", "dataservice.version.version2", (("category", "vmodl.URI", "dataservice.version.version2", 0, None),), (0, "void", "void"), None, None), ("revokePropagatingPrivOnTags", "RevokePropagatingPrivilegesOnTags", "dataservice.version.version2", (("tag", "vmodl.URI", "dataservice.version.version2", 0, None),), (0, "void", "void"), None, None), ("bulkCreateCategory", "BulkCreateInventoryServiceCategory", "dataservice.version.version2", (("info", "dataservice.tagging.CategoryInfo[]", "dataservice.version.version2", 0, None),("scope", "dataservice.tagging.Scope", "dataservice.version.version3", F_OPTIONAL, None),), (0, "dataservice.tagging.TagManager.CreateCategoryResult[]", "dataservice.tagging.TagManager.CreateCategoryResult[]"), None, None), ("bulkCreateDefaultSystemCategory", "BulkCreateInventoryServiceSystemCategory", "dataservice.version.version2", (("info", "dataservice.tagging.CategoryInfo[]", "dataservice.version.version2", 0, None),), (0, "dataservice.tagging.TagManager.CreateCategoryResult[]", "dataservice.tagging.TagManager.CreateCategoryResult[]"), None, None), ("attachTagsToObject", "AttachInventoryServiceTagsToObject", "dataservice.version.version2", (("object", "vmodl.URI", "dataservice.version.version2", 0, None),("tag", "dataservice.tagging.Tag[]", "dataservice.version.version2", 0, "InventoryService.Tagging.AttachTag"),), (0, "void", "void"), None, ["vmodl.fault.ManagedObjectNotFound", "dataservice.fault.InvalidTagAssociationFault", ]), ("attachTagsToManagedObject", "AttachInventoryServiceTagsToManagedObject", "dataservice.version.version2", (("objectMoRef", "vmodl.ManagedObject", "dataservice.version.version2", 0, None),("tag", "dataservice.tagging.Tag[]", "dataservice.version.version2", 0, "InventoryService.Tagging.AttachTag"),), (0, "void", "void"), None, ["vmodl.fault.ManagedObjectNotFound", "dataservice.fault.InvalidTagAssociationFault", 
]), ("bulkAttachTags", "BulkAttachInventoryServiceTags", "dataservice.version.version2", (("objects", "vmodl.URI[]", "dataservice.version.version2", 0, None),("tag", "dataservice.tagging.Tag[]", "dataservice.version.version2", 0, "InventoryService.Tagging.AttachTag"),), (0, "dataservice.tagging.TagManager.AttachTagsResult[]", "dataservice.tagging.TagManager.AttachTagsResult[]"), None, None), ("detachTagsFromObject", "DetachInventoryServiceTagsFromObject", "dataservice.version.version2", (("object", "vmodl.URI", "dataservice.version.version2", 0, None),("tag", "dataservice.tagging.Tag[]", "dataservice.version.version2", 0, "InventoryService.Tagging.AttachTag"),), (0, "void", "void"), None, ["vmodl.fault.ManagedObjectNotFound", ]), ("detachTagsFromManagedObject", "DetachInventoryServiceTagsFromManagedObject", "dataservice.version.version2", (("objectMoRef", "vmodl.ManagedObject", "dataservice.version.version2", 0, None),("tag", "dataservice.tagging.Tag[]", "dataservice.version.version2", 0, "InventoryService.Tagging.AttachTag"),), (0, "void", "void"), None, ["vmodl.fault.ManagedObjectNotFound", ]), ("bulkDetachTags", "BulkDetachInventoryServiceTags", "dataservice.version.version2", (("objects", "vmodl.URI[]", "dataservice.version.version2", 0, None),("tag", "dataservice.tagging.Tag[]", "dataservice.version.version2", 0, "InventoryService.Tagging.AttachTag"),), (0, "dataservice.tagging.TagManager.DetachTagsResult[]", "dataservice.tagging.TagManager.DetachTagsResult[]"), None, None), ("enumerateTags", "EnumerateInventoryServiceTags", "dataservice.version.version2", (("scope", "dataservice.tagging.Scope[]", "dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("enumerateTagsForUsedBy", "EnumerateInventoryServiceTagsForUsedBy", "dataservice.version.version2", (("usedByField", "string", "dataservice.version.version2", 0, None),("scope", "dataservice.tagging.Scope[]", 
"dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("enumerateDefaultSystemTags", "EnumerateInventoryServiceDefaultSystemTags", "dataservice.version.version2", (), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("enumerateDefaultSystemTagsForUsedBy", "EnumerateInventoryServiceDefaultSystemTagsForUsedBy", "dataservice.version.version2", (("usedByField", "string", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("enumerateCategories", "EnumerateInventoryServiceCategories", "dataservice.version.version2", (("scope", "dataservice.tagging.Scope[]", "dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.tagging.Category[]", "dataservice.tagging.Category[]"), None, None), ("enumerateCategoriesForUsedBy", "EnumerateInventoryServiceCategoriesForUsedBy", "dataservice.version.version2", (("usedByField", "string", "dataservice.version.version2", 0, None),("scope", "dataservice.tagging.Scope[]", "dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.tagging.Category[]", "dataservice.tagging.Category[]"), None, None), ("enumerateDefaultSystemCategories", "EnumerateInventoryServiceDefaultSystemCategories", "dataservice.version.version2", (), (F_OPTIONAL, "dataservice.tagging.Category[]", "dataservice.tagging.Category[]"), None, None), ("enumerateDefaultSystemCategoriesForUsedBy", "EnumerateInventoryServiceDefaultSystemCategoriesForUsedBy", "dataservice.version.version2", (("usedByField", "string", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "dataservice.tagging.Category[]", "dataservice.tagging.Category[]"), None, None), ("queryAttachableTags", "QueryAttachableInventoryServiceTags", "dataservice.version.version2", (("object", "vmodl.URI[]", "dataservice.version.version2", 0, None),("scope", 
"dataservice.tagging.Scope[]", "dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("queryAttachableDefaultSystemTags", "QueryAttachableInventoryServiceDefaultSystemTags", "dataservice.version.version2", (("object", "vmodl.URI[]", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("queryAttachableTagsToManagedObject", "QueryAttachableInventoryServiceTagsToGivenManagedObjects", "dataservice.version.version2", (("objectMoRef", "vmodl.ManagedObject[]", "dataservice.version.version2", 0, None),("scope", "dataservice.tagging.Scope[]", "dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, ["vmodl.fault.ManagedObjectNotFound", ]), ("queryAttachableDefaultSystemTagsToManagedObject", "QueryAttachableInventoryServiceDefaultSystemTagsToGivenManagedObjects", "dataservice.version.version2", (("objectMoRef", "vmodl.ManagedObject[]", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, ["vmodl.fault.ManagedObjectNotFound", ]), ("queryAttachedObjects", "QueryAttachedInventoryServiceObjects", "dataservice.version.version2", (("tag", "dataservice.tagging.Tag", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "vmodl.URI[]", "vmodl.URI[]"), None, ["vmodl.fault.ManagedObjectNotFound", ]), ("queryAttachedManagedObjects", "QueryAttachedInventoryServiceManagedObjects", "dataservice.version.version2", (("tag", "dataservice.tagging.Tag", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "vmodl.ManagedObject[]", "vmodl.ManagedObject[]"), None, ["vmodl.fault.ManagedObjectNotFound", ]), ("queryAttachedTags", "QueryAttachedInventoryServiceTags", "dataservice.version.version2", (("object", "vmodl.URI", "dataservice.version.version2", 0, None),("scope", 
"dataservice.tagging.Scope[]", "dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("queryAttachedTagsForUsedBy", "QueryAttachedInventoryServiceTagsForUsedBy", "dataservice.version.version2", (("usedBy", "string", "dataservice.version.version2", 0, None),("object", "vmodl.URI", "dataservice.version.version2", 0, None),("scope", "dataservice.tagging.Scope[]", "dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("queryMapOfAttachedTagsForUsedBy", "QueryMapofAttachedInventoryServiceTagsForUsedBy", "dataservice.version.version3", (("usedBy", "string", "dataservice.version.version3", 0, None),("objects", "vmodl.URI[]", "dataservice.version.version3", 0, None),("scope", "dataservice.tagging.Scope[]", "dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.tagging.TaggingEntry[]", "dataservice.tagging.TaggingEntry[]"), None, None), ("queryMapOfAttachedTags", "QueryMapofAttachedInventoryServiceTags", "dataservice.version.version3", (("objects", "vmodl.URI[]", "dataservice.version.version3", 0, None),("scope", "dataservice.tagging.Scope[]", "dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, "dataservice.tagging.TaggingEntry[]", "dataservice.tagging.TaggingEntry[]"), None, None), ("queryAttachedDefaultSystemTags", "QueryAttachedInventoryServiceDefaultSystemTags", "dataservice.version.version2", (("object", "vmodl.URI", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("queryAttachedTagsToManagedObject", "QueryAttachedInventoryServiceTagsToGivenManagedObject", "dataservice.version.version2", (("objectMoRef", "vmodl.ManagedObject", "dataservice.version.version2", 0, None),("scope", "dataservice.tagging.Scope[]", "dataservice.version.version3", F_OPTIONAL, None),), (F_OPTIONAL, 
"dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, ["vmodl.fault.ManagedObjectNotFound", ]), ("queryAttachedDefaultSystemTagsToManagedObject", "QueryAttachedInventoryServiceDefaultSystemTagsToGivenManagedObject", "dataservice.version.version2", (("objectMoRef", "vmodl.ManagedObject", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, ["vmodl.fault.ManagedObjectNotFound", ]), ("queryTagsByCategory", "QueryInventoryServiceTagsByCategory", "dataservice.version.version2", (("category", "dataservice.tagging.Category", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("queryTagsByCategoryForUsedBy", "QueryInventoryServiceTagsByCategoryForUsedBy", "dataservice.version.version2", (("category", "dataservice.tagging.Category", "dataservice.version.version2", 0, None),("usedByField", "string", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "dataservice.tagging.Tag[]", "dataservice.tagging.Tag[]"), None, None), ("moRefToAuthUri", "FetchAuthUriForManagedObject", "dataservice.version.version2", (("objectMoRef", "vmodl.ManagedObject", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "string", "string"), None, None), ("getModifiedTagAssociations", "GetModifiedTagAssociations", "dataservice.version.version2", (("objectType", "dataservice.QName", "dataservice.version.version2", 0, None),("lastSeenGeneration", "long", "dataservice.version.version2", 0, None),), (F_OPTIONAL, "dataservice.tagging.TagAssociationInfo[]", "dataservice.tagging.TagAssociationInfo[]"), None, None)])
CreateEnumType("dataservice.tagging.TagManager.SystemScopeFlags", "SystemScopeFlags", "dataservice.version.version3", ["USER_VISIBLE_ONLY", "USER_VISIBLE_AND_ASSIGNABLE", "NOT_USER_VISIBLE"])
CreateDataType("dataservice.tagging.TagManager.CreateCategoryResult", "CreateInventoryServiceCategoryResult", "vmodl.DynamicData", "dataservice.version.version2", [("input", "dataservice.tagging.CategoryInfo", "dataservice.version.version2", 0), ("result", "dataservice.tagging.Category", "dataservice.version.version2", F_OPTIONAL), ("error", "vmodl.MethodFault", "dataservice.version.version2", F_OPTIONAL)])
CreateDataType("dataservice.tagging.TagManager.AttachTagsResult", "AttachInventoryServiceTagsResult", "vmodl.DynamicData", "dataservice.version.version2", [("object", "vmodl.URI", "dataservice.version.version2", 0), ("tags", "dataservice.tagging.Tag[]", "dataservice.version.version2", 0), ("error", "vmodl.MethodFault", "dataservice.version.version2", F_OPTIONAL)])
CreateDataType("dataservice.tagging.TagManager.DetachTagsResult", "DetachInventoryServiceTagsResult", "vmodl.DynamicData", "dataservice.version.version2", [("object", "vmodl.URI", "dataservice.version.version2", 0), ("tags", "dataservice.tagging.Tag[]", "dataservice.version.version2", 0), ("error", "vmodl.MethodFault", "dataservice.version.version2", F_OPTIONAL)])
CreateDataType("dataservice.tagging.TaggingEntry", "TaggingEntry", "vmodl.DynamicData", "dataservice.version.version3", [("key", "string", "dataservice.version.version3", 0), ("value", "string", "dataservice.version.version3", 0), ("tagName", "string", "dataservice.version.version3", 0), ("parentCategory", "string", "dataservice.version.version3", 0), ("parentCategoryName", "string", "dataservice.version.version3", 0)])
CreateDataType("dataservice.tagging.ScopeInfo", "InventoryServiceScopeInfo", "vmodl.DynamicData", "dataservice.version.version3", [("systemScope", "boolean", "dataservice.version.version3", 0), ("systemScopeFlags", "dataservice.tagging.TagManager.SystemScopeFlags", "dataservice.version.version3", F_OPTIONAL), ("authorizationURI", "vmodl.URI", "dataservice.version.version3", 0)])
| 542.019802 | 13,555 | 0.771811 | 4,685 | 54,744 | 8.986766 | 0.094984 | 0.205639 | 0.221694 | 0.108377 | 0.678479 | 0.637413 | 0.550222 | 0.51336 | 0.477496 | 0.435147 | 0 | 0.015147 | 0.046106 | 54,744 | 100 | 13,556 | 547.44 | 0.791115 | 0.001078 | 0 | 0 | 1 | 0 | 0.732751 | 0.605984 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.010417 | 0.083333 | 0 | 0.083333 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
d215b4a5014b431d8e4ce68f792b15fdac287741 | 183 | py | Python | starfish/listing/__init__.py | datacraft-dsc/starfish-py | 95ff24410f056e8e2d313c3af97439fe003e294a | [
"Apache-2.0"
] | 4 | 2019-02-08T03:47:36.000Z | 2019-10-17T21:45:23.000Z | starfish/listing/__init__.py | datacraft-dsc/starfish-py | 95ff24410f056e8e2d313c3af97439fe003e294a | [
"Apache-2.0"
] | 81 | 2019-02-09T01:01:51.000Z | 2020-07-01T08:35:07.000Z | starfish/listing/__init__.py | oceanprotocol/ocean-py | 318ad0de2519e61d0a301c040a48d1839cd82425 | [
"Apache-2.0"
] | 1 | 2021-01-28T12:14:03.000Z | 2021-01-28T12:14:03.000Z | """
Listing Module
"""
from starfish.listing.listing import Listing # noqa: F401
from starfish.listing.listing_base import ListingBase # noqa: F401
| 22.875 | 76 | 0.63388 | 19 | 183 | 6.052632 | 0.473684 | 0.208696 | 0.330435 | 0.452174 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.046154 | 0.289617 | 183 | 7 | 77 | 26.142857 | 0.838462 | 0.202186 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
d22d01f261b272a054a4042d846b288526e6cb11 | 6,434 | py | Python | asv_bench/benchmarks/binary_ops.py | raspbian-packages/pandas | fb33806b5286deb327b2e0fa96aedf25a6ed563f | [
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause"
] | 2 | 2017-05-07T02:08:12.000Z | 2017-12-14T01:47:47.000Z | asv_bench/benchmarks/binary_ops.py | raspbian-packages/pandas | fb33806b5286deb327b2e0fa96aedf25a6ed563f | [
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause"
] | 1 | 2016-09-30T11:15:32.000Z | 2016-09-30T11:15:32.000Z | asv_bench/benchmarks/binary_ops.py | raspbian-packages/pandas | fb33806b5286deb327b2e0fa96aedf25a6ed563f | [
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause"
] | 1 | 2020-12-09T12:02:39.000Z | 2020-12-09T12:02:39.000Z | from .pandas_vb_common import *
import pandas.computation.expressions as expr
class frame_add(object):
    """Benchmark elementwise addition of two wide float DataFrames."""

    goal_time = 0.2

    def setup(self):
        shape = (20000, 100)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))

    def time_frame_add(self):
        (self.df + self.df2)
class frame_add_no_ne(object):
    """Benchmark DataFrame addition with numexpr acceleration disabled."""

    goal_time = 0.2

    def setup(self):
        shape = (20000, 100)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))
        expr.set_use_numexpr(False)

    def time_frame_add_no_ne(self):
        (self.df + self.df2)

    def teardown(self):
        # Re-enable numexpr so later benchmarks see the default setting.
        expr.set_use_numexpr(True)
class frame_add_st(object):
    """Benchmark DataFrame addition with numexpr limited to one thread."""

    goal_time = 0.2

    def setup(self):
        shape = (20000, 100)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))
        expr.set_numexpr_threads(1)

    def time_frame_add_st(self):
        (self.df + self.df2)

    def teardown(self):
        # Restore the default numexpr thread count for later benchmarks.
        expr.set_numexpr_threads()
class frame_float_div(object):
    """Benchmark floor division of two square float DataFrames."""

    goal_time = 0.2

    def setup(self):
        shape = (1000, 1000)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))

    def time_frame_float_div(self):
        # NOTE(review): this is floor division (//) despite the "div" name;
        # confirm whether true division (`/`) was intended here.
        (self.df // self.df2)
class frame_float_div_by_zero(object):
    """Benchmark true division of a float DataFrame by the scalar zero."""

    goal_time = 0.2

    def setup(self):
        values = np.random.randn(1000, 1000)
        self.df = DataFrame(values)

    def time_frame_float_div_by_zero(self):
        (self.df / 0)
class frame_float_floor_by_zero(object):
    """Benchmark floor division of a float DataFrame by the scalar zero."""

    goal_time = 0.2

    def setup(self):
        values = np.random.randn(1000, 1000)
        self.df = DataFrame(values)

    def time_frame_float_floor_by_zero(self):
        (self.df // 0)
class frame_float_mod(object):
    """Benchmark elementwise modulo of two square float DataFrames."""

    goal_time = 0.2

    def setup(self):
        shape = (1000, 1000)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))

    def time_frame_float_mod(self):
        # Bug fix: previously used `/`, which duplicated the division
        # benchmark instead of measuring modulo as the name promises.
        (self.df % self.df2)
class frame_int_div_by_zero(object):
    """Benchmark dividing an int16-range DataFrame by the scalar zero."""

    goal_time = 0.2

    def setup(self):
        # np.random.random_integers was deprecated in numpy 1.11 and later
        # removed; randint with an exclusive upper bound draws the same
        # inclusive [min, max] range of int16.
        lo, hi = np.iinfo(np.int16).min, np.iinfo(np.int16).max
        self.df = DataFrame(np.random.randint(lo, hi + 1, size=(1000, 1000)))

    def time_frame_int_div_by_zero(self):
        (self.df / 0)
class frame_int_mod(object):
    """Benchmark elementwise modulo of two square int16-range DataFrames."""

    goal_time = 0.2

    def setup(self):
        # np.random.random_integers was deprecated in numpy 1.11 and later
        # removed; randint with an exclusive upper bound draws the same
        # inclusive [min, max] range of int16.
        lo, hi = np.iinfo(np.int16).min, np.iinfo(np.int16).max
        self.df = DataFrame(np.random.randint(lo, hi + 1, size=(1000, 1000)))
        self.df2 = DataFrame(np.random.randint(lo, hi + 1, size=(1000, 1000)))

    def time_frame_int_mod(self):
        # Bug fix: previously used `/`, so this benchmark measured division
        # instead of the modulo operation its name advertises.
        (self.df % self.df2)
class frame_mult(object):
    """Benchmark elementwise multiplication of two wide float DataFrames."""

    goal_time = 0.2

    def setup(self):
        shape = (20000, 100)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))

    def time_frame_mult(self):
        (self.df * self.df2)
class frame_mult_no_ne(object):
    """Benchmark DataFrame multiplication with numexpr disabled."""

    goal_time = 0.2

    def setup(self):
        shape = (20000, 100)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))
        expr.set_use_numexpr(False)

    def time_frame_mult_no_ne(self):
        (self.df * self.df2)

    def teardown(self):
        # Re-enable numexpr so later benchmarks see the default setting.
        expr.set_use_numexpr(True)
class frame_mult_st(object):
    """Benchmark DataFrame multiplication with numexpr on one thread."""

    goal_time = 0.2

    def setup(self):
        shape = (20000, 100)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))
        expr.set_numexpr_threads(1)

    def time_frame_mult_st(self):
        (self.df * self.df2)

    def teardown(self):
        # Restore the default numexpr thread count for later benchmarks.
        expr.set_numexpr_threads()
class frame_multi_and(object):
    """Benchmark boolean masking of a DataFrame with a compound condition."""

    goal_time = 0.2

    def setup(self):
        shape = (20000, 100)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))

    def time_frame_multi_and(self):
        mask = (self.df > 0) & (self.df2 > 0)
        self.df[mask]
class frame_multi_and_no_ne(object):
    """Benchmark compound boolean masking with numexpr disabled."""

    goal_time = 0.2

    def setup(self):
        shape = (20000, 100)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))
        expr.set_use_numexpr(False)

    def time_frame_multi_and_no_ne(self):
        mask = (self.df > 0) & (self.df2 > 0)
        self.df[mask]

    def teardown(self):
        # Re-enable numexpr so later benchmarks see the default setting.
        expr.set_use_numexpr(True)
class frame_multi_and_st(object):
    """Benchmark compound boolean masking with numexpr on one thread."""

    goal_time = 0.2

    def setup(self):
        shape = (20000, 100)
        self.df = DataFrame(np.random.randn(*shape))
        self.df2 = DataFrame(np.random.randn(*shape))
        expr.set_numexpr_threads(1)

    def time_frame_multi_and_st(self):
        mask = (self.df > 0) & (self.df2 > 0)
        self.df[mask]

    def teardown(self):
        # Restore the default numexpr thread count for later benchmarks.
        expr.set_numexpr_threads()
class series_timestamp_compare(object):
    """Benchmark comparing a datetime64 Series against a scalar Timestamp."""

    goal_time = 0.2

    def setup(self):
        n = 1000000
        stamps = Series(date_range('20010101', periods=n, freq='T'))
        self.N = n
        self.halfway = n // 2 - 1
        self.s = stamps
        self.ts = stamps[self.halfway]

    def time_series_timestamp_compare(self):
        (self.s <= self.ts)
class timestamp_ops_diff1(object):
    """Benchmark Series.diff on a second-frequency datetime64 Series."""

    goal_time = 0.2
    N = 1000000

    def setup(self):
        self.s = self.create()

    def create(self):
        idx = date_range('20010101', periods=self.N, freq='s')
        return Series(idx)

    def time_timestamp_ops_diff1(self):
        self.s.diff()
class timestamp_tz_ops_diff1(timestamp_ops_diff1):
    """Timezone-aware variant of timestamp_ops_diff1 (smaller N)."""

    N = 10000

    def create(self):
        idx = date_range('20010101', periods=self.N, freq='s',
                         tz='US/Eastern')
        return Series(idx)
class timestamp_ops_diff2(object):
    """Benchmark subtracting a shifted datetime64 Series from itself."""

    goal_time = 0.2
    N = 1000000

    def setup(self):
        self.s = self.create()

    def create(self):
        idx = date_range('20010101', periods=self.N, freq='s')
        return Series(idx)

    def time_timestamp_ops_diff2(self):
        shifted = self.s.shift()
        (self.s - shifted)
class timestamp_tz_ops_diff2(timestamp_ops_diff2):
    """Timezone-aware variant of timestamp_ops_diff2 (smaller N)."""

    N = 10000

    def create(self):
        idx = date_range('20010101', periods=self.N, freq='s',
                         tz='US/Eastern')
        return Series(idx)
class timestamp_series_compare(object):
    """Benchmark comparing a scalar Timestamp against a datetime64 Series."""

    goal_time = 0.2
    N = 1000000

    def setup(self):
        self.halfway = self.N // 2 - 1
        self.s = self.create()
        self.ts = self.s[self.halfway]

    def create(self):
        idx = date_range('20010101', periods=self.N, freq='T')
        return Series(idx)

    def time_timestamp_series_compare(self):
        (self.ts >= self.s)
class timestamp_tz_series_compare(timestamp_series_compare):
    """Timezone-aware variant of timestamp_series_compare (smaller N)."""

    N = 10000

    def create(self):
        idx = date_range('20010101', periods=self.N, freq='T',
                         tz='US/Eastern')
        return Series(idx)
| 24.557252 | 122 | 0.638328 | 956 | 6,434 | 4.104603 | 0.084728 | 0.077472 | 0.076453 | 0.134557 | 0.877676 | 0.86315 | 0.858563 | 0.83104 | 0.791284 | 0.74261 | 0 | 0.082982 | 0.226453 | 6,434 | 261 | 123 | 24.651341 | 0.705445 | 0 | 0 | 0.686047 | 0 | 0 | 0.014454 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.290698 | false | 0 | 0.011628 | 0.034884 | 0.610465 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
d269a76fd336f3b55bafbc226590d43dc067dc89 | 21,169 | py | Python | pontoon/sync/tests/formats/test_silme.py | sudheesh001/pontoon | 046bb50113a76ff278de73036db395ea726cab43 | [
"BSD-3-Clause"
] | null | null | null | pontoon/sync/tests/formats/test_silme.py | sudheesh001/pontoon | 046bb50113a76ff278de73036db395ea726cab43 | [
"BSD-3-Clause"
] | null | null | null | pontoon/sync/tests/formats/test_silme.py | sudheesh001/pontoon | 046bb50113a76ff278de73036db395ea726cab43 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import absolute_import # Same name as silme library.
import os.path
import tempfile
from textwrap import dedent
from django_nose.tools import assert_equal, assert_raises, assert_true
from silme.format.dtd import FormatParser as DTDParser
from pontoon.base.tests import (
assert_attributes_equal,
create_tempfile,
LocaleFactory,
TestCase,
)
from pontoon.sync.formats import silme
from pontoon.sync.tests.formats import FormatTestsMixin
class SilmeResourceTests(TestCase):
    """Tests for silme.SilmeResource handling of missing translated files."""

    def test_init_missing_resource(self):
        """
        If the translated resource doesn't exist and no source resource
        is given, raise an IOError.
        """
        # mkdtemp() creates a fresh empty directory, so the nested path
        # below is guaranteed not to exist.
        path = os.path.join(tempfile.mkdtemp(), 'does', 'not', 'exist.dtd')
        with assert_raises(IOError):
            silme.SilmeResource(DTDParser, path, source_resource=None)

    def create_nonexistant_resource(self, path):
        # Helper: build a SilmeResource for a nonexistent translated file,
        # backed by a one-entity DTD source resource.
        source_path = create_tempfile(dedent("""
            <!ENTITY SourceString "Source String">
        """))
        source_resource = silme.SilmeResource(DTDParser, source_path)
        return silme.SilmeResource(
            DTDParser, path,
            source_resource=source_resource
        )

    def test_init_missing_resource_with_source(self):
        """
        If the translated resource doesn't exist but a source resource
        is given, return a resource with empty translations.
        """
        path = os.path.join(tempfile.mkdtemp(), 'does', 'not', 'exist.dtd')
        translated_resource = self.create_nonexistant_resource(path)
        # One translation mirrored from the source entity, with no strings.
        assert_equal(len(translated_resource.translations), 1)
        translation = translated_resource.translations[0]
        assert_equal(translation.strings, {})

    def test_save_create_dirs(self):
        """
        If the directories in a resource's path don't exist, create them
        on save.
        """
        path = os.path.join(tempfile.mkdtemp(), 'does', 'not', 'exist.dtd')
        translated_resource = self.create_nonexistant_resource(path)
        translated_resource.translations[0].strings = {None: 'New Translated String'}
        translated_resource.save(LocaleFactory.create())
        assert_true(os.path.exists(path))
# Fixture DTD document shared by DTDTests; entity order defines the
# translation indices (0-3) passed to the run_parse_* helpers.
BASE_DTD_FILE = """
<!-- Sample comment -->
<!ENTITY SourceString "Translated String">
<!-- First comment -->
<!-- Second comment -->
<!ENTITY MultipleComments "Translated Multiple Comments">
<!ENTITY NoCommentsorSources "Translated No Comments or Sources">
<!ENTITY EmptyTranslation "">
"""
class DTDTests(FormatTestsMixin, TestCase):
    """Parse/save round-trip tests for the DTD format via silme."""

    parse = staticmethod(silme.parse_dtd)
    # DTD entities have no separate key/source metadata.
    supports_keys = False
    supports_source = False
    supports_source_string = False

    def key(self, source_string):
        """DTD keys can't contain spaces."""
        return super(DTDTests, self).key(source_string).replace(' ', '')

    def test_parse_basic(self):
        self.run_parse_basic(BASE_DTD_FILE, 0)

    def test_parse_multiple_comments(self):
        self.run_parse_multiple_comments(BASE_DTD_FILE, 1)

    def test_parse_no_comments_no_sources(self):
        self.run_parse_no_comments_no_sources(BASE_DTD_FILE, 2)

    def test_parse_empty_translation(self):
        self.run_parse_empty_translation(BASE_DTD_FILE, 3)

    def test_save_basic(self):
        input_string = dedent("""
            <!-- Comment -->
            <!ENTITY SourceString "Source String">
        """)
        expected_string = dedent("""
            <!-- Comment -->
            <!ENTITY SourceString "New Translated String">
        """)
        self.run_save_basic(input_string, expected_string, source_string=input_string)

    def test_save_remove(self):
        """Deleting strings removes them completely from the DTD file."""
        input_string = dedent("""
            <!-- Comment -->
            <!ENTITY SourceString "Source String">
        """)
        expected_string = dedent("""
            <!-- Comment -->
        """)
        self.run_save_remove(input_string, expected_string, source_string=input_string)

    def test_save_source_removed(self):
        """
        If an entity is missing from the source resource, remove it from
        the translated resource.
        """
        source_string = dedent("""
            <!ENTITY SourceString "Source String">
        """)
        input_string = dedent("""
            <!ENTITY MissingSourceString "Translated Missing String">
            <!ENTITY SourceString "Translated String">
        """)
        expected_string = dedent("""
            <!ENTITY SourceString "Translated String">
        """)
        self.run_save_no_changes(input_string, expected_string, source_string=source_string)

    def test_save_source_no_translation(self):
        """
        If an entity is missing from the translated resource and has no
        translation, do not add it back in.
        """
        source_string = dedent("""
            <!ENTITY SourceString "Source String">
            <!ENTITY OtherSourceString "Other String">
        """)
        input_string = dedent("""
            <!ENTITY OtherSourceString "Translated Other String">
        """)
        self.run_save_no_changes(input_string, input_string, source_string=source_string)

    def test_save_translation_missing(self):
        source_string = dedent("""
            <!ENTITY String "Source String">
            <!ENTITY MissingString "Missing Source String">
        """)
        input_string = dedent("""
            <!ENTITY String "Translated String">
        """)
        expected_string = dedent("""
            <!ENTITY String "Translated String">
            <!ENTITY MissingString "Translated Missing String">
        """)
        self.run_save_translation_missing(source_string, input_string, expected_string)

    def test_save_translation_identical(self):
        source_string = dedent("""
            <!ENTITY String "Source String">
        """)
        input_string = dedent("""
            <!ENTITY String "Translated String">
        """)
        expected_string = dedent("""
            <!ENTITY String "Source String">
        """)
        self.run_save_translation_identical(source_string, input_string, expected_string)
# Fixture .properties document shared by PropertiesTests; entry order
# defines the translation indices (0-3) used by the run_parse_* helpers.
BASE_PROPERTIES_FILE = """
# Sample comment
SourceString=Translated String
# First comment
# Second comment
MultipleComments=Translated Multiple Comments
NoCommentsorSources=Translated No Comments or Sources
EmptyTranslation=
"""
class PropertiesTests(FormatTestsMixin, TestCase):
    """Parse/save round-trip tests for the .properties format via silme."""

    parse = staticmethod(silme.parse_properties)
    supports_keys = False
    supports_source = False
    supports_source_string = False

    def key(self, source_string):
        """Properties keys can't contain spaces."""
        return super(PropertiesTests, self).key(source_string).replace(' ', '')

    def test_parse_basic(self):
        self.run_parse_basic(BASE_PROPERTIES_FILE, 0)

    def test_parse_multiple_comments(self):
        self.run_parse_multiple_comments(BASE_PROPERTIES_FILE, 1)

    def test_parse_no_comments_no_sources(self):
        self.run_parse_no_comments_no_sources(BASE_PROPERTIES_FILE, 2)

    def test_parse_empty_translation(self):
        self.run_parse_empty_translation(BASE_PROPERTIES_FILE, 3)

    def test_save_basic(self):
        input_string = dedent("""
            # Comment
            SourceString=Source String
        """)
        expected_string = dedent("""
            # Comment
            SourceString=New Translated String
        """)
        self.run_save_basic(input_string, expected_string, source_string=input_string)

    def test_save_remove(self):
        """
        Deleting strings removes them completely from the properties
        file.
        """
        input_string = dedent("""
            # Comment
            SourceString=Source String
        """)
        expected_string = dedent("""
            # Comment
        """)
        self.run_save_remove(input_string, expected_string, source_string=input_string)

    def test_save_source_removed(self):
        """
        If an entity is missing from the source resource, remove it from
        the translated resource.
        """
        source_string = dedent("""
            SourceString=Source String
        """)
        input_string = dedent("""
            MissingSourceString=Translated Missing String
            SourceString=Translated String
        """)
        expected_string = dedent("""
            SourceString=Translated String
        """)
        self.run_save_no_changes(input_string, expected_string, source_string=source_string)

    def test_save_source_no_translation(self):
        """
        If an entity is missing from the translated resource and has no
        translation, do not add it back in.
        """
        source_string = dedent("""
            SourceString=Source String
            OtherSourceString=Other String
        """)
        input_string = dedent("""
            OtherSourceString=Translated Other String
        """)
        self.run_save_no_changes(input_string, input_string, source_string=source_string)

    def test_save_translation_missing(self):
        source_string = dedent("""
            String=Source String
            MissingString=Missing Source String
        """)
        input_string = dedent("""
            String=Translated String
        """)
        expected_string = dedent("""
            String=Translated String
            MissingString=Translated Missing String
        """)
        self.run_save_translation_missing(source_string, input_string, expected_string)

    def test_save_translation_identical(self):
        source_string = dedent("""
            String=Source String
        """)
        input_string = dedent("""
            String=Translated String
        """)
        expected_string = dedent("""
            String=Source String
        """)
        self.run_save_translation_identical(source_string, input_string, expected_string)
# Fixture .ini document shared by IniTests; same four entries as the
# other fixtures, wrapped in a [Strings] section.
BASE_INI_FILE = """
[Strings]
# Sample comment
SourceString=Translated String
# First comment
# Second comment
MultipleComments=Translated Multiple Comments
NoCommentsorSources=Translated No Comments or Sources
EmptyTranslation=
"""
class IniTests(FormatTestsMixin, TestCase):
    """Parse/save round-trip tests for the .ini format via silme."""

    # Fix: this class previously reused silme.parse_properties, but the
    # fixtures here contain a "[Strings]" section header which only the
    # ini parser understands/round-trips.
    parse = staticmethod(silme.parse_ini)
    supports_keys = False
    supports_source = False
    supports_source_string = False

    def key(self, source_string):
        """Ini keys can't contain spaces."""
        return super(IniTests, self).key(source_string).replace(' ', '')

    def test_parse_basic(self):
        self.run_parse_basic(BASE_INI_FILE, 0)

    def test_parse_multiple_comments(self):
        self.run_parse_multiple_comments(BASE_INI_FILE, 1)

    def test_parse_no_comments_no_sources(self):
        self.run_parse_no_comments_no_sources(BASE_INI_FILE, 2)

    def test_parse_empty_translation(self):
        self.run_parse_empty_translation(BASE_INI_FILE, 3)

    def test_save_basic(self):
        input_string = dedent("""
            [Strings]
            # Comment
            SourceString=Source String
        """)
        expected_string = dedent("""
            [Strings]
            # Comment
            SourceString=New Translated String
        """)
        self.run_save_basic(input_string, expected_string, source_string=input_string)

    def test_save_remove(self):
        """
        Deleting strings removes them completely from the ini file.
        """
        input_string = dedent("""
            [Strings]
            # Comment
            SourceString=Source String
        """)
        expected_string = dedent("""
            [Strings]
            # Comment
        """)
        self.run_save_remove(input_string, expected_string, source_string=input_string)

    def test_save_source_removed(self):
        """
        If an entity is missing from the source resource, remove it from
        the translated resource.
        """
        source_string = dedent("""
            [Strings]
            SourceString=Source String
        """)
        input_string = dedent("""
            [Strings]
            MissingSourceString=Translated Missing String
            SourceString=Translated String
        """)
        expected_string = dedent("""
            [Strings]
            SourceString=Translated String
        """)
        self.run_save_no_changes(input_string, expected_string, source_string=source_string)

    def test_save_source_no_translation(self):
        """
        If an entity is missing from the translated resource and has no
        translation, do not add it back in.
        """
        source_string = dedent("""
            [Strings]
            SourceString=Source String
            OtherSourceString=Other String
        """)
        input_string = dedent("""
            [Strings]
            OtherSourceString=Translated Other String
        """)
        self.run_save_no_changes(input_string, input_string, source_string=source_string)

    def test_save_translation_missing(self):
        source_string = dedent("""
            [Strings]
            String=Source String
            MissingString=Missing Source String
        """)
        input_string = dedent("""
            [Strings]
            String=Translated String
        """)
        expected_string = dedent("""
            [Strings]
            String=Translated String
            MissingString=Translated Missing String
        """)
        self.run_save_translation_missing(source_string, input_string, expected_string)

    def test_save_translation_identical(self):
        source_string = dedent("""
            [Strings]
            String=Source String
        """)
        input_string = dedent("""
            [Strings]
            String=Translated String
        """)
        expected_string = dedent("""
            [Strings]
            String=Source String
        """)
        self.run_save_translation_identical(source_string, input_string, expected_string)
# Fixture .inc (#define) document shared by IncTests.  The trailing
# \x20 escape keeps the empty translation's trailing space visible.
BASE_INC_FILE = """
# Sample comment
#define SourceString Translated String
# First comment
# Second comment
#define MultipleComments Translated Multiple Comments
#define NoCommentsorSources Translated No Comments or Sources
#define EmptyTranslation\x20
"""
class IncTests(FormatTestsMixin, TestCase):
    """Parse/save round-trip tests for the .inc (#define) format via
    silme, including the special MOZ_LANGPACK_CONTRIBUTORS handling."""

    parse = staticmethod(silme.parse_inc)
    supports_keys = False
    supports_source = False
    supports_source_string = False

    def key(self, source_string):
        """Inc keys can't contain spaces."""
        return super(IncTests, self).key(source_string).replace(' ', '')

    def test_parse_basic(self):
        self.run_parse_basic(BASE_INC_FILE, 0)

    def test_parse_multiple_comments(self):
        self.run_parse_multiple_comments(BASE_INC_FILE, 1)

    def test_parse_no_comments_no_sources(self):
        self.run_parse_no_comments_no_sources(BASE_INC_FILE, 2)

    def test_parse_empty_translation(self):
        self.run_parse_empty_translation(BASE_INC_FILE, 3)

    def test_save_basic(self):
        input_string = dedent("""
            # Comment
            #define SourceString Source String
        """)
        expected_string = dedent("""
            # Comment
            #define SourceString New Translated String
        """)
        self.run_save_basic(input_string, expected_string, source_string=input_string)

    def test_save_remove(self):
        """
        Deleting strings removes them completely from the inc file.
        """
        input_string = dedent("""
            # Comment
            #define SourceString Source String
        """)
        expected_string = dedent("""
            # Comment
        """)
        self.run_save_remove(input_string, expected_string, source_string=input_string)

    def test_save_source_removed(self):
        """
        If an entity is missing from the source resource, remove it from
        the translated resource.
        """
        source_string = dedent("""
            #define SourceString Source String
        """)
        input_string = dedent("""
            #define MissingSourceString Translated Missing String
            #define SourceString Translated String
        """)
        expected_string = dedent("""
            #define SourceString Translated String
        """)
        self.run_save_no_changes(input_string, expected_string, source_string=source_string)

    def test_save_source_no_translation(self):
        """
        If an entity is missing from the translated resource and has no
        translation, do not add it back in.
        """
        source_string = dedent("""
            #define SourceString Source String
            #define OtherSourceString Other String
        """)
        input_string = dedent("""
            #define OtherSourceString Translated Other String
        """)
        self.run_save_no_changes(input_string, input_string, source_string=source_string)

    def test_save_translation_missing(self):
        source_string = dedent("""
            #define String Source String
            #define MissingString Missing Source String
        """)
        input_string = dedent("""
            #define String Translated String
        """)
        expected_string = dedent("""
            #define String Translated String
            #define MissingString Translated Missing String
        """)
        self.run_save_translation_missing(source_string, input_string, expected_string)

    def test_save_translation_identical(self):
        source_string = dedent("""
            #define String Source String
        """)
        input_string = dedent("""
            #define String Translated String
        """)
        expected_string = dedent("""
            #define String Source String
        """)
        self.run_save_translation_identical(source_string, input_string, expected_string)

    def test_moz_langpack_contributors(self):
        """
        If a .inc file has a commented-out entity named
        MOZ_LANGPACK_CONTRIBUTORS, the parser should un-comment it and
        process it as an entity.
        """
        input_string = dedent("""
            #define String Some String
            # #define MOZ_LANGPACK_CONTRIBUTORS Contributor list
        """)

        path, resource = self.parse_string(input_string)
        assert_equal(len(resource.translations), 2)
        assert_attributes_equal(
            resource.translations[1],
            key='MOZ_LANGPACK_CONTRIBUTORS',
            strings={None: 'Contributor list'}
        )

    def test_moz_langpack_contributors_source(self):
        """
        If a source resource was provided, meaning that we're parsing a
        translated resource, do not uncomment MOZ_LANGPACK_CONTRIBUTORS.
        """
        input_string = dedent("""
            #define String Some String
            # #define MOZ_LANGPACK_CONTRIBUTORS Contributor list
        """)
        source_string = dedent("""
            #define String Translated String
            # #define MOZ_LANGPACK_CONTRIBUTORS Other Contributors
        """)

        path, resource = self.parse_string(input_string, source_string=source_string)
        assert_equal(len(resource.translations), 2)
        assert_attributes_equal(
            resource.translations[1],
            key='MOZ_LANGPACK_CONTRIBUTORS',
            strings={}  # Imported from source == no translations
        )

    def test_save_moz_langpack_contributors(self):
        """
        When saving, if a translation exists for
        MOZ_LANGPACK_CONTRIBUTORS, uncomment it.
        """
        input_string = dedent("""
            #define String Some String
            # #define MOZ_LANGPACK_CONTRIBUTORS Contributor list
        """)
        source_string = dedent("""
            #define String Translated String
            # #define MOZ_LANGPACK_CONTRIBUTORS Contributor list
        """)

        path, resource = self.parse_string(input_string, source_string=source_string)
        resource.entities['MOZ_LANGPACK_CONTRIBUTORS'].strings = {None: 'New Contributor list'}
        resource.save(self.locale)

        self.assert_file_content(path, dedent("""
            #define String Some String
            #define MOZ_LANGPACK_CONTRIBUTORS New Contributor list
        """))

    def test_save_moz_langpack_contributors_no_translations(self):
        """
        When saving, if a translation does not exist for
        MOZ_LANGPACK_CONTRIBUTORS, leave it commented.
        """
        input_string = dedent("""
            #define String Some String
            #define MOZ_LANGPACK_CONTRIBUTORS Modified contributor list
        """)
        source_string = dedent("""
            #define String Translated String
            # #define MOZ_LANGPACK_CONTRIBUTORS Contributor list
        """)

        path, resource = self.parse_string(input_string, source_string=source_string)
        # Emptying the strings drops the translation, so save() falls back
        # to the commented-out form from the source resource.
        resource.entities['MOZ_LANGPACK_CONTRIBUTORS'].strings = {}
        resource.save(self.locale)

        self.assert_file_content(path, dedent("""
            #define String Some String
            # #define MOZ_LANGPACK_CONTRIBUTORS Contributor list
        """))
| 31.454681 | 95 | 0.63456 | 2,180 | 21,169 | 5.895413 | 0.080275 | 0.095238 | 0.058824 | 0.050109 | 0.841036 | 0.826097 | 0.765717 | 0.729925 | 0.71343 | 0.702692 | 0 | 0.001633 | 0.276867 | 21,169 | 672 | 96 | 31.501488 | 0.837928 | 0.092588 | 0 | 0.790598 | 0 | 0 | 0.369062 | 0.044496 | 0 | 0 | 0 | 0 | 0.025641 | 1 | 0.111111 | false | 0 | 0.019231 | 0 | 0.185897 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
964bbc4fa0b69333c70f6d22cd54a12cb816c58c | 117 | py | Python | gsfpy/constants.py | irewolepeter/gsfpy_USM_Implementation | c4614ac3f7d833eb86ea38c7708108b130f96612 | [
"MIT"
] | 7 | 2020-07-01T07:12:19.000Z | 2022-01-20T20:39:57.000Z | gsfpy/constants.py | irewolepeter/gsfpy_USM_Implementation | c4614ac3f7d833eb86ea38c7708108b130f96612 | [
"MIT"
] | 36 | 2020-06-23T09:10:15.000Z | 2022-03-22T10:27:58.000Z | gsfpy/constants.py | irewolepeter/gsfpy_USM_Implementation | c4614ac3f7d833eb86ea38c7708108b130f96612 | [
"MIT"
] | 2 | 2021-02-07T13:21:52.000Z | 2021-06-24T19:16:16.000Z | from gsfpy import mirror_default_gsf_version_submodule
# Populate this module's globals with the names exposed by the default GSF
# version's "constants" submodule (delegated to the helper imported above).
mirror_default_gsf_version_submodule(globals(), "constants")
| 29.25 | 60 | 0.880342 | 15 | 117 | 6.333333 | 0.666667 | 0.273684 | 0.336842 | 0.484211 | 0.673684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.059829 | 117 | 3 | 61 | 39 | 0.863636 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
73dd7ac4efa60e06da75b885101ffdb45077299b | 9,227 | py | Python | tests/unit/file_handling_tests/test_config_secret_file_usage.py | blinkhealth/vault-anyconfig | 5f23cbcdf8d6bccb2e1a79d62b8757d5262fc4b4 | [
"Apache-2.0"
] | 6 | 2019-04-10T06:02:07.000Z | 2021-09-18T19:13:09.000Z | tests/unit/file_handling_tests/test_config_secret_file_usage.py | tomtom-international/vault-anyconf | 5292319c483a56108d6a1d6888520c964ae185b2 | [
"Apache-2.0"
] | 11 | 2019-04-01T08:08:56.000Z | 2021-01-08T20:34:58.000Z | tests/unit/file_handling_tests/test_config_secret_file_usage.py | LaudateCorpus1/vault-anyconfig | 5292319c483a56108d6a1d6888520c964ae185b2 | [
"Apache-2.0"
] | 5 | 2019-03-29T14:59:08.000Z | 2021-09-14T04:04:07.000Z | from unittest.mock import patch, mock_open, call, Mock
from pytest import fixture, warns
from copy import deepcopy
from json import dumps as jdumps
from stat import S_IRUSR, S_IWUSR
from os.path import abspath
from vault_anyconfig.vault_anyconfig import VaultAnyConfig
# @patch decorators apply bottom-up: the last decorator (Client.read)
# maps to the first mock parameter, and so on.
@patch("vault_anyconfig.vault_anyconfig.chmod")
@patch("builtins.open", new_callable=mock_open)
@patch("vault_anyconfig.vault_anyconfig.dump_base")
@patch("vault_anyconfig.vault_anyconfig.Client.read")
def test_dump(
    mock_hvac_client_read,
    mock_dump,
    mock_open_handle,
    mock_chmod,
    localhost_client,
    gen_input_config,
    gen_processed_config,
    gen_vault_response_kv1,
    file_path,
    file_path_normalized,
    file_contents,
    secret_path,
):
    """
    Basic test of the dump function with secret file writing
    """
    mock_hvac_client_read.return_value = gen_vault_response_kv1()

    localhost_client.dump(gen_input_config(), "out.json", process_secret_files=True)

    mock_dump.assert_called_once_with(gen_input_config(), "out.json")
    mock_hvac_client_read.assert_called_once_with(secret_path)
    # Secret is written to the normalized path and locked to owner-read.
    mock_open_handle.assert_called_once_with(file_path_normalized, "w")
    mock_open_handle().write.assert_called_once_with(file_contents)
    mock_chmod.assert_called_once_with(file_path_normalized, S_IRUSR)
# Mock parameters follow the bottom-up @patch decorator order.
@patch("vault_anyconfig.vault_anyconfig.chmod")
@patch("builtins.open", new_callable=mock_open)
@patch("vault_anyconfig.vault_anyconfig.dumps_base")
@patch("vault_anyconfig.vault_anyconfig.Client.read")
def test_dumps(
    mock_hvac_client_read,
    mock_dumps,
    mock_open_handle,
    mock_chmod,
    localhost_client,
    gen_input_config,
    gen_processed_config,
    gen_vault_response_kv1,
    file_path,
    file_path_normalized,
    file_contents,
    secret_path,
):
    """
    Basic test of the dumps function with secret file writing
    """
    mock_hvac_client_read.return_value = gen_vault_response_kv1()

    localhost_client.dumps(gen_input_config(), process_secret_files=True)

    mock_dumps.assert_called_once_with(gen_input_config())
    mock_hvac_client_read.assert_called_once_with(secret_path)
    mock_open_handle.assert_called_once_with(file_path_normalized, "w")
    mock_open_handle().write.assert_called_once_with(file_contents)
    mock_chmod.assert_called_once_with(file_path_normalized, S_IRUSR)
# Mock parameters follow the bottom-up @patch decorator order.
@patch("vault_anyconfig.vault_anyconfig.chmod")
@patch("builtins.open", new_callable=mock_open)
@patch("vault_anyconfig.vault_anyconfig.load_base")
@patch("vault_anyconfig.vault_anyconfig.Client.read")
def test_load(
    mock_hvac_client_read,
    mock_load,
    mock_open_handle,
    mock_chmod,
    localhost_client,
    gen_input_config,
    gen_processed_config,
    gen_vault_response_kv1,
    file_path,
    file_path_normalized,
    file_contents,
    secret_path,
):
    """
    Basic test of the load function with file writing
    """
    mock_load.return_value = gen_input_config()
    mock_hvac_client_read.return_value = gen_vault_response_kv1()

    # Loading resolves secrets and writes any secret files as a side effect.
    assert localhost_client.load("in.json", process_secret_files=True) == gen_processed_config()

    mock_load.assert_called_once_with("in.json")
    mock_hvac_client_read.assert_called_once_with(secret_path)
    mock_open_handle.assert_called_once_with(file_path_normalized, "w")
    mock_open_handle().write.assert_called_once_with(file_contents)
    mock_chmod.assert_called_once_with(file_path_normalized, S_IRUSR)
# Mock parameters follow the bottom-up @patch decorator order.
@patch("vault_anyconfig.vault_anyconfig.chmod")
@patch("builtins.open", new_callable=mock_open)
@patch("vault_anyconfig.vault_anyconfig.loads_base")
@patch("vault_anyconfig.vault_anyconfig.Client.read")
def test_loads(
    mock_hvac_client_read,
    mock_loads,
    mock_open_handle,
    mock_chmod,
    localhost_client,
    gen_input_config,
    gen_processed_config,
    gen_vault_response_kv1,
    file_path,
    file_path_normalized,
    file_contents,
    secret_path,
):
    """
    Basic test of the loads function with file writing
    """
    mock_loads.return_value = gen_input_config()
    mock_hvac_client_read.return_value = gen_vault_response_kv1()

    # loads() takes the config as a JSON string rather than a file path.
    input_config_json = jdumps(gen_input_config())

    assert localhost_client.loads(input_config_json, process_secret_files=True) == gen_processed_config()

    mock_loads.assert_called_once_with(jdumps(gen_input_config()))
    mock_hvac_client_read.assert_called_once_with(secret_path)
    mock_open_handle.assert_called_once_with(file_path_normalized, "w")
    mock_open_handle().write.assert_called_once_with(file_contents)
    mock_chmod.assert_called_once_with(file_path_normalized, S_IRUSR)
# Mock parameters follow the bottom-up @patch decorator order.
@patch("vault_anyconfig.vault_anyconfig.chmod")
@patch("builtins.open", new_callable=mock_open)
@patch("vault_anyconfig.vault_anyconfig.dump_base")
@patch("vault_anyconfig.vault_anyconfig.Client.read")
def test_dump_config_file_reference(
    mock_hvac_client_read,
    mock_dump,
    mock_open_handle,
    mock_chmod,
    localhost_client,
    gen_input_config,
    gen_processed_config,
    gen_vault_response_kv1,
    file_path,
    file_path_normalized,
    file_contents,
    secret_path,
):
    """
    Tests that the vault_files section can reference a file specified in the configuration
    """
    # "acme.cert_path" is a dotted reference into the config rather than a
    # literal filesystem path.
    by_ref_file_path = "acme.cert_path"
    input_config = gen_input_config({by_ref_file_path: secret_path})

    mock_hvac_client_read.return_value = gen_vault_response_kv1()

    localhost_client.dump(input_config, "out.json", process_secret_files=True)

    mock_hvac_client_read.assert_called_once_with(secret_path)
    mock_open_handle.assert_called_once_with(file_path_normalized, "w")
    mock_open_handle().write.assert_called_once_with(file_contents)
    mock_chmod.assert_called_once_with(file_path_normalized, S_IRUSR)
@patch("vault_anyconfig.vault_anyconfig.dump_base")
@patch("vault_anyconfig.vault_anyconfig.Client.read")
def test_dump_disable_vault_files(
    mock_hvac_client_read, mock_dump, localhost_client, gen_vault_response_kv1, gen_input_config
):
    """
    Ensure when process_secret_files is set to false, mock_hvac_client is never called (and thus the code for writing files was not triggered)
    """
    # return_value is set defensively; the assertion below proves the
    # client is never actually consulted.
    mock_hvac_client_read.return_value = gen_input_config()

    localhost_client.dump(gen_input_config(), "out.json", process_secret_files=False)

    mock_hvac_client_read.assert_not_called()
    mock_dump.assert_called_with(gen_input_config(), "out.json")
@patch("vault_anyconfig.vault_anyconfig.dumps_base")
@patch("vault_anyconfig.vault_anyconfig.Client.read")
def test_dumps_disable_vault_files(mock_hvac_client_read, mock_dumps, localhost_client, gen_input_config):
    """
    Ensure when process_secret_files is set to false, mock_hvac_client is never called (and thus the code for writing files was not triggered)
    """
    localhost_client.dumps(gen_input_config(), process_secret_files=False)

    # Vault is never consulted; the config is serialized untouched.
    mock_hvac_client_read.assert_not_called()
    mock_dumps.assert_called_with(gen_input_config())
@patch("vault_anyconfig.vault_anyconfig.load_base")
@patch("vault_anyconfig.vault_anyconfig.Client.read")
def test_load_disable_vault_files(
    mock_hvac_client_read, mock_load, localhost_client, gen_input_config, gen_processed_config
):
    """
    Ensure when process_secret_files is set to false, mock_hvac_client is never called (and thus the code for writing files was not triggered)
    """
    mock_load.return_value = gen_input_config()

    # Secrets are still processed in-memory; only file writing is skipped.
    assert localhost_client.load("in.json", process_secret_files=False) == gen_processed_config()

    mock_hvac_client_read.assert_not_called()
    mock_load.assert_called_with("in.json")
@patch("vault_anyconfig.vault_anyconfig.loads_base")
@patch("vault_anyconfig.vault_anyconfig.Client.read")
def test_loads_disable_vault_files(mock_hvac_client_read, mock_loads, localhost_client, gen_input_config):
    """
    Ensure when process_secret_files is set to false, mock_hvac_client is never called (and thus the code for writing files was not triggered)
    """
    mock_loads.return_value = gen_input_config()

    # loads() takes the config as a JSON string rather than a file path.
    string_raw_config = jdumps(gen_input_config())

    localhost_client.loads(string_raw_config, process_secret_files=False)

    mock_hvac_client_read.assert_not_called()
    mock_loads.assert_called_with(string_raw_config)
# Mock parameters follow the bottom-up @patch decorator order.
@patch("vault_anyconfig.vault_anyconfig.chmod")
@patch("builtins.open", new_callable=mock_open)
@patch("vault_anyconfig.vault_anyconfig.dump_base")
@patch("vault_anyconfig.vault_anyconfig.Client.read")
def test_dump_passthrough(
    mock_hvac_client_read,
    mock_dump,
    mock_open_handle,
    mock_chmod,
    localhost_client,
    gen_input_config,
    gen_processed_config,
    gen_vault_response_kv1,
    file_path,
    file_path_normalized,
    file_contents,
    secret_path,
):
    """
    Tests a warning is thrown where there are files specified but the passthrough flag is set.
    """
    mock_hvac_client_read.return_value = gen_vault_response_kv1()

    # A bare VaultAnyConfig() (no Vault connection) is in passthrough mode,
    # so requesting secret file processing should only warn.
    with warns(UserWarning):
        VaultAnyConfig().dump(gen_input_config(), "out.json", process_secret_files=True)

    mock_dump.assert_called_once_with(gen_input_config(), "out.json")
    mock_hvac_client_read.assert_not_called()
    mock_open_handle.assert_not_called()
    mock_chmod.assert_not_called()
| 33.071685 | 142 | 0.784545 | 1,304 | 9,227 | 5.075153 | 0.082055 | 0.114234 | 0.065579 | 0.114234 | 0.878664 | 0.863705 | 0.831067 | 0.81233 | 0.775008 | 0.733152 | 0 | 0.001622 | 0.131462 | 9,227 | 278 | 143 | 33.190647 | 0.824183 | 0.10285 | 0 | 0.756477 | 0 | 0 | 0.153439 | 0.131168 | 0 | 0 | 0 | 0 | 0.202073 | 1 | 0.051813 | false | 0.005181 | 0.036269 | 0 | 0.088083 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
73f33ea7c3207be42370191f03208f2dcc37e4bd | 2,488 | py | Python | extractor/TimeExtractor.py | awesome-archive/GeneralNewsExtractor | 5d08d51539e54f1bc852e3856cc34743b7b2cb08 | [
"MIT"
] | null | null | null | extractor/TimeExtractor.py | awesome-archive/GeneralNewsExtractor | 5d08d51539e54f1bc852e3856cc34743b7b2cb08 | [
"MIT"
] | null | null | null | extractor/TimeExtractor.py | awesome-archive/GeneralNewsExtractor | 5d08d51539e54f1bc852e3856cc34743b7b2cb08 | [
"MIT"
] | 1 | 2021-05-20T03:55:37.000Z | 2021-05-20T03:55:37.000Z | import re
from lxml.html import HtmlElement
class TimeExtractor:
    """Extract the first publish-time-like string from an HTML element.

    Patterns are ordered from most to least specific, so a full datetime
    (date + h:m:s) is preferred over a bare date.
    """

    def __init__(self):
        # Regex alternatives, most specific first.  NOTE(review): the
        # character classes [-|/|.] also match a literal '|'; kept as-is
        # to preserve the original matching behaviour.
        self.time_pattern = [
            r"(\d{4}[-|/|.]\d{1,2}[-|/|.]\d{1,2}\s*?[0-1]?[0-9]:[0-5]?[0-9]:[0-5]?[0-9])",
            r"(\d{4}[-|/|.]\d{1,2}[-|/|.]\d{1,2}\s*?[2][0-3]:[0-5]?[0-9]:[0-5]?[0-9])",
            r"(\d{4}[-|/|.]\d{1,2}[-|/|.]\d{1,2}\s*?[0-1]?[0-9]:[0-5]?[0-9])",
            r"(\d{4}[-|/|.]\d{1,2}[-|/|.]\d{1,2}\s*?[2][0-3]:[0-5]?[0-9])",
            r"(\d{4}[-|/|.]\d{1,2}[-|/|.]\d{1,2}\s*?[1-24]\d时[0-60]\d分)([1-24]\d时)",
            r"(\d{2}[-|/|.]\d{1,2}[-|/|.]\d{1,2}\s*?[0-1]?[0-9]:[0-5]?[0-9]:[0-5]?[0-9])",
            r"(\d{2}[-|/|.]\d{1,2}[-|/|.]\d{1,2}\s*?[2][0-3]:[0-5]?[0-9]:[0-5]?[0-9])",
            r"(\d{2}[-|/|.]\d{1,2}[-|/|.]\d{1,2}\s*?[0-1]?[0-9]:[0-5]?[0-9])",
            r"(\d{2}[-|/|.]\d{1,2}[-|/|.]\d{1,2}\s*?[2][0-3]:[0-5]?[0-9])",
            r"(\d{2}[-|/|.]\d{1,2}[-|/|.]\d{1,2}\s*?[1-24]\d时[0-60]\d分)([1-24]\d时)",
            r"(\d{4}年\d{1,2}月\d{1,2}日\s*?[0-1]?[0-9]:[0-5]?[0-9]:[0-5]?[0-9])",
            r"(\d{4}年\d{1,2}月\d{1,2}日\s*?[2][0-3]:[0-5]?[0-9]:[0-5]?[0-9])",
            r"(\d{4}年\d{1,2}月\d{1,2}日\s*?[0-1]?[0-9]:[0-5]?[0-9])",
            r"(\d{4}年\d{1,2}月\d{1,2}日\s*?[2][0-3]:[0-5]?[0-9])",
            r"(\d{4}年\d{1,2}月\d{1,2}日\s*?[1-24]\d时[0-60]\d分)([1-24]\d时)",
            r"(\d{2}年\d{1,2}月\d{1,2}日\s*?[0-1]?[0-9]:[0-5]?[0-9]:[0-5]?[0-9])",
            r"(\d{2}年\d{1,2}月\d{1,2}日\s*?[2][0-3]:[0-5]?[0-9]:[0-5]?[0-9])",
            r"(\d{2}年\d{1,2}月\d{1,2}日\s*?[0-1]?[0-9]:[0-5]?[0-9])",
            r"(\d{2}年\d{1,2}月\d{1,2}日\s*?[2][0-3]:[0-5]?[0-9])",
            r"(\d{2}年\d{1,2}月\d{1,2}日\s*?[1-24]\d时[0-60]\d分)([1-24]\d时)",
            r"(\d{1,2}月\d{1,2}日\s*?[0-1]?[0-9]:[0-5]?[0-9]:[0-5]?[0-9])",
            r"(\d{1,2}月\d{1,2}日\s*?[2][0-3]:[0-5]?[0-9]:[0-5]?[0-9])",
            r"(\d{1,2}月\d{1,2}日\s*?[0-1]?[0-9]:[0-5]?[0-9])",
            r"(\d{1,2}月\d{1,2}日\s*?[2][0-3]:[0-5]?[0-9])",
            r"(\d{1,2}月\d{1,2}日\s*?[1-24]\d时[0-60]\d分)([1-24]\d时)",
            r"(\d{4}[-|/|.]\d{1,2}[-|/|.]\d{1,2})",
            r"(\d{2}[-|/|.]\d{1,2}[-|/|.]\d{1,2})",
            r"(\d{4}年\d{1,2}月\d{1,2}日)",
            r"(\d{2}年\d{1,2}月\d{1,2}日)",
            r"(\d{1,2}月\d{1,2}日)"
        ]

    def extractor(self, element: 'HtmlElement') -> str:
        """Return the first matching time string in the element's text,
        or '' when no pattern matches.
        """
        text = ''.join(element.xpath('.//text()'))
        for pattern in self.time_pattern:
            match = re.search(pattern, text)
            if match:
                return match.group(1)
        # Bug fix: the original returned '' as soon as the FIRST pattern
        # failed, so patterns 2..30 were never tried.  Now every pattern
        # is attempted before giving up.
        return ''
| 51.833333 | 89 | 0.317122 | 583 | 2,488 | 1.337907 | 0.073756 | 0.153846 | 0.230769 | 0.153846 | 0.725641 | 0.725641 | 0.725641 | 0.725641 | 0.698718 | 0.698718 | 0 | 0.193188 | 0.209405 | 2,488 | 47 | 90 | 52.93617 | 0.203355 | 0 | 0 | 0 | 0 | 0.613636 | 0.651125 | 0.640273 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045455 | false | 0 | 0.045455 | 0 | 0.159091 | 0 | 0 | 0 | 1 | null | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
fb60fb064e311ddaa7cecd3740398ea200bde2f6 | 2,933 | py | Python | dags/inaturalist/postDw.py | mikkohei13/airflow | d7cf49b47fd5193e57e3b7025ecf9f92fdffb1d9 | [
"MIT"
] | null | null | null | dags/inaturalist/postDw.py | mikkohei13/airflow | d7cf49b47fd5193e57e3b7025ecf9f92fdffb1d9 | [
"MIT"
] | null | null | null | dags/inaturalist/postDw.py | mikkohei13/airflow | d7cf49b47fd5193e57e3b7025ecf9f92fdffb1d9 | [
"MIT"
] | null | null | null |
import requests
import json
from airflow.models import Variable
def postSingle(dwObs, target):
    """Push a single observation document to the laji.fi data warehouse.

    Args:
        dwObs: DW-formatted observation (dict), sent as the JSON body.
        target: "staging" or "production"; selects the API URL and token.
    Returns:
        True when the DW API answers 200.
    Raises:
        ValueError: when target is not a known environment.
        Exception: when the DW API answers with a non-200 status.
    """
    if "staging" == target:
        print("Pushing to staging API")
        targetUrl = "https://apitest.laji.fi/v0/warehouse/push?access_token=" + Variable.get("inat_staging_token")
    elif "production" == target:
        print("Pushing to production API")
        targetUrl = "https://api.laji.fi/v0/warehouse/push?access_token=" + Variable.get("inat_production_token")
    else:
        # Bug fix: previously fell through with targetUrl unbound, which
        # raised a confusing NameError further down.
        raise ValueError(f"Unknown target environment: {target}")

    # NOTE(review): targetUrl embeds the access token, so this print leaks
    # it into task logs -- consider masking before logging.
    print("Pushing to " + targetUrl)

    # Send the POST request and keep the response for status inspection.
    targetResponse = requests.post(url = targetUrl, json = dwObs)

    if 200 == targetResponse.status_code:
        print("DW API responded " + str(targetResponse.status_code))
        return True
    else:
        errorCode = str(targetResponse.status_code)
        print(targetResponse.text)
        raise Exception(f"DW API responded with error {errorCode}")
def postMulti(dwObs, target):
    """Push a batch of observation documents to the laji.fi data warehouse.

    Args:
        dwObs: DW-formatted payload (dict), sent as the JSON body.
        target: "staging" or "production"; selects the API URL and token.
    Returns:
        True when the API answers 200.
    Raises:
        ValueError: when target is not a known environment.
        Exception: when the API answers with a non-200 status.
    """
    if "staging" == target:
        print("Pushing to staging API.")
        targetUrl = "https://apitest.laji.fi/v0/warehouse/push?access_token=" + Variable.get("inat_staging_token")
    elif "production" == target:
        print("Pushing to production API")
        targetUrl = "https://api.laji.fi/v0/warehouse/push?access_token=" + Variable.get("inat_production_token")
    else:
        # Bug fix: previously fell through with targetUrl unbound, which
        # raised a confusing NameError further down.
        raise ValueError(f"Unknown target environment: {target}")

    # NOTE(review): targetUrl embeds the access token, so this print leaks
    # it into task logs -- consider masking before logging.
    print("Pushing to " + targetUrl)

    # Send the POST request and keep the response for status inspection.
    targetResponse = requests.post(url = targetUrl, json = dwObs)

    if 200 == targetResponse.status_code:
        print("API responded " + str(targetResponse.status_code))
        return True
    else:
        errorCode = str(targetResponse.status_code)
        raise Exception(f"API responded with error {errorCode}")
def postSingleMock(dwObs, mock):
    """Push a single observation to the mocklab test endpoint.

    Args:
        dwObs: JSON-serializable observation payload.
        mock: unused; kept for call-site compatibility.

    Returns:
        True when the mock API responds with HTTP 200.

    Raises:
        Exception: if the mock API responds with a non-200 status code.
    """
    print("Pushing to mock API.")
    # The mock endpoint takes no access token; the previous unused
    # Variable.get("inat_mock_token") lookup was removed as dead code.
    targetUrl = "https://14935.mocklab.io/inat"
    targetResponse = requests.post(url=targetUrl, json=dwObs)
    if targetResponse.status_code == 200:
        print("Mock API responded " + str(targetResponse.status_code))
        return True
    errorCode = str(targetResponse.status_code)
    raise Exception(f"Mock API responded with error {errorCode}")
def postMultiMock(dwObs, lastUpdateKey):
    """Push a batch of observations to the mocklab test endpoint.

    Args:
        dwObs: JSON-serializable batch payload.
        lastUpdateKey: unused; kept for call-site compatibility.

    Returns:
        True when the mock API responds with HTTP 200.

    Raises:
        Exception: if the mock API responds with a non-200 status code.
    """
    print("Pushing to mock API.")
    # The mock endpoint takes no access token; the previous unused
    # Variable.get("inat_mock_token") lookup was removed as dead code.
    targetUrl = "https://14935.mocklab.io/inat"
    targetResponse = requests.post(url=targetUrl, json=dwObs)
    if targetResponse.status_code == 200:
        print("Mock API responded " + str(targetResponse.status_code))
        return True
    errorCode = str(targetResponse.status_code)
    raise Exception(f"Mock API responded with error {errorCode}")
| 30.237113 | 110 | 0.724173 | 363 | 2,933 | 5.768595 | 0.192837 | 0.114613 | 0.137536 | 0.103152 | 0.899713 | 0.899713 | 0.834288 | 0.834288 | 0.834288 | 0.834288 | 0 | 0.010612 | 0.164678 | 2,933 | 96 | 111 | 30.552083 | 0.844082 | 0.125469 | 0 | 0.75 | 0 | 0 | 0.311765 | 0.016471 | 0 | 0 | 0 | 0 | 0 | 1 | 0.071429 | false | 0 | 0.053571 | 0 | 0.196429 | 0.232143 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fb64edcbdfbb18c93d2d96c9e8c41a54ba19c32e | 39,920 | py | Python | tests/components/flux/test_switch.py | pcaston/core | e74d946cef7a9d4e232ae9e0ba150d18018cfe33 | [
"Apache-2.0"
] | 1 | 2021-07-08T20:09:55.000Z | 2021-07-08T20:09:55.000Z | tests/components/flux/test_switch.py | pcaston/core | e74d946cef7a9d4e232ae9e0ba150d18018cfe33 | [
"Apache-2.0"
] | 47 | 2021-02-21T23:43:07.000Z | 2022-03-31T06:07:10.000Z | tests/components/flux/test_switch.py | OpenPeerPower/core | f673dfac9f2d0c48fa30af37b0a99df9dd6640ee | [
"Apache-2.0"
] | null | null | null | """The tests for the Flux switch platform."""
from unittest.mock import patch
import pytest
from openpeerpower.components import light, switch
from openpeerpower.const import (
ATTR_ENTITY_ID,
CONF_PLATFORM,
SERVICE_TURN_ON,
STATE_ON,
SUN_EVENT_SUNRISE,
)
from openpeerpower.core import State
from openpeerpower.setup import async_setup_component
import openpeerpower.util.dt as dt_util
from tests.common import (
assert_setup_component,
async_fire_time_changed,
async_mock_service,
mock_restore_cache,
)
async def test_valid_config(opp):
    """A minimal flux switch config sets up and starts in the off state."""
    config = {
        "switch": {
            "platform": "flux",
            "name": "flux",
            "lights": ["light.desk", "light.lamp"],
        }
    }
    assert await async_setup_component(opp, "switch", config)
    await opp.async_block_till_done()
    flux_state = opp.states.get("switch.flux")
    assert flux_state is not None
    assert flux_state.state == "off"
async def test_restore_state_last_on(opp):
    """A flux switch that was last on is restored to on after setup."""
    mock_restore_cache(opp, [State("switch.flux", "on")])
    config = {
        "switch": {
            "platform": "flux",
            "name": "flux",
            "lights": ["light.desk", "light.lamp"],
        }
    }
    assert await async_setup_component(opp, "switch", config)
    await opp.async_block_till_done()
    restored = opp.states.get("switch.flux")
    assert restored is not None
    assert restored.state == "on"
async def test_restore_state_last_off(opp):
    """A flux switch that was last off is restored to off after setup."""
    mock_restore_cache(opp, [State("switch.flux", "off")])
    config = {
        "switch": {
            "platform": "flux",
            "name": "flux",
            "lights": ["light.desk", "light.lamp"],
        }
    }
    assert await async_setup_component(opp, "switch", config)
    await opp.async_block_till_done()
    restored = opp.states.get("switch.flux")
    assert restored is not None
    assert restored.state == "off"
async def test_valid_config_with_info(opp):
    """A config with explicit times and color temperatures sets up cleanly."""
    config = {
        "switch": {
            "platform": "flux",
            "name": "flux",
            "lights": ["light.desk", "light.lamp"],
            "stop_time": "22:59",
            "start_time": "7:22",
            "start_colortemp": "1000",
            "sunset_colortemp": "2000",
            "stop_colortemp": "4000",
        }
    }
    assert await async_setup_component(opp, "switch", config)
    await opp.async_block_till_done()
async def test_valid_config_no_name(opp):
    """A flux config without an explicit name still sets up one switch."""
    config = {"switch": {"platform": "flux", "lights": ["light.desk", "light.lamp"]}}
    with assert_setup_component(1, "switch"):
        assert await async_setup_component(opp, "switch", config)
    await opp.async_block_till_done()
async def test_invalid_config_no_lights(opp):
    """A flux config missing the required lights list sets up zero switches."""
    config = {"switch": {"platform": "flux", "name": "flux"}}
    with assert_setup_component(0, "switch"):
        assert await async_setup_component(opp, "switch", config)
    await opp.async_block_till_done()
async def test_flux_when_switch_is_off(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch when it is off: the light must not be adjusted."""
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 10:30; stub sunrise at 05:00 and sunset at 17:00.
    test_time = dt_util.utcnow().replace(hour=10, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                }
            },
        )
        await opp.async_block_till_done()
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # The switch was never turned on, so flux must issue no light calls.
        assert not turn_on_calls
async def test_flux_before_sunrise(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch before sunrise."""
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 02:30, before the stubbed 05:00:05 sunrise.
    test_time = dt_util.utcnow().replace(hour=2, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=5)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    await opp.async_block_till_done()
    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply before sunrise.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 112
        assert call.data[light.ATTR_XY_COLOR] == [0.606, 0.379]
async def test_flux_before_sunrise_known_location(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch before sunrise with real astral data for a fixed location."""
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Fixed location (Edinburgh area coordinates) so real sun events are
    # deterministic; only the clock is patched, not get_astral_event_date.
    opp.config.latitude = 55.948372
    opp.config.longitude = -3.199466
    opp.config.elevation = 17
    # Freeze "now" at 02:00 UTC on 2019-06-21 (summer solstice).
    test_time = dt_util.utcnow().replace(
        hour=2, minute=0, second=0, day=21, month=6, year=2019
    )
    await opp.async_block_till_done()
    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    # 'brightness': 255,
                    # 'disable_brightness_adjust': True,
                    # 'mode': 'rgb',
                    # 'interval': 120
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply before sunrise.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 112
        assert call.data[light.ATTR_XY_COLOR] == [0.606, 0.379]
# pylint: disable=invalid-name
async def test_flux_after_sunrise_before_sunset(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch after sunrise and before sunset."""
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 08:30, between stubbed sunrise (05:00) and sunset (17:00).
    test_time = dt_util.utcnow().replace(hour=8, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply mid-morning.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 173
        assert call.data[light.ATTR_XY_COLOR] == [0.439, 0.37]
# pylint: disable=invalid-name
async def test_flux_after_sunset_before_stop(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch after sunset and before stop."""
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 17:30, after stubbed sunset (17:00), before stop (22:00).
    test_time = dt_util.utcnow().replace(hour=17, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "stop_time": "22:00",
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply in the early evening.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 146
        assert call.data[light.ATTR_XY_COLOR] == [0.506, 0.385]
# pylint: disable=invalid-name
async def test_flux_after_stop_before_sunrise(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch after stop and before sunrise."""
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 23:30, after the default stop time, before sunrise.
    test_time = dt_util.utcnow().replace(hour=23, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply late at night.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 112
        assert call.data[light.ATTR_XY_COLOR] == [0.606, 0.379]
# pylint: disable=invalid-name
async def test_flux_with_custom_start_stop_times(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux with custom start and stop times."""
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 17:30; stubbed sunset 17:00, custom stop 23:30.
    test_time = dt_util.utcnow().replace(hour=17, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "start_time": "6:00",
                    "stop_time": "23:30",
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply with the custom start/stop window.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 147
        assert call.data[light.ATTR_XY_COLOR] == [0.504, 0.385]
async def test_flux_before_sunrise_stop_next_day(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch before sunrise.

    This test has the stop_time on the next day (after midnight).
    """
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 02:30, after the 01:00 stop and before sunrise.
    test_time = dt_util.utcnow().replace(hour=2, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "stop_time": "01:00",
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply before sunrise.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 112
        assert call.data[light.ATTR_XY_COLOR] == [0.606, 0.379]
# pylint: disable=invalid-name
async def test_flux_after_sunrise_before_sunset_stop_next_day(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """
    Test the flux switch after sunrise and before sunset.

    This test has the stop_time on the next day (after midnight).
    """
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 08:30, between stubbed sunrise (05:00) and sunset (17:00).
    test_time = dt_util.utcnow().replace(hour=8, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "stop_time": "01:00",
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply mid-morning.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 173
        assert call.data[light.ATTR_XY_COLOR] == [0.439, 0.37]
# pylint: disable=invalid-name
async def test_flux_after_sunset_before_midnight_stop_next_day(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch after sunset and before stop.

    This test has the stop_time on the next day (after midnight).
    The previous ``@pytest.mark.parametrize("x", [0, 1])`` was removed:
    the ``x`` argument was never used, so it only ran the identical test twice.
    """
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 23:30, after stubbed sunset (17:00), before the
    # next-day 01:00 stop time.
    test_time = dt_util.utcnow().replace(hour=23, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "stop_time": "01:00",
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply late in the evening.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 119
        assert call.data[light.ATTR_XY_COLOR] == [0.588, 0.386]
# pylint: disable=invalid-name
async def test_flux_after_sunset_after_midnight_stop_next_day(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch after sunset and before stop.

    This test has the stop_time on the next day (after midnight).
    """
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 00:30, past midnight but before the 01:00 stop time.
    test_time = dt_util.utcnow().replace(hour=00, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "stop_time": "01:00",
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply just after midnight.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 114
        assert call.data[light.ATTR_XY_COLOR] == [0.601, 0.382]
# pylint: disable=invalid-name
async def test_flux_after_stop_before_sunrise_stop_next_day(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch after stop and before sunrise.

    This test has the stop_time on the next day (after midnight).
    """
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 02:30, after the 01:00 stop and before sunrise.
    test_time = dt_util.utcnow().replace(hour=2, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "stop_time": "01:00",
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply between stop time and sunrise.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 112
        assert call.data[light.ATTR_XY_COLOR] == [0.606, 0.379]
# pylint: disable=invalid-name
async def test_flux_with_custom_colortemps(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux with custom start and stop colortemps."""
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 17:30, shortly after the stubbed 17:00 sunset.
    test_time = dt_util.utcnow().replace(hour=17, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "start_colortemp": "1000",
                    "stop_colortemp": "6000",
                    "stop_time": "22:00",
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # Values flux is expected to apply with the custom colortemp range.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 159
        assert call.data[light.ATTR_XY_COLOR] == [0.469, 0.378]
# pylint: disable=invalid-name
async def test_flux_with_custom_brightness(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch with a fixed custom brightness."""
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 17:30, shortly after the stubbed 17:00 sunset.
    test_time = dt_util.utcnow().replace(hour=17, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "brightness": 255,
                    "stop_time": "22:00",
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # The configured brightness (255) overrides the computed value;
        # the color still varies with the time of day.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 255
        assert call.data[light.ATTR_XY_COLOR] == [0.506, 0.385]
async def test_flux_with_multiple_lights(
    opp, legacy_patchable_time, enable_custom_integrations
):
    """Test the flux switch with multiple light entities.

    All three lights must receive the same brightness/color update.
    Leftover debug print() calls in the astral stub were removed.
    """
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1, ent2, ent3 = platform.ENTITIES
    await opp.services.async_call(
        light.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ent2.entity_id}, blocking=True
    )
    await opp.services.async_call(
        light.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ent3.entity_id}, blocking=True
    )
    await opp.async_block_till_done()
    # Verify initial state of all three lights.
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    state = opp.states.get(ent2.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    state = opp.states.get(ent3.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("xy_color") is None
    assert state.attributes.get("brightness") is None
    # Freeze "now" at 12:00, between stubbed sunrise (05:00) and sunset (17:00).
    test_time = dt_util.utcnow().replace(hour=12, minute=0, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id, ent2.entity_id, ent3.entity_id],
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # One identical turn_on call per configured light.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_BRIGHTNESS] == 163
        assert call.data[light.ATTR_XY_COLOR] == [0.46, 0.376]
        call = turn_on_calls[-2]
        assert call.data[light.ATTR_BRIGHTNESS] == 163
        assert call.data[light.ATTR_XY_COLOR] == [0.46, 0.376]
        call = turn_on_calls[-3]
        assert call.data[light.ATTR_BRIGHTNESS] == 163
        assert call.data[light.ATTR_XY_COLOR] == [0.46, 0.376]
async def test_flux_with_mired(opp, legacy_patchable_time, enable_custom_integrations):
    """Test the flux switch's mired mode: it sets color_temp instead of xy_color."""
    platform = getattr(opp.components, "test.light")
    platform.init()
    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()
    ent1 = platform.ENTITIES[0]
    # Verify initial state of light
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("color_temp") is None
    # Freeze "now" at 08:30, between stubbed sunrise (05:00) and sunset (17:00).
    test_time = dt_util.utcnow().replace(hour=8, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Astral stub: fixed sunrise/sunset regardless of date/location.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "mode": "mired",
                }
            },
        )
        await opp.async_block_till_done()
        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()
        # In mired mode flux sends a color temperature value.
        call = turn_on_calls[-1]
        assert call.data[light.ATTR_COLOR_TEMP] == 269
async def test_flux_with_rgb(opp, legacy_patchable_time, enable_custom_integrations):
    """Test the flux switch in RGB mode."""
    platform = getattr(opp.components, "test.light")
    platform.init()

    assert await async_setup_component(
        opp, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    await opp.async_block_till_done()

    ent1 = platform.ENTITIES[0]

    # The light must start on, with no color temperature set yet.
    state = opp.states.get(ent1.entity_id)
    assert state.state == STATE_ON
    assert state.attributes.get("color_temp") is None

    # Freeze the clock mid-morning, between a fake sunrise and sunset.
    test_time = dt_util.utcnow().replace(hour=8, minute=30, second=0)
    sunset_time = test_time.replace(hour=17, minute=0, second=0)
    sunrise_time = test_time.replace(hour=5, minute=0, second=0)

    def event_date(opp, event, now=None):
        # Stub for get_astral_event_date: fixed sun events on the test day.
        if event == SUN_EVENT_SUNRISE:
            return sunrise_time
        return sunset_time

    with patch(
        "openpeerpower.components.flux.switch.dt_utcnow", return_value=test_time
    ), patch(
        "openpeerpower.components.flux.switch.get_astral_event_date",
        side_effect=event_date,
    ):
        assert await async_setup_component(
            opp,
            switch.DOMAIN,
            {
                switch.DOMAIN: {
                    "platform": "flux",
                    "name": "flux",
                    "lights": [ent1.entity_id],
                    "mode": "rgb",
                }
            },
        )
        await opp.async_block_till_done()

        turn_on_calls = async_mock_service(opp, light.DOMAIN, SERVICE_TURN_ON)
        await opp.services.async_call(
            switch.DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: "switch.flux"},
            blocking=True,
        )
        async_fire_time_changed(opp, test_time)
        await opp.async_block_till_done()

        # The conversion produces floats; round before comparing channels.
        call = turn_on_calls[-1]
        expected_rgb = (255, 198, 152)
        rounded_rgb = tuple(map(round, call.data[light.ATTR_RGB_COLOR]))
        assert rounded_rgb == expected_rgb
| 33.183707 | 87 | 0.618662 | 4,894 | 39,920 | 4.791377 | 0.041275 | 0.028317 | 0.033264 | 0.046057 | 0.950147 | 0.945328 | 0.938292 | 0.929976 | 0.922513 | 0.913813 | 0 | 0.019549 | 0.274724 | 39,920 | 1,202 | 88 | 33.211314 | 0.790281 | 0.02242 | 0 | 0.782038 | 0 | 0 | 0.096333 | 0.045543 | 0 | 0 | 0 | 0 | 0.140262 | 1 | 0.016145 | false | 0 | 0.008073 | 0 | 0.056509 | 0.002018 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fb765bcc9c8ab515d8bb77763857dd5a54a1a313 | 11,002 | py | Python | app.py | Kirito56/ExpertaSystem | 6b7b9b9cad647b6019b378e731a862beaacd1158 | [
"MIT"
] | 1 | 2022-02-28T12:47:56.000Z | 2022-02-28T12:47:56.000Z | app.py | Kirito56/ExpertaSystem | 6b7b9b9cad647b6019b378e731a862beaacd1158 | [
"MIT"
] | null | null | null | app.py | Kirito56/ExpertaSystem | 6b7b9b9cad647b6019b378e731a862beaacd1158 | [
"MIT"
] | null | null | null | from core.Asserts import Asserts
from core.DefFact import DefFact
from models.Kebab import Kebab as MKebab, Model
from models.Rules import Rules as MRules, Model as RModel
from core.ES import Kebab
__version__ = '0.9'
def _random_kebab_data():
    """Return a dict of randomly generated kebab fact attributes.

    The attribute names match the ``Kebab`` fact schema used by both engines
    and by ``MKebab.upd_by_id``.
    """
    import random

    return dict(
        Type=random.choice(['Свинина', 'Курка', 'Страусятина', 'Кенгурятина']),
        Action=random.choice(['Чекати', 'Перевернути', 'Забрати']),
        DegreeOfRoastiness=random.choice(
            ['Сире', 'Починає піджарюватись', 'Піджарилось', 'Приговлений', 'Згорівший']),
        AlreadyTurnedOver=random.choice([True, False]),
        PartyReady=random.choice([True, False]),
        DoneOnOneSide=random.choice([True, False]),
        DoneOnBothSides=random.choice([True, False]),
        BothSideReady=random.choice([True, False]),
        Time=random.randint(0, 30),
        DoneAToTheMajority=random.choice([True, False]),
        NumberOfPeople=random.choice([
            ["Іра", "Яна", "Влада"],
            ["Володя", "Женя", "Влад"],
            ["Ярослав", "Вадім", "Олесь"],
        ]),
    )


def _delete_fact(e):
    """Prompt for a fact id, delete its JSON dump and retract it from *e*."""
    import os

    num = int(input('Виберіть вакт для видалення: '))
    os.remove(f'{os.getcwd()}/ES/ES{num}.json')
    e.retract(num)


def _modify_fact(e):
    """Prompt for a fact id and overwrite that fact with random values.

    The fact is updated both in the running engine *e* and in the database
    via ``MKebab.upd_by_id``.
    """
    num = int(input('Виберіть факт щоб змінити на випадкове значення: '))
    data = _random_kebab_data()
    e.declare(e.modify(e.facts[num], **data))
    MKebab.upd_by_id(num, data)


def start(engine: int,
          delfacts: bool = False,
          numoffact: bool = False,
          modify: bool = False,
          clear: bool = False,
          rules: bool = False):
    """Drive one interactive run of the kebab expert system.

    :param engine: 1 uses the ``DefFact`` engine, 2 the ``Asserts`` engine;
        any other value is a no-op.
    :param delfacts: together with ``numoffact``, delete one fact.
    :param numoffact: enables the delete/modify branches (a fact id is then
        read interactively).
    :param modify: together with ``numoffact``, replace one fact with random
        values; adding ``rules`` also runs the inference engine afterwards.
    :param clear: drop all facts from the engine.
    :param rules: run the inference engine and print the averages.
    """
    if engine == 1:
        e = DefFact()
    elif engine == 2:
        e = Asserts()
    else:
        return

    e.reset()
    e.init_kebab()

    if delfacts and numoffact:
        _delete_fact(e)
    elif modify and numoffact and rules:
        # BUG FIX: this combination was unreachable in the original code
        # because the broader ``modify and numoffact`` test came first; the
        # more specific condition is now checked first, as clearly intended.
        _modify_fact(e)
        e.run()
    elif modify and numoffact:
        _modify_fact(e)
        if engine == 2:
            # Only the Asserts engine persisted the modified facts here.
            Kebab.to_json(MKebab, MRules)
    elif clear:
        e.facts.clear()
    elif rules:
        e.run()
        e.avarage(0, 30)
        e.avarage3(0, 10, 30)
if __name__ == "__main__":
    from core.config import db

    # Interactive configuration. Note that ``bool(input(...))`` is True for
    # ANY non-empty answer, so an empty answer means "no".
    engine = int(input(f'Engine:\n1.\tDefFacts\n2.\tAsserts\nВаш вибір:\t{int}\t-\t'))
    delete = bool(input(f'Delete\tFact:\nEmpty.\tNo\n1.\tYes\nВаш вибір:\t{int}\t-\t'))
    modify = bool(input(f'Modify:\nEmpty.\tNo\n1.\tYes\nВаш вибір:\t{int}\t-\t'))
    clear = bool(input(f'Clear:\nEmpty.\tNo\n1.\tYes\nВаш вибір:\t{int}\t-\t'))
    rules = bool(input(f'Rules:\nEmpty.\tNo\n1.\tYes\nВаш вибір:\t{int}\t-\t'))
    table = bool(input(f'DataBase:\nEmpty.\tCreate\n1.\tReWrite\nВаш вибір:\t{int}\t-\t'))

    # Recreate the tables from scratch when asked to rewrite them; otherwise
    # just make sure they exist.
    if table:
        Model.metadata.drop_all(db)
        RModel.metadata.drop_all(db)
    Model.metadata.create_all(db)
    RModel.metadata.create_all(db)

    print(f'Engine:\t{engine}\nВидалення:\t{delete}\nРедагування:\t{modify}\nОчищення:\t{clear}\nПравила:\t{rules}')

    # BUG FIX: ``numoffact`` was previously assigned only when at least one
    # flag was set, which raised a NameError at the ``start(...)`` call when
    # every flag was left empty.
    numoffact = delete or modify or clear or rules

    start(engine, modify=modify, numoffact=numoffact, delfacts=delete, clear=clear, rules=rules)
    Kebab.to_json(MKebab, MRules)
| 49.336323 | 116 | 0.549536 | 926 | 11,002 | 6.49676 | 0.144708 | 0.079787 | 0.06383 | 0.083777 | 0.872008 | 0.864362 | 0.859375 | 0.859375 | 0.859375 | 0.859375 | 0 | 0.005673 | 0.343119 | 11,002 | 222 | 117 | 49.558559 | 0.826761 | 0 | 0 | 0.831776 | 0 | 0.009346 | 0.125523 | 0.034357 | 0 | 0 | 0 | 0 | 0.014019 | 1 | 0.004673 | false | 0 | 0.056075 | 0 | 0.060748 | 0.004673 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
fbccadc722b9345953d44b66ff79fb494d06c88e | 210 | py | Python | models/__init__.py | ZhenyuZhangUSTC/loss-landscape | debe6373714372c040b29a55dffa96cff99e2b8f | [
"MIT"
] | 4 | 2022-03-09T00:58:35.000Z | 2022-03-31T07:34:07.000Z | models/__init__.py | ZhenyuZhangUSTC/loss-landscape | debe6373714372c040b29a55dffa96cff99e2b8f | [
"MIT"
] | null | null | null | models/__init__.py | ZhenyuZhangUSTC/loss-landscape | debe6373714372c040b29a55dffa96cff99e2b8f | [
"MIT"
] | null | null | null | from models.resnet import *
# Aggregate the remaining model architectures at the package level so callers
# can simply write ``from models import <ArchName>``.
# NOTE(review): star imports re-export whatever each submodule defines, and a
# later submodule can silently shadow names from an earlier one — consider
# explicit imports plus an ``__all__`` declaration.
from models.mobilenetv2 import *
from models.alexnet import *
from models.htbd_alexnet import *
from models.vgg import *
from models.clbd_resnet import *
from models.resnets import *
fbe096b45f1ed34f6f42f175bb68929f1a9ed18a | 20,310 | py | Python | st2tests/integration/orquesta/test_wiring_pause_and_resume.py | shusugmt/st2 | 31da26badfb4ca3fb3e8cae07cfeec4791191afd | [
"Apache-2.0"
] | null | null | null | st2tests/integration/orquesta/test_wiring_pause_and_resume.py | shusugmt/st2 | 31da26badfb4ca3fb3e8cae07cfeec4791191afd | [
"Apache-2.0"
] | null | null | null | st2tests/integration/orquesta/test_wiring_pause_and_resume.py | shusugmt/st2 | 31da26badfb4ca3fb3e8cae07cfeec4791191afd | [
"Apache-2.0"
] | null | null | null | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
from integration.orquesta import base
from st2common.constants import action as ac_const
class PauseResumeWiringTest(base.TestWorkflowExecution, base.WorkflowControlTestCaseMixin):
    """Integration tests for pausing and resuming orquesta workflow executions.

    Each test launches a workflow whose tasks block until a temporary file is
    deleted, which gives the test a deterministic point at which to pause and
    resume the execution (and its subworkflows).
    """

    # Paths of the temp files the workflow tasks wait on; created in setUp.
    temp_file_path_x = None
    temp_file_path_y = None

    def setUp(self):
        super(PauseResumeWiringTest, self).setUp()

        # Create temporary files used by the tests.
        self.temp_file_path_x = self._create_temp_file()
        self.temp_file_path_y = self._create_temp_file()

    def tearDown(self):
        # Delete temporary files.
        self._delete_temp_file(self.temp_file_path_x)
        self._delete_temp_file(self.temp_file_path_y)

        super(PauseResumeWiringTest, self).tearDown()

    def test_pause_and_resume(self):
        # A temp file is created during test setup. Ensure the temp file exists.
        path = self.temp_file_path_x
        self.assertTrue(os.path.exists(path))

        # Launch the workflow. The workflow will wait for the temp file to be deleted.
        params = {'tempfile': path}
        ex = self._execute_workflow('examples.orquesta-test-pause', params)
        self._wait_for_task(ex, 'task1', ac_const.LIVEACTION_STATUS_RUNNING)

        # Pause the workflow before the temp file is deleted. The workflow will be
        # pausing but task1 will still be running to allow for graceful exit.
        self.st2client.liveactions.pause(ex.id)

        # Expecting the ex to be pausing, waiting for task1 to be completed.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSING)

        # Delete the temporary file so task1 can complete.
        os.remove(path)
        self.assertFalse(os.path.exists(path))

        # Wait for the ex to be paused.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSED)

        # Resume the ex.
        ex = self.st2client.liveactions.resume(ex.id)

        # Wait for completion.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)

    def test_pause_and_resume_cascade_to_subworkflow(self):
        # A temp file is created during test setup. Ensure the temp file exists.
        path = self.temp_file_path_x
        self.assertTrue(os.path.exists(path))

        # Launch the workflow. The workflow will wait for the temp file to be deleted.
        params = {'tempfile': path}
        ex = self._execute_workflow('examples.orquesta-test-pause-subworkflow', params)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk_exs = self._wait_for_task(ex, 'task1', ac_const.LIVEACTION_STATUS_RUNNING)

        # Pause the workflow before the temp file is deleted. The workflow will be
        # paused but task1 will still be running to allow for graceful exit.
        ex = self.st2client.liveactions.pause(ex.id)

        # Expecting the ex to be pausing, waiting for task1 to be completed.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSING)
        tk_ac_ex = self._wait_for_state(tk_exs[0], ac_const.LIVEACTION_STATUS_PAUSING)

        # Delete the temporary file.
        os.remove(path)
        self.assertFalse(os.path.exists(path))

        # Wait for the executions to be paused.
        tk_ac_ex = self._wait_for_state(tk_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSED)

        # Resume the parent ex; the resume must cascade down to the subworkflow.
        ex = self.st2client.liveactions.resume(ex.id)

        # Wait for completion.
        tk_ac_ex = self._wait_for_state(tk_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)

    def test_pause_and_resume_cascade_to_subworkflows(self):
        # Temp files are created during test setup. Ensure the temp files exist.
        path1 = self.temp_file_path_x
        self.assertTrue(os.path.exists(path1))
        path2 = self.temp_file_path_y
        self.assertTrue(os.path.exists(path2))

        # Launch the workflow. The subworkflows will wait for their temp files
        # to be deleted.
        params = {'file1': path1, 'file2': path2}
        ex = self._execute_workflow('examples.orquesta-test-pause-subworkflows', params)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_exs = self._wait_for_task(ex, 'task1', ac_const.LIVEACTION_STATUS_RUNNING)
        tk2_exs = self._wait_for_task(ex, 'task2', ac_const.LIVEACTION_STATUS_RUNNING)

        # Pause the workflow before the temp files are deleted. The workflow will be
        # paused but the tasks will still be running to allow for graceful exit.
        ex = self.st2client.liveactions.pause(ex.id)

        # Expecting the ex to be pausing, waiting for the tasks to be completed.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSING)
        tk1_ac_ex = self._wait_for_state(tk1_exs[0], ac_const.LIVEACTION_STATUS_PAUSING)
        tk2_ac_ex = self._wait_for_state(tk2_exs[0], ac_const.LIVEACTION_STATUS_PAUSING)

        # Delete the temporary file for one of the subworkflows.
        os.remove(path1)
        self.assertFalse(os.path.exists(path1))

        # Check the workflow and subworkflow status.
        # BUG FIX: the tk2 wait result was previously assigned to tk1_ac_ex,
        # clobbering task1's execution handle.
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_PAUSING)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSING)

        # Delete the temporary file for the other subworkflow.
        os.remove(path2)
        self.assertFalse(os.path.exists(path2))

        # Check the workflow and subworkflow status.
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSED)

        # Resume the parent ex; the resume must cascade down to both subworkflows.
        ex = self.st2client.liveactions.resume(ex.id)

        # Wait for completion.
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)

    def test_pause_and_resume_cascade_from_subworkflow(self):
        # A temp file is created during test setup. Ensure the temp file exists.
        path = self.temp_file_path_x
        self.assertTrue(os.path.exists(path))

        # Launch the workflow. The workflow will wait for the temp file to be deleted.
        params = {'tempfile': path}
        ex = self._execute_workflow('examples.orquesta-test-pause-subworkflow', params)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk_exs = self._wait_for_task(ex, 'task1', ac_const.LIVEACTION_STATUS_RUNNING)

        # Pause the subworkflow before the temp file is deleted. The task will be
        # paused but the workflow will still be running.
        tk_ac_ex = self.st2client.liveactions.pause(tk_exs[0].id)

        # Expecting the workflow is still running and task1 is pausing.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk_ac_ex = self._wait_for_state(tk_ac_ex, ac_const.LIVEACTION_STATUS_PAUSING)

        # Delete the temporary file.
        os.remove(path)
        self.assertFalse(os.path.exists(path))

        # Wait for the workflow and task to be paused; the pause of the last
        # running task cascades up to the parent.
        tk_ac_ex = self._wait_for_state(tk_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSED)

        # Resume the task; the resume must cascade up to the parent.
        tk_ac_ex = self.st2client.liveactions.resume(tk_ac_ex.id)

        # Wait for completion.
        tk_ac_ex = self._wait_for_state(tk_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)

    def test_pause_from_1_of_2_subworkflows_and_resume_subworkflow_when_workflow_paused(self):
        # Temp files are created during test setup. Ensure the temp files exist.
        path1 = self.temp_file_path_x
        self.assertTrue(os.path.exists(path1))
        path2 = self.temp_file_path_y
        self.assertTrue(os.path.exists(path2))

        # Launch the workflow. The subworkflows will wait for their temp files
        # to be deleted.
        params = {'file1': path1, 'file2': path2}
        ex = self._execute_workflow('examples.orquesta-test-pause-subworkflows', params)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_exs = self._wait_for_task(ex, 'task1', ac_const.LIVEACTION_STATUS_RUNNING)
        tk2_exs = self._wait_for_task(ex, 'task2', ac_const.LIVEACTION_STATUS_RUNNING)

        # Pause one subworkflow before the temp file is deleted. The task will be
        # paused but the workflow and the other subworkflow will still be running.
        tk1_ac_ex = self.st2client.liveactions.pause(tk1_exs[0].id)

        # Expecting the workflow is still running and task1 is pausing.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSING)
        tk2_ac_ex = self._wait_for_state(tk2_exs[0], ac_const.LIVEACTION_STATUS_RUNNING)

        # Delete the temporary file for the paused subworkflow.
        os.remove(path1)
        self.assertFalse(os.path.exists(path1))

        # Wait for the subworkflow to pause while the workflow
        # and the other subworkflow are still running.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_RUNNING)

        # Delete the temporary file for the other subworkflow.
        os.remove(path2)
        self.assertFalse(os.path.exists(path2))

        # The workflow will now be paused because no other task is running.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSED)
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)

        # Resume the paused subworkflow.
        tk1_ac_ex = self.st2client.liveactions.resume(tk1_ac_ex.id)

        # Wait for completion.
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)

    def test_pause_from_1_of_2_subworkflows_and_resume_subworkflow_while_workflow_running(self):
        # Temp files are created during test setup. Ensure the temp files exist.
        path1 = self.temp_file_path_x
        self.assertTrue(os.path.exists(path1))
        path2 = self.temp_file_path_y
        self.assertTrue(os.path.exists(path2))

        # Launch the workflow. The subworkflows will wait for their temp files
        # to be deleted.
        params = {'file1': path1, 'file2': path2}
        ex = self._execute_workflow('examples.orquesta-test-pause-subworkflows', params)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_exs = self._wait_for_task(ex, 'task1', ac_const.LIVEACTION_STATUS_RUNNING)
        tk2_exs = self._wait_for_task(ex, 'task2', ac_const.LIVEACTION_STATUS_RUNNING)

        # Pause one subworkflow before the temp file is deleted. The task will be
        # paused but the workflow and the other subworkflow will still be running.
        tk1_ac_ex = self.st2client.liveactions.pause(tk1_exs[0].id)

        # Expecting the workflow is still running and task1 is pausing.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSING)
        tk2_ac_ex = self._wait_for_state(tk2_exs[0], ac_const.LIVEACTION_STATUS_RUNNING)

        # Delete the temporary file for the paused subworkflow.
        os.remove(path1)
        self.assertFalse(os.path.exists(path1))

        # Wait for the subworkflow to pause while the workflow
        # and the other subworkflow are still running.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_RUNNING)

        # Resume the subworkflow while the workflow is still running.
        tk1_ac_ex = self.st2client.liveactions.resume(tk1_ac_ex.id)

        # The subworkflow will succeed while the other subworkflow is still running.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_RUNNING)

        # Delete the temporary file for the other subworkflow.
        os.remove(path2)
        self.assertFalse(os.path.exists(path2))

        # Wait for completion.
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)

    def test_pause_from_all_subworkflows_and_resume_from_subworkflows(self):
        # Temp files are created during test setup. Ensure the temp files exist.
        path1 = self.temp_file_path_x
        self.assertTrue(os.path.exists(path1))
        path2 = self.temp_file_path_y
        self.assertTrue(os.path.exists(path2))

        # Launch the workflow. The subworkflows will wait for their temp files
        # to be deleted.
        params = {'file1': path1, 'file2': path2}
        ex = self._execute_workflow('examples.orquesta-test-pause-subworkflows', params)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_exs = self._wait_for_task(ex, 'task1', ac_const.LIVEACTION_STATUS_RUNNING)
        tk2_exs = self._wait_for_task(ex, 'task2', ac_const.LIVEACTION_STATUS_RUNNING)

        # Pause one subworkflow before the temp file is deleted. The task will be
        # paused but the workflow and the other subworkflow will still be running.
        tk1_ac_ex = self.st2client.liveactions.pause(tk1_exs[0].id)

        # Expecting the workflow is still running and task1 is pausing.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSING)
        tk2_ac_ex = self._wait_for_state(tk2_exs[0], ac_const.LIVEACTION_STATUS_RUNNING)

        # Pause the other subworkflow before the temp file is deleted. The main
        # workflow keeps running because the pause was initiated downstream.
        tk2_ac_ex = self.st2client.liveactions.pause(tk2_exs[0].id)

        # Expecting the workflow running and both subworkflows pausing.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSING)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_PAUSING)

        # Delete the temporary files for the subworkflows.
        os.remove(path1)
        self.assertFalse(os.path.exists(path1))
        os.remove(path2)
        self.assertFalse(os.path.exists(path2))

        # Wait for the subworkflows to pause. The main workflow also pauses
        # now because no other task is running.
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSED)

        # Resume the first subworkflow.
        tk1_ac_ex = self.st2client.liveactions.resume(tk1_ac_ex.id)

        # The subworkflow will succeed while the other subworkflow is still paused.
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSED)

        # Resume the other subworkflow.
        tk2_ac_ex = self.st2client.liveactions.resume(tk2_ac_ex.id)

        # Wait for completion.
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)

    def test_pause_from_all_subworkflows_and_resume_from_parent_workflow(self):
        # Temp files are created during test setup. Ensure the temp files exist.
        path1 = self.temp_file_path_x
        self.assertTrue(os.path.exists(path1))
        path2 = self.temp_file_path_y
        self.assertTrue(os.path.exists(path2))

        # Launch the workflow. The subworkflows will wait for their temp files
        # to be deleted.
        params = {'file1': path1, 'file2': path2}
        ex = self._execute_workflow('examples.orquesta-test-pause-subworkflows', params)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_exs = self._wait_for_task(ex, 'task1', ac_const.LIVEACTION_STATUS_RUNNING)
        tk2_exs = self._wait_for_task(ex, 'task2', ac_const.LIVEACTION_STATUS_RUNNING)

        # Pause one subworkflow before the temp file is deleted. The task will be
        # paused but the workflow and the other subworkflow will still be running.
        tk1_ac_ex = self.st2client.liveactions.pause(tk1_exs[0].id)

        # Expecting the workflow is still running and task1 is pausing.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSING)
        tk2_ac_ex = self._wait_for_state(tk2_exs[0], ac_const.LIVEACTION_STATUS_RUNNING)

        # Pause the other subworkflow before the temp file is deleted. The main
        # workflow keeps running because the pause was initiated downstream.
        tk2_ac_ex = self.st2client.liveactions.pause(tk2_exs[0].id)

        # Expecting the workflow running and both subworkflows pausing.
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_RUNNING)
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSING)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_PAUSING)

        # Delete the temporary files for the subworkflows.
        os.remove(path1)
        self.assertFalse(os.path.exists(path1))
        os.remove(path2)
        self.assertFalse(os.path.exists(path2))

        # Wait for the subworkflows to pause. The main workflow also pauses
        # now because no other task is running.
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_PAUSED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_PAUSED)

        # Resume the parent workflow; the resume must cascade to both subworkflows.
        ex = self.st2client.liveactions.resume(ex.id)

        # Wait for completion.
        tk1_ac_ex = self._wait_for_state(tk1_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        tk2_ac_ex = self._wait_for_state(tk2_ac_ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
        ex = self._wait_for_state(ex, ac_const.LIVEACTION_STATUS_SUCCEEDED)
| 50.522388 | 96 | 0.723732 | 2,970 | 20,310 | 4.619529 | 0.060943 | 0.061735 | 0.078571 | 0.164286 | 0.912391 | 0.91035 | 0.89898 | 0.896793 | 0.888192 | 0.888192 | 0 | 0.014813 | 0.202265 | 20,310 | 401 | 97 | 50.648379 | 0.831996 | 0.279025 | 0 | 0.831731 | 0 | 0 | 0.031095 | 0.021533 | 0 | 0 | 0 | 0 | 0.125 | 1 | 0.048077 | false | 0 | 0.019231 | 0 | 0.081731 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
fbe1819f5589a31f23b888b94ed1c061437c1238 | 11,929 | py | Python | inv_cov.py | CosmoLike/LSSTC_obsstrat | a4a470b01fb85cd0f461a3a82a407e84b30dd5d1 | [
"MIT"
] | null | null | null | inv_cov.py | CosmoLike/LSSTC_obsstrat | a4a470b01fb85cd0f461a3a82a407e84b30dd5d1 | [
"MIT"
] | null | null | null | inv_cov.py | CosmoLike/LSSTC_obsstrat | a4a470b01fb85cd0f461a3a82a407e84b30dd5d1 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import sys
import math, numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from numpy import linalg as LA
import numpy as np
#uncomment below if you want to invert the DESC SRD Y1 covariance
infile =['cov/cov_LSST_Y1_area7.500000e+03_ng9.800000e+00_nl1.500000e+01','cov/cov_LSST_Y1_area1.300000e+04_ng1.210000e+01_nl2.000000e+01','cov/cov_LSST_Y1_area1.600000e+04_ng1.510000e+01_nl2.500000e+01','cov/cov_LSST_Y3_area1.000000e+04_ng1.510000e+01_nl2.500000e+01','cov/cov_LSST_Y3_area1.500000e+04_ng1.890000e+01_nl3.200000e+01','cov/cov_LSST_Y3_area2.000000e+04_ng2.350000e+01_nl4.100000e+01','cov/cov_LSST_Y6_area1.000000e+04_ng2.030000e+01_nl3.500000e+01','cov/cov_LSST_Y6_area1.500000e+04_ng2.350000e+01_nl4.100000e+01','cov/cov_LSST_Y6_area2.000000e+04_ng2.690000e+01_nl4.800000e+01','cov/cov_LSST_Y10_area1.000000e+04_ng2.690000e+01_nl4.800000e+01','cov/cov_LSST_Y10_area1.500000e+04_ng3.080000e+01_nl5.700000e+01','cov/cov_LSST_Y10_area2.000000e+04_ng3.500000e+01_nl6.700000e+01']
data= ['datav/3x2pt_clusterN_clusterWL_LSST_Y1_area7.500000e+03_ng9.800000e+00_nl1.500000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y1_area1.300000e+04_ng1.210000e+01_nl2.000000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y1_area1.600000e+04_ng1.510000e+01_nl2.500000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y3_area1.000000e+04_ng1.510000e+01_nl2.500000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y3_area1.500000e+04_ng1.890000e+01_nl3.200000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y3_area2.000000e+04_ng2.350000e+01_nl4.100000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y6_area1.000000e+04_ng2.030000e+01_nl3.500000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y6_area1.500000e+04_ng2.350000e+01_nl4.100000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y6_area2.000000e+04_ng2.690000e+01_nl4.800000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y10_area1.000000e+04_ng2.690000e+01_nl4.800000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y10_area1.500000e+04_ng3.080000e+01_nl5.700000e+01','datav/3x2pt_clusterN_clusterWL_LSST_Y10_area2.000000e+04_ng3.500000e+01_nl6.700000e+01']
outname=['Y1_area7.500000e+03_ng9.800000e+00_nl1.500000e+01','Y1_area1.300000e+04_ng1.210000e+01_nl2.000000e+01','Y1_area1.600000e+04_ng1.510000e+01_nl2.500000e+01','Y3_area1.000000e+04_ng1.510000e+01_nl2.500000e+01','Y3_area1.500000e+04_ng1.890000e+01_nl3.200000e+01','Y3_area2.000000e+04_ng2.350000e+01_nl4.100000e+01','Y6_area1.000000e+04_ng2.030000e+01_nl3.500000e+01','Y6_area1.500000e+04_ng2.350000e+01_nl4.100000e+01','Y6_area2.000000e+04_ng2.690000e+01_nl4.800000e+01','Y10_area1.000000e+04_ng2.690000e+01_nl4.800000e+01','Y10_area1.500000e+04_ng3.080000e+01_nl5.700000e+01','Y10_area2.000000e+04_ng3.500000e+01_nl6.700000e+01']
for k in range(0,12):
#scenario Y10 20000 deg^2
if(k==11):
nggl = 25 # number of ggl power spectra
ngcl = 11 # number of cluster-source galaxy power spectra
nlens = 10 # number of lens bins
nlenscl= 4 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y10 15000 deg^2
if(k==10):
nggl = 25 # number of ggl power spectra
ngcl = 11 # number of cluster-source galaxy power spectra
nlens = 10 # number of lens bins
nlenscl= 4 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y10 10000 deg^2
if(k==9):
nggl = 25 # number of ggl power spectra
ngcl = 11 # number of cluster-source galaxy power spectra
nlens = 10 # number of lens bins
nlenscl= 4 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y6 20000 deg^2
if(k==8):
nggl = 22 # number of ggl power spectra
ngcl = 11 # number of cluster-source galaxy power spectra
nlens = 9 # number of lens bins
nlenscl= 4 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y6 15000 deg^2
if(k==7):
nggl = 22 # number of ggl power spectra
ngcl = 10 # number of cluster-source galaxy power spectra
nlens = 9 # number of lens bins
nlenscl= 4 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y6 10000 deg^2
if(k==6):
nggl = 21 # number of ggl power spectra
ngcl = 10 # number of cluster-source galaxy power spectra
nlens = 9 # number of lens bins
nlenscl= 4 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y3 20000 deg^2
if(k==5):
nggl = 16 # number of ggl power spectra
ngcl = 9 # number of cluster-source galaxy power spectra
nlens = 7 # number of lens bins
nlenscl= 3 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y3 15000 deg^2
if(k==4):
nggl = 16 # number of ggl power spectra
ngcl = 9 # number of cluster-source galaxy power spectra
nlens = 7 # number of lens bins
nlenscl= 3 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y3 10000 deg^2
if(k==3):
nggl = 15 # number of ggl power spectra
ngcl = 9 # number of cluster-source galaxy power spectra
nlens = 7 # number of lens bins
nlenscl= 3 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y1 16000 deg^2
if(k==2):
nggl = 11 # number of ggl power spectra
ngcl = 9 # number of cluster-source galaxy power spectra
nlens = 5 # number of lens bins
nlenscl= 3 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y1 13000 deg^2
if(k==1):
nggl = 10 # number of ggl power spectra
ngcl = 9 # number of cluster-source galaxy power spectra
nlens = 5 # number of lens bins
nlenscl= 3 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
#scenario Y1 7500 deg^2
if(k==0):
nggl = 8 # number of ggl power spectra
ngcl = 7 # number of cluster-source galaxy power spectra
nlens = 5 # number of lens bins
nlenscl= 3 # number of cluster redshift bins
nshear = 15 # number of shear tomographic power spectra
ncl=20 # number of ell-bins
nclgcl=5 # number of cluster ell-bins
nrich=5 # number of richness bins
ndata = (nshear+nggl+nlens)*ncl+nlenscl*nrich+nrich*ngcl*nclgcl
n2pt = (nshear+nggl+nlens)*ncl
ncluster = nlenscl*nrich
n2ptcl=n2pt+ncluster
nclusterN_WL=ncluster+nrich*ngcl*nclgcl
datafile= np.genfromtxt(data[k])
mask = np.zeros(ndata)
for i in range(0,datafile.shape[0]):
if (datafile[i,1] >1.0e-15):
mask[i]=1.0
covfile = np.genfromtxt(infile[k])
cov = np.zeros((ndata,ndata))
print ndata,n2pt,int(np.max(covfile[:,0])+1)
for i in range(0,covfile.shape[0]):
cov[int(covfile[i,0]),int(covfile[i,1])] = covfile[i,8]+covfile[i,9]
cov[int(covfile[i,1]),int(covfile[i,0])] = covfile[i,8]+covfile[i,9]
cor = np.zeros((ndata,ndata))
for i in range(0,ndata):
for j in range(0,ndata):
if (cov[i,i]*cov[j,j] >0):
cor[i,j] = cov[i,j]/math.sqrt(cov[i,i]*cov[j,j])
a = np.sort(LA.eigvals(cor[:,:]))
print "min+max eigenvalues full cor:"
print np.min(a), np.max(a)
print "neg eigenvalues full cor:"
for i in range(0,a.shape[0]):
if (a[i]< 0.0): print a[i]
# ############### invert shear covariance #################
inv = LA.inv(cov[0:nshear*ncl,0:nshear*ncl])
a = np.sort(LA.eigvals(cov[0:nshear*ncl,0:nshear*ncl]))
print "min+max eigenvalues shear cov:"
print np.min(a), np.max(a)
outfile = "cov/"+outname[k]+"_shear_shear_inv"
f = open(outfile, "w")
for i in range(0,nshear*ncl):
inv[i,i]=inv[i,i]*mask[i]
for j in range(0,nshear*ncl):
f.write("%d %d %e\n" %(i,j, inv[i,j]))
f.close()
# ############### invert clustering covariance #################
inv = LA.inv(cov[(nshear+nggl)*ncl:(nshear+nggl+nlens)*ncl,(nshear+nggl)*ncl:(nshear+nggl+nlens)*ncl])
a = np.sort(LA.eigvals(cov[(nshear+nggl)*ncl:(nshear+nggl+nlens)*ncl,(nshear+nggl)*ncl:(nshear+nggl+nlens)*ncl]))
print "min+max eigenvalues clustering cov:"
print np.min(a), np.max(a)
outfile = "cov/"+outname[k]+"_pos_pos_inv"
f = open(outfile, "w")
for i in range(0,nlens*ncl):
inv[i,i]=inv[i,i]*mask[(nshear+nggl)*ncl+i]
for j in range(0,nlens*ncl):
f.write("%d %d %e\n" %(i,j, inv[i,j]))
f.close()
# ############### invert 2pt covariance #################
a = np.sort(LA.eigvals(cov[0:n2pt,0:n2pt]))
print "min+max eigenvalues 2pt cov:"
print np.min(a), np.max(a)
inv = LA.inv(cov[0:n2pt,0:n2pt])
outfile = "cov/"+outname[k]+"_3x2pt_inv"
f = open(outfile, "w")
for i in range(0,n2pt):
inv[i,i]=inv[i,i]*mask[i]
for j in range(0,n2pt):
f.write("%d %d %e\n" %( i,j, inv[i,j]))
f.close()
# # ############### invert full2pt+clusterN+clusterWL covariance #################
precond = 1.e-7
for i in range(0,ncluster):
cov[n2pt+i,:]*= precond
cov[:,n2pt+i]*= precond
inv = LA.inv(cov)
a = np.sort(LA.eigvals(cov))
print "min+max eigenvalues of full 2ptclusterN+clusterWL pre-conditioned matrix:"
print np.min(a), np.max(a)
if (np.min(a)<0):
print "WARNING WARNING: %s is not positive definite! WARNING!" % (infile[k])
for i in range(0,ncluster):
inv[n2pt+i,:]*= precond
inv[:,n2pt+i]*= precond
outfile = "cov/"+outname[k]+"_3x2pt_clusterN_clusterWL_inv"
f = open(outfile, "w")
for i in range(0,ndata):
inv[i,i]=inv[i,i]*mask[i]
for j in range(0,ndata):
f.write("%d %d %e\n" %( i,j, inv[i,j]))
f.close()
# # ############### invert clusterN+clusterWL covariance #################
inv = LA.inv(cov[n2pt:n2pt+nclusterN_WL,n2pt:n2pt+nclusterN_WL])
a = np.sort(LA.eigvals(cov[n2pt:n2pt+nclusterN_WL,n2pt:n2pt+nclusterN_WL]))
print "min+max eigenvalues of clusterN_WL pre-conditioned matrix:"
print np.min(a), np.max(a)
if (np.min(a)<0):
print "WARNING WARNING: %s is not positive definite! WARNING!" % (infile[k])
for i in range(0,ncluster):
inv[i,:]*= precond
inv[:,i]*= precond
outfile = "cov/"+outname[k]+"_clusterN_clusterWL_inv"
f = open(outfile, "w")
for i in range(0,nclusterN_WL):
inv[i,i]=inv[i,i]*mask[n2pt+i]
for j in range(0,nclusterN_WL):
f.write("%d %d %e\n" %( i,j, inv[i,j]))
f.close()
cor = np.zeros((ndata,ndata))
for i in range(0,ndata):
for j in range(0,ndata):
if (cov[i,i]*cov[j,j] >0):
cor[i,j] = cov[i,j]/math.sqrt(cov[i,i]*cov[j,j])
# plt.figure()
# #plt.imshow(np.log10(cov[0:1500,2000:]), interpolation="nearest",vmin=-25, vmax=-10)
# plt.imshow(np.log10(cov[:,:]), interpolation="nearest",vmin=-25, vmax=-10)
# #plt.imshow(cor[n2ptcl:n2ptcl+200,300:nshear*ncl], interpolation="nearest",vmax=0.5)
# plt.colorbar()
# plt.show()
| 41.276817 | 1,066 | 0.699304 | 2,062 | 11,929 | 3.947139 | 0.093113 | 0.09436 | 0.066347 | 0.01757 | 0.861408 | 0.807716 | 0.774788 | 0.752304 | 0.725888 | 0.715813 | 0 | 0.138993 | 0.153827 | 11,929 | 288 | 1,067 | 41.420139 | 0.667327 | 0.309246 | 0 | 0.607306 | 0 | 0 | 0.366311 | 0.305971 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.027397 | null | null | 0.077626 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
836c524bf1a4ecbce6c1f4cdd9b543d87c88371c | 121 | py | Python | api/util.py | lonkaars/po-4-op-een-rij | 5cd9eaf116422c82ab68ffcf2dff22e04781b6c5 | [
"MIT"
] | 4 | 2021-02-04T10:33:45.000Z | 2021-03-25T11:26:27.000Z | api/util.py | lonkaars/connect-4 | 5cd9eaf116422c82ab68ffcf2dff22e04781b6c5 | [
"MIT"
] | 21 | 2021-04-05T17:16:04.000Z | 2021-06-12T12:47:22.000Z | api/util.py | lonkaars/connect-4 | 5cd9eaf116422c82ab68ffcf2dff22e04781b6c5 | [
"MIT"
def all_def(props):
    """Return True iff every element of *props* is truthy (True for empty input)."""
    return all(props)
def all_notdef(props):
    """Return True iff every element of *props* is falsy (True for empty input)."""
    return not any(props)
| 17.285714 | 38 | 0.677686 | 24 | 121 | 3.333333 | 0.416667 | 0.15 | 0.35 | 0.175 | 0.3 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.223141 | 121 | 6 | 39 | 20.166667 | 0.851064 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
838b682080d6a1162e75bea9c925f6edee5691f0 | 7,474 | py | Python | tasks-deploy/strange-archive/check.py | irdkwmnsb/lkshl-ctf | e5c0200ddc8ba73df5f321b87b9763fb1bbaba57 | [
"MIT"
] | 3 | 2021-03-30T06:27:58.000Z | 2021-04-03T17:56:35.000Z | tasks-deploy/strange-archive/check.py | irdkwmnsb/lkshl-ctf | e5c0200ddc8ba73df5f321b87b9763fb1bbaba57 | [
"MIT"
] | null | null | null | tasks-deploy/strange-archive/check.py | irdkwmnsb/lkshl-ctf | e5c0200ddc8ba73df5f321b87b9763fb1bbaba57 | [
"MIT"
] | null | null | null | def check(attempt, context):
if attempt.answer == flags[attempt.participant.id % len(flags)]:
return Checked(True)
if attempt.answer in flags:
return CheckedPlagiarist(False, flags.index(attempt.answer))
return Checked(False)
flags = ['LKL{S4Y_H3LLO_T0_G1T_OQUe6Ie2Ye}', 'LKL{S4Y_H3LLO_T0_G1T_wh5Mu25Ahw}', 'LKL{S4Y_H3LLO_T0_G1T_OSDZfgsCEW}', 'LKL{S4Y_H3LLO_T0_G1T_TqLezjAM21}', 'LKL{S4Y_H3LLO_T0_G1T_IcV3goEcF3}', 'LKL{S4Y_H3LLO_T0_G1T_MfNJ9hxQLO}', 'LKL{S4Y_H3LLO_T0_G1T_ENP3EhHArD}', 'LKL{S4Y_H3LLO_T0_G1T_6Anx68irwr}', 'LKL{S4Y_H3LLO_T0_G1T_gfCAIoVAvg}', 'LKL{S4Y_H3LLO_T0_G1T_kc2oyfGWRh}', 'LKL{S4Y_H3LLO_T0_G1T_5TgKaNYRBO}', 'LKL{S4Y_H3LLO_T0_G1T_syhSNvQN90}', 'LKL{S4Y_H3LLO_T0_G1T_PC7fcVBjM8}', 'LKL{S4Y_H3LLO_T0_G1T_9Pf6S057be}', 'LKL{S4Y_H3LLO_T0_G1T_VDWCNj0ulz}', 'LKL{S4Y_H3LLO_T0_G1T_aqn31wbUHo}', 'LKL{S4Y_H3LLO_T0_G1T_cxXJPK4bBh}', 'LKL{S4Y_H3LLO_T0_G1T_4Y49Z47z0j}', 'LKL{S4Y_H3LLO_T0_G1T_h95wg6zsi3}', 'LKL{S4Y_H3LLO_T0_G1T_VUqMC2Hbff}', 'LKL{S4Y_H3LLO_T0_G1T_uv8FKKVB3H}', 'LKL{S4Y_H3LLO_T0_G1T_YMXaAGxiMy}', 'LKL{S4Y_H3LLO_T0_G1T_C46xrT80Yl}', 'LKL{S4Y_H3LLO_T0_G1T_Pwjv8hYgRI}', 'LKL{S4Y_H3LLO_T0_G1T_U2B4XMwSWm}', 'LKL{S4Y_H3LLO_T0_G1T_pqCrM0xQQo}', 'LKL{S4Y_H3LLO_T0_G1T_8Pf6jQUE2L}', 'LKL{S4Y_H3LLO_T0_G1T_SN1o6zjKcn}', 'LKL{S4Y_H3LLO_T0_G1T_vUyPc8MtNa}', 'LKL{S4Y_H3LLO_T0_G1T_1HhPu3kpUC}', 'LKL{S4Y_H3LLO_T0_G1T_JVj293c8Eb}', 'LKL{S4Y_H3LLO_T0_G1T_8vJOOyKIOk}', 'LKL{S4Y_H3LLO_T0_G1T_2RPA3s1v3V}', 'LKL{S4Y_H3LLO_T0_G1T_TEKK8XMZVi}', 'LKL{S4Y_H3LLO_T0_G1T_i4wCFBEfd0}', 'LKL{S4Y_H3LLO_T0_G1T_nQFdV7AFDQ}', 'LKL{S4Y_H3LLO_T0_G1T_6hkg9LzwZA}', 'LKL{S4Y_H3LLO_T0_G1T_kUrZsWMUzC}', 'LKL{S4Y_H3LLO_T0_G1T_6vpDlF2VXd}', 'LKL{S4Y_H3LLO_T0_G1T_ckeYYJO0Js}', 'LKL{S4Y_H3LLO_T0_G1T_zpxen1ZmuX}', 'LKL{S4Y_H3LLO_T0_G1T_nNzzlrSUsm}', 'LKL{S4Y_H3LLO_T0_G1T_GSh5CV1WmI}', 'LKL{S4Y_H3LLO_T0_G1T_b2llsCJpBZ}', 'LKL{S4Y_H3LLO_T0_G1T_5Vy8szyfyH}', 'LKL{S4Y_H3LLO_T0_G1T_2eTMzrvVLq}', 'LKL{S4Y_H3LLO_T0_G1T_RJTmPlmOze}', 'LKL{S4Y_H3LLO_T0_G1T_5DYDORUkL9}', 'LKL{S4Y_H3LLO_T0_G1T_wBJHY94VWB}', 'LKL{S4Y_H3LLO_T0_G1T_Ng121h9wMn}', 'LKL{S4Y_H3LLO_T0_G1T_lll6SKqZ7k}', 'LKL{S4Y_H3LLO_T0_G1T_1eBZUq3LG7}', 'LKL{S4Y_H3LLO_T0_G1T_crczftNoqi}', 'LKL{S4Y_H3LLO_T0_G1T_toep44O8ZV}', 'LKL{S4Y_H3LLO_T0_G1T_1ORtgJO1Jq}', 
'LKL{S4Y_H3LLO_T0_G1T_FlJE7cYI4J}', 'LKL{S4Y_H3LLO_T0_G1T_dLgmfq0E1R}', 'LKL{S4Y_H3LLO_T0_G1T_mjuBukLtyp}', 'LKL{S4Y_H3LLO_T0_G1T_Y0se1ZomLW}', 'LKL{S4Y_H3LLO_T0_G1T_dHJgoklKja}', 'LKL{S4Y_H3LLO_T0_G1T_VkQRx5IdI3}', 'LKL{S4Y_H3LLO_T0_G1T_ErPEzwUPMF}', 'LKL{S4Y_H3LLO_T0_G1T_Q6BglfM8B2}', 'LKL{S4Y_H3LLO_T0_G1T_uUSWAVvBp0}', 'LKL{S4Y_H3LLO_T0_G1T_5oppALTGMU}', 'LKL{S4Y_H3LLO_T0_G1T_o1LIiBE5QT}', 'LKL{S4Y_H3LLO_T0_G1T_qFsAsGc4JA}', 'LKL{S4Y_H3LLO_T0_G1T_lm8oJ8P2WT}', 'LKL{S4Y_H3LLO_T0_G1T_rHtsYYntVl}', 'LKL{S4Y_H3LLO_T0_G1T_cVrbNsbZMN}', 'LKL{S4Y_H3LLO_T0_G1T_HuM9DBQRHI}', 'LKL{S4Y_H3LLO_T0_G1T_Ne4aoW1KQI}', 'LKL{S4Y_H3LLO_T0_G1T_MqEsXX3LiI}', 'LKL{S4Y_H3LLO_T0_G1T_6sHmwLLPQj}', 'LKL{S4Y_H3LLO_T0_G1T_s0Ie0PREBM}', 'LKL{S4Y_H3LLO_T0_G1T_LE9hd1XzVp}', 'LKL{S4Y_H3LLO_T0_G1T_q81uavjLlX}', 'LKL{S4Y_H3LLO_T0_G1T_RvNrrbjT3R}', 'LKL{S4Y_H3LLO_T0_G1T_knrCPzzxJ5}', 'LKL{S4Y_H3LLO_T0_G1T_L6P59r6jWe}', 'LKL{S4Y_H3LLO_T0_G1T_f6LQM2UlGz}', 'LKL{S4Y_H3LLO_T0_G1T_mrBK7i4YTM}', 'LKL{S4Y_H3LLO_T0_G1T_31M2B51lC7}', 'LKL{S4Y_H3LLO_T0_G1T_fVk6o2jhK6}', 'LKL{S4Y_H3LLO_T0_G1T_7Q8un8Iyho}', 'LKL{S4Y_H3LLO_T0_G1T_9rNhKt1NPe}', 'LKL{S4Y_H3LLO_T0_G1T_UTV5S1tJL7}', 'LKL{S4Y_H3LLO_T0_G1T_urqz3u7s0a}', 'LKL{S4Y_H3LLO_T0_G1T_QckdlYhhaT}', 'LKL{S4Y_H3LLO_T0_G1T_BrzexukPg7}', 'LKL{S4Y_H3LLO_T0_G1T_4nTPqix2Sz}', 'LKL{S4Y_H3LLO_T0_G1T_0DHim2LDx5}', 'LKL{S4Y_H3LLO_T0_G1T_YLJZ8kYW0m}', 'LKL{S4Y_H3LLO_T0_G1T_SAr0JKPNLB}', 'LKL{S4Y_H3LLO_T0_G1T_KGhyU55AA6}', 'LKL{S4Y_H3LLO_T0_G1T_VPDf5FTYX5}', 'LKL{S4Y_H3LLO_T0_G1T_ptgPBs9oCL}', 'LKL{S4Y_H3LLO_T0_G1T_bOIVjxWsTF}', 'LKL{S4Y_H3LLO_T0_G1T_7XDL9vnX7P}', 'LKL{S4Y_H3LLO_T0_G1T_uUq5iINq9t}', 'LKL{S4Y_H3LLO_T0_G1T_LAXfJom2Pp}', 'LKL{S4Y_H3LLO_T0_G1T_7e8Ik69mGa}', 'LKL{S4Y_H3LLO_T0_G1T_0VBTuPHWNc}', 'LKL{S4Y_H3LLO_T0_G1T_YkDvFSfs5n}', 'LKL{S4Y_H3LLO_T0_G1T_JhF3ZiiaE0}', 'LKL{S4Y_H3LLO_T0_G1T_jq0tjHWsOu}', 'LKL{S4Y_H3LLO_T0_G1T_1H2Ea2Sa3p}', 'LKL{S4Y_H3LLO_T0_G1T_uMHNxKTios}', 'LKL{S4Y_H3LLO_T0_G1T_mxH2cv26Dw}', 'LKL{S4Y_H3LLO_T0_G1T_ir6p5zpQ9x}', 
'LKL{S4Y_H3LLO_T0_G1T_8nIgfhNZXg}', 'LKL{S4Y_H3LLO_T0_G1T_sOKD6kl2eU}', 'LKL{S4Y_H3LLO_T0_G1T_usVsylnNep}', 'LKL{S4Y_H3LLO_T0_G1T_ZVpCJZ4Ak0}', 'LKL{S4Y_H3LLO_T0_G1T_Ymsn7VnyRA}', 'LKL{S4Y_H3LLO_T0_G1T_pt7xJzfnMU}', 'LKL{S4Y_H3LLO_T0_G1T_QPONfg3OFN}', 'LKL{S4Y_H3LLO_T0_G1T_ZGYLgxCim0}', 'LKL{S4Y_H3LLO_T0_G1T_VLckDqXCpf}', 'LKL{S4Y_H3LLO_T0_G1T_2PFpjwSBMi}', 'LKL{S4Y_H3LLO_T0_G1T_lVcRDOFvP2}', 'LKL{S4Y_H3LLO_T0_G1T_Y8xwPDpwP1}', 'LKL{S4Y_H3LLO_T0_G1T_7LQkvFysG6}', 'LKL{S4Y_H3LLO_T0_G1T_4ytQ5KANK7}', 'LKL{S4Y_H3LLO_T0_G1T_tkztjFq35j}', 'LKL{S4Y_H3LLO_T0_G1T_TMX5W0pd7o}', 'LKL{S4Y_H3LLO_T0_G1T_LqsHVQwaVk}', 'LKL{S4Y_H3LLO_T0_G1T_5Gn1Vg6L3k}', 'LKL{S4Y_H3LLO_T0_G1T_SoLFYW41fe}', 'LKL{S4Y_H3LLO_T0_G1T_daQ04gTs3Z}', 'LKL{S4Y_H3LLO_T0_G1T_PnEddDGJxd}', 'LKL{S4Y_H3LLO_T0_G1T_0OTeRdqbzS}', 'LKL{S4Y_H3LLO_T0_G1T_Y2JDkGwl0t}', 'LKL{S4Y_H3LLO_T0_G1T_ER3yGEFBM7}', 'LKL{S4Y_H3LLO_T0_G1T_EwYf1QhDHA}', 'LKL{S4Y_H3LLO_T0_G1T_Genwrm4Rl9}', 'LKL{S4Y_H3LLO_T0_G1T_8ecYSAKSce}', 'LKL{S4Y_H3LLO_T0_G1T_WBRsPyOoA3}', 'LKL{S4Y_H3LLO_T0_G1T_98o8n2b12U}', 'LKL{S4Y_H3LLO_T0_G1T_GtEGaPGdaB}', 'LKL{S4Y_H3LLO_T0_G1T_S0FlcREUf1}', 'LKL{S4Y_H3LLO_T0_G1T_HlQ7nKMfKQ}', 'LKL{S4Y_H3LLO_T0_G1T_Vj3dcAdO6y}', 'LKL{S4Y_H3LLO_T0_G1T_TMG5sAkoRq}', 'LKL{S4Y_H3LLO_T0_G1T_awJOlrkI6k}', 'LKL{S4Y_H3LLO_T0_G1T_XfpQlFKM9T}', 'LKL{S4Y_H3LLO_T0_G1T_kSJM3c6s2b}', 'LKL{S4Y_H3LLO_T0_G1T_Tm3Ze70t4H}', 'LKL{S4Y_H3LLO_T0_G1T_9t7j2V5HMJ}', 'LKL{S4Y_H3LLO_T0_G1T_1n8aVhjfbP}', 'LKL{S4Y_H3LLO_T0_G1T_zkRHFIQ2ox}', 'LKL{S4Y_H3LLO_T0_G1T_HlMvQpRh0W}', 'LKL{S4Y_H3LLO_T0_G1T_iEjxtSvQcl}', 'LKL{S4Y_H3LLO_T0_G1T_d3JqytufLA}', 'LKL{S4Y_H3LLO_T0_G1T_qx3lGwDrUO}', 'LKL{S4Y_H3LLO_T0_G1T_SfZJGXVtcZ}', 'LKL{S4Y_H3LLO_T0_G1T_5ioDd4iN05}', 'LKL{S4Y_H3LLO_T0_G1T_PcN0NUZaxb}', 'LKL{S4Y_H3LLO_T0_G1T_4TKmCNDaH5}', 'LKL{S4Y_H3LLO_T0_G1T_fIoSjdEKyq}', 'LKL{S4Y_H3LLO_T0_G1T_jZ4cjOkkOw}', 'LKL{S4Y_H3LLO_T0_G1T_FVnYLeELcx}', 'LKL{S4Y_H3LLO_T0_G1T_tGF5Tv3FUZ}', 'LKL{S4Y_H3LLO_T0_G1T_Du8NSibidd}', 'LKL{S4Y_H3LLO_T0_G1T_akpJ90mGmF}', 
'LKL{S4Y_H3LLO_T0_G1T_n5QKkOVmII}', 'LKL{S4Y_H3LLO_T0_G1T_Hf3rErDyqs}', 'LKL{S4Y_H3LLO_T0_G1T_eda0s4bKx9}', 'LKL{S4Y_H3LLO_T0_G1T_5DcPLUKW2s}', 'LKL{S4Y_H3LLO_T0_G1T_8kNm2MAMxw}', 'LKL{S4Y_H3LLO_T0_G1T_tCOabGA3vP}', 'LKL{S4Y_H3LLO_T0_G1T_O2wPXjsigc}', 'LKL{S4Y_H3LLO_T0_G1T_stLZaHfTZU}', 'LKL{S4Y_H3LLO_T0_G1T_UhVwj3PPiI}', 'LKL{S4Y_H3LLO_T0_G1T_4baqBtJMpr}', 'LKL{S4Y_H3LLO_T0_G1T_Tvh9LQcz80}', 'LKL{S4Y_H3LLO_T0_G1T_qjWLyPXO91}', 'LKL{S4Y_H3LLO_T0_G1T_uwRcuf5jUc}', 'LKL{S4Y_H3LLO_T0_G1T_Kz42ABpHbo}', 'LKL{S4Y_H3LLO_T0_G1T_br9BrodPDd}', 'LKL{S4Y_H3LLO_T0_G1T_zq5UBRxr6f}', 'LKL{S4Y_H3LLO_T0_G1T_gtj8eIW28a}', 'LKL{S4Y_H3LLO_T0_G1T_C3piEVKX90}', 'LKL{S4Y_H3LLO_T0_G1T_KiRjj9d7tX}', 'LKL{S4Y_H3LLO_T0_G1T_AfFGOHyP72}', 'LKL{S4Y_H3LLO_T0_G1T_Wf3RZMc9De}', 'LKL{S4Y_H3LLO_T0_G1T_kuU61QSJ6Q}', 'LKL{S4Y_H3LLO_T0_G1T_Y74ZB77bQl}', 'LKL{S4Y_H3LLO_T0_G1T_E9FKM0ziZ5}', 'LKL{S4Y_H3LLO_T0_G1T_bMFzh0p60Z}', 'LKL{S4Y_H3LLO_T0_G1T_3zTwtCHFT3}', 'LKL{S4Y_H3LLO_T0_G1T_dVi1011mVd}', 'LKL{S4Y_H3LLO_T0_G1T_91LjncDLPH}', 'LKL{S4Y_H3LLO_T0_G1T_KwABfkIxVD}', 'LKL{S4Y_H3LLO_T0_G1T_ybdkTt0Tya}', 'LKL{S4Y_H3LLO_T0_G1T_0KPPV6HW8P}', 'LKL{S4Y_H3LLO_T0_G1T_hcNW8abtWb}', 'LKL{S4Y_H3LLO_T0_G1T_Z5eBfyz9xb}', 'LKL{S4Y_H3LLO_T0_G1T_KwQKLhgJ85}', 'LKL{S4Y_H3LLO_T0_G1T_sq5lDx6CCQ}'] | 934.25 | 7,208 | 0.827268 | 1,232 | 7,474 | 4.369318 | 0.180195 | 0.222924 | 0.408694 | 0.483002 | 0.594464 | 0 | 0 | 0 | 0 | 0 | 0 | 0.160128 | 0.034921 | 7,474 | 8 | 7,208 | 934.25 | 0.586164 | 0 | 0 | 0 | 0 | 0 | 0.85699 | 0.85699 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142857 | false | 0 | 0 | 0 | 0.571429 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 9 |
8393e4a2ddf5992fc0d335a66c559705c2b1dcbf | 5,476 | py | Python | test.py | Programmer-RD-AI/Intrested-or-Not-Product | 3dba3704e6bce4826dfe727a9fd60b506f4886bc | [
"Apache-2.0"
] | 2 | 2021-08-16T09:23:28.000Z | 2021-08-23T12:10:37.000Z | test.py | Programmer-RD-AI/Intrested-or-Not-Product | 3dba3704e6bce4826dfe727a9fd60b506f4886bc | [
"Apache-2.0"
] | null | null | null | test.py | Programmer-RD-AI/Intrested-or-Not-Product | 3dba3704e6bce4826dfe727a9fd60b506f4886bc | [
"Apache-2.0"
] | null | null | null | # this is just a kind of a backup file
from torchvision import models
from imports import *
from train import *
from torchvision.models import *
hp = Help_Funcs()
data, labels = hp.load_data()
import json

# Base directory holding the cleaned (pre-split) dataset; previously this
# absolute path was repeated verbatim in every save/load call.
CLEANED_DIR = (
    "/home/indika/Programming/Projects/Python/Artifical-Intelligence/PyTorch/"
    "Competions/Intrested-or-Not-Product-V2/data/cleaned/"
)

# Round-trip the labels through JSON so the on-disk copy is canonical.
with open(CLEANED_DIR + "labels.json", "w") as fp:
    json.dump(labels, fp)
with open(CLEANED_DIR + "labels.json", "r") as fp:
    labels = json.load(fp)

# NOTE(review): split_data is unpacked as (X_train, X_test, y_test, y_train)
# everywhere in this script -- unusual order, confirm against Help_Funcs.
X_train, X_test, y_test, y_train = hp.split_data(labels, data)

# Persist every split twice (.pt and .pth copies), exactly as before.
for name, tensor in (
    ("X_train", X_train),
    ("y_train", y_train),
    ("X_test", X_test),
    ("y_test", y_test),
):
    torch.save(tensor, CLEANED_DIR + name + ".pt")
    torch.save(tensor, CLEANED_DIR + name + ".pth")

# Reload the splits from the canonical .pt files.
X_train = torch.load(CLEANED_DIR + "X_train.pt")
y_train = torch.load(CLEANED_DIR + "y_train.pt")
X_test = torch.load(CLEANED_DIR + "X_test.pt")
y_test = torch.load(CLEANED_DIR + "y_test.pt")

# Earlier experiments (Clf, CNN, Clf_and_Conv1d, and TL_Model with other
# torchvision backbones: alexnet, squeezenet1_0/1, googlenet, resnext50_32x4d,
# wide_resnet50_2, mnasnet0_5, mobilenet_v2, shufflenet_v2_x0_5) used the same
# train_testing(...) call; they live in version control, not here.

config["batch_size"] = 32
config["optimizer"] = AdamW
config["lr"] = 0.001
config["criterion"] = BCELoss()
config["epochs"] = 12
hp = Help_Funcs()
data, labels = hp.load_data(
    directory="/home/indika/Programming/Projects/Python/Artifical-Intelligence/"
    "PyTorch/Competions/Intrested-or-Not-Product-V2/data/raw/"
)
X_train, X_test, y_test, y_train = hp.split_data(labels, data)

model = TL_Model(models.shufflenet_v2_x0_5)
model = train_testing(
    X_train, y_train, X_test, y_test, model, f"TL_Model-{config['epochs']}", config=config,
)

# Run the trained model over every image in the held-out test folder.
new_paths = [f"./data/test_data/{path}" for path in os.listdir("./data/test_data/")]
hp.get_multiple_preds(paths=new_paths, model=model)
torch.cuda.empty_cache()

# Second architecture variant (TL_Model_2) on two backbones; the cache is
# cleared around each step as in the original script.
for backbone, run_name in (
    (shufflenet_v2_x0_5, "TL_Model_2-shufflenet_v2_x0_5"),
    (mnasnet0_5, "TL_Model_2-mnasnet0_5"),
):
    model = TL_Model_2(backbone)
    torch.cuda.empty_cache()
    train_testing(
        X_train,
        y_train,
        X_test,
        y_test,
        model,
        run_name,
        config=config,
    )
    torch.cuda.empty_cache()
83a5870fbd2dee5ed4aa00a32168e6947f92f79e | 4,936 | py | Python | google_test/bunny_escape/bunnyEscape_comp.py | perlygatekeeper/glowing-robot | 7ef5eb089f552a1de309092606c95e805e6723a0 | [
"Artistic-2.0"
] | 2 | 2015-06-05T15:40:06.000Z | 2020-03-19T17:08:37.000Z | google_test/bunny_escape/bunnyEscape_comp.py | perlygatekeeper/glowing-robot | 7ef5eb089f552a1de309092606c95e805e6723a0 | [
"Artistic-2.0"
] | null | null | null | google_test/bunny_escape/bunnyEscape_comp.py | perlygatekeeper/glowing-robot | 7ef5eb089f552a1de309092606c95e805e6723a0 | [
"Artistic-2.0"
] | null | null | null | def pathFinder(x, y, map, steps, lastX, lastY, wall):
# count possible moves
options = []
if x-1 >= 0: # East
options.append([-1, 0])
if x+1 <= lastX: # West
options.append([ 1, 0])
if y-1 >= 0: # North
options.append([ 0,-1])
if y+1 <= lastY: # South
options.append([ 0, 1])
# increment step
steps += 1
for option in options:
# new x and y
newX = x + option[0]
newY = y + option[1]
# if statements
if map[newY][newX] == 0:
map[newY][newX] = steps
if newX != 0 or newY != 0:
pathFinder(newX, newY, map, steps, lastX, lastY, wall)
elif map[newY][newX] > 1 and steps <= map[newY][newX]:
map[newY][newX] = steps
if newX != 0 or newY != 0:
pathFinder(newX, newY, map, steps, lastX, lastY, wall)
elif ( map[newY][newX] == 1 or map[newY][newX] < 0 ) and not wall and (newX != lastX or newY != lastY):
wall = True
map[newY][newX] = steps * -1
pathFinder(newX, newY, map, steps, lastX, lastY, wall)
wall = False
elif map[newY][newX] > 1 and steps < abs(map[newY][newX]):
if(map[newY][newX] < 0):
map[newY][newX] = steps * -1
if(map[newY][newX] > 0):
map[newY][newX] = steps
if newX != 0 or newY != 0:
pathFinder(newX, newY, map, steps, lastX, lastY, wall)
def solution(map):
    """Return the length (in cells, counting both endpoints) of the shortest
    path from the bottom-right corner of ``map`` to (0, 0), allowing at most
    one wall to be removed along the way.

    ``map`` is a rectangular grid of 0 (passable) and 1 (wall) entries; it is
    left unmodified.
    """
    steps = 1
    lastX = len(map[0]) - 1
    lastY = len(map) - 1
    x = lastX
    y = lastY
    # Deep-copy the rows: the original ``map[:]`` only copied the outer list,
    # so pathFinder's in-place writes clobbered the caller's grid.
    testMap = [row[:] for row in map]
    testMap[y][x] = 1
    pathFinder(x, y, testMap, steps, lastX, lastY, False)
    return(testMap[0][0])
# Smaller example mazes (disabled); uncomment for quick sanity checks.
# print(solution([[0, 1], [0, 0]]))
# print(solution([[0, 1, 1, 0], [0, 0, 0, 1], [1, 1, 0, 0], [1, 1, 1, 0]]))
# print(solution([[0, 0, 0, 0, 0, 0], [1, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 1], [0, 1, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0]]))
# print(solution([[0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 0], [0, 0, 0, 0, 0, 0], [0, 1, 1, 1, 1, 1], [0, 1, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0]]))
# 20x20 comb-shaped maze stress test (also exercises the recursion depth).
print(solution([
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1],
[0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0],
[0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
]))
'''
print(solution([
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
]))
'''
| 46.566038 | 140 | 0.370543 | 1,142 | 4,936 | 1.601576 | 0.036778 | 0.345544 | 0.449426 | 0.52269 | 0.775287 | 0.734828 | 0.712958 | 0.691635 | 0.669765 | 0.645708 | 0 | 0.29678 | 0.364465 | 4,936 | 105 | 141 | 47.009524 | 0.286261 | 0.094814 | 0 | 0.393939 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030303 | false | 0 | 0 | 0 | 0.030303 | 0.015152 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
83a7187d9ecf8b59e89d062b3dfdce7f267f3779 | 6,937 | py | Python | aifeynman/S_change_output.py | neighthan/AI-Feynman | de3be5250c759877175ea1725dea64000794bd3a | [
"MIT"
] | 1 | 2020-12-17T08:10:38.000Z | 2020-12-17T08:10:38.000Z | aifeynman/S_change_output.py | neighthan/AI-Feynman | de3be5250c759877175ea1725dea64000794bd3a | [
"MIT"
] | null | null | null | aifeynman/S_change_output.py | neighthan/AI-Feynman | de3be5250c759877175ea1725dea64000794bd3a | [
"MIT"
] | null | null | null | import os
import numpy as np
from .S_run_bf_polyfit import run_bf_polyfit
def get_acos(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try arccos(y) of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with arccos(y),
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "acos".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # arccos is only defined on [-1, 1]; a failure here or downstream
        # aborts this transform without killing the overall search.
        data[:, -1] = np.arccos(data[:, -1])
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "acos",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
def get_asin(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try arcsin(y) of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with arcsin(y),
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "asin".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # arcsin is only defined on [-1, 1]; a failure here or downstream
        # aborts this transform without killing the overall search.
        data[:, -1] = np.arcsin(data[:, -1])
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "asin",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
def get_atan(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try arctan(y) of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with arctan(y),
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "atan".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # A failure here or downstream aborts this transform without
        # killing the overall search.
        data[:, -1] = np.arctan(data[:, -1])
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "atan",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
def get_cos(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try cos(y) of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with cos(y),
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "cos".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # A failure here or downstream aborts this transform without
        # killing the overall search.
        data[:, -1] = np.cos(data[:, -1])
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "cos",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
def get_exp(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try exp(y) of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with exp(y),
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "exp".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # A failure here or downstream aborts this transform without
        # killing the overall search.
        data[:, -1] = np.exp(data[:, -1])
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "exp",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
def get_inverse(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try 1/y of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with 1/y,
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "inverse".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # Zero entries produce inf under numpy division; a failure here or
        # downstream aborts this transform without killing the search.
        data[:, -1] = 1 / data[:, -1]
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "inverse",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
def get_log(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try log(y) of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with log(y),
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "log".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # log is only defined for y > 0; a failure here or downstream
        # aborts this transform without killing the overall search.
        data[:, -1] = np.log(data[:, -1])
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "log",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
def get_sin(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try sin(y) of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with sin(y),
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "sin".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # A failure here or downstream aborts this transform without
        # killing the overall search.
        data[:, -1] = np.sin(data[:, -1])
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "sin",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
def get_sqrt(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try sqrt(y) of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with sqrt(y),
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "sqrt".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # sqrt is only defined for y >= 0; a failure here or downstream
        # aborts this transform without killing the overall search.
        data[:, -1] = np.sqrt(data[:, -1])
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "sqrt",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
def get_squared(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try y**2 of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with y**2,
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "squared".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # A failure here or downstream aborts this transform without
        # killing the overall search.
        data[:, -1] = data[:, -1] ** 2
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "squared",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
def get_tan(
    pathdir,
    pathdir_write_to,
    filename,
    BF_try_time,
    BF_ops_file_type,
    PA,
    polyfit_deg=3,
):
    """Try tan(y) of the data's last column as a candidate output transform.

    Loads ``pathdir + filename``, replaces the last column y with tan(y),
    saves the transformed table to ``pathdir_write_to + filename`` and runs
    the brute-force/polyfit search on it, tagging results "tan".

    Returns the (possibly updated) Pareto frontier PA; on any failure during
    the transform/fit the original PA is returned unchanged (best-effort).
    """
    try:
        # Narrowed from a bare `except:` — only ignore filesystem errors
        # (typically "directory already exists").
        os.mkdir(pathdir_write_to)
    except OSError:
        pass
    data = np.loadtxt(pathdir + filename)
    try:
        # A failure here or downstream aborts this transform without
        # killing the overall search.
        data[:, -1] = np.tan(data[:, -1])
        np.savetxt(pathdir_write_to + filename, data)
        PA = run_bf_polyfit(
            pathdir,
            pathdir_write_to,
            filename,
            BF_try_time,
            BF_ops_file_type,
            PA,
            polyfit_deg,
            "tan",
        )
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        return PA
    return PA
| 18.647849 | 53 | 0.509154 | 794 | 6,937 | 4.123426 | 0.059194 | 0.161271 | 0.188149 | 0.221747 | 0.939524 | 0.932804 | 0.932804 | 0.847282 | 0.847282 | 0.847282 | 0 | 0.008464 | 0.403921 | 6,937 | 371 | 54 | 18.698113 | 0.783313 | 0 | 0 | 0.858859 | 0 | 0 | 0.006487 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033033 | false | 0.033033 | 0.009009 | 0 | 0.108108 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
83f891940b9b5a52ea7d510ea1e4b73222bb26b4 | 32,326 | py | Python | test/enum/test.py | katonori/cxxtags | 2730adb040d1334caa1a9ae7e468f498e094d254 | [
"BSD-3-Clause"
] | null | null | null | test/enum/test.py | katonori/cxxtags | 2730adb040d1334caa1a9ae7e468f498e094d254 | [
"BSD-3-Clause"
] | null | null | null | test/enum/test.py | katonori/cxxtags | 2730adb040d1334caa1a9ae7e468f498e094d254 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
import sys
import os
sys.path.append("../../src/")
sys.path.append("../util/")
import common
import commands
# Path to the cxxtags query binary under test, relative to this test dir.
CXXTAGS_QUERY = "../../bin/cxxtags_query"
# This is a Python 2 script; it takes exactly one argument: the tag database.
if len(sys.argv) != 2:
    print "usage: cmd db_file"
    exit(1)
db_dir = sys.argv[1]   # tag database location passed on the command line
cur_dir = os.getcwd()  # queries/answers reference main.cpp by absolute path
# Query list: every symbol location exercised in main.cpp is probed with the
# same three cxxtags operations -- "decl", "def" and "ref" -- in that order.
# The (line, column) pairs below are cursor positions in main.cpp; the
# trailing comments carry over the symbol names from the original listing.
_QUERY_LOCATIONS = [
    (4, 5), (5, 5), (6, 5), (7, 5),          # VAL0_0..VAL0_3 (global enum)
    (11, 5), (12, 5), (13, 5), (14, 5),      # VAL1_0..VAL1_3
    (17, 13),                                # check (file scope)
    (19, 35), (19, 43), (19, 51), (19, 59),  # VAL0_* uses in printf
    (22, 11),                                # namespace NS0
    (24, 9), (25, 9), (26, 9), (27, 9),      # NS0 VAL0_*
    (29, 17),                                # NS0 check
    (31, 42), (31, 50), (31, 58), (31, 66),  # NS0 VAL0_* uses
    (33, 11),                                # class C0 in NS0
    (36, 13), (37, 13), (38, 13), (39, 13),  # NS0::C0 VAL0_*
    (41, 14),                                # NS0::C0 check
    (43, 50), (43, 58), (43, 66), (43, 74),  # NS0::C0 VAL0_* uses
    (50, 13), (51, 13), (52, 13), (53, 13),  # NS0::C1 VAL0_*
    (55, 14),                                # NS0::C1 check
    (57, 50), (57, 58), (57, 66), (57, 74),  # NS0::C1 VAL0_* uses
    (64, 9), (65, 9), (66, 9), (67, 9),      # NS1 VAL0_*
    (69, 17),                                # NS1 check
    (71, 42), (71, 50), (71, 58), (71, 66),  # NS1 VAL0_* uses
    (76, 13), (77, 13), (78, 13), (79, 13),  # NS1::C0 VAL0_*
    (81, 14),                                # NS1::C0 check
    (83, 50), (83, 58), (83, 66), (83, 74),  # NS1::C0 VAL0_* uses
    (89, 14),                                # NS1::C1 check
    (91, 50), (91, 58), (91, 66), (91, 74),  # NS1::C1 VAL0_* uses
    (102, 7),                                # ::check() call in main
    (103, 10), (104, 10),                    # NS0::check / NS1::check calls
    (105, 9), (106, 9), (107, 9), (108, 9),  # c00/c01/c10/c11 .check() calls
    (111, 6),                                # enum namedEnum
    (112, 5), (113, 5),                      # VAL2_0, VAL2_1
    (115, 1), (115, 15),                     # namedEnum variable / VAL2_1 use
]
q_list = []
for _line, _col in _QUERY_LOCATIONS:
    for _op in ("decl", "def", "ref"):
        q_list.append(
            "%s %s %s/main.cpp %d %d" % (_op, db_dir, cur_dir, _line, _col)
        )
a_list = [
# 4 5
["VAL0_0|"+cur_dir+"/main.cpp|4|5| VAL0_0,"],
["VAL0_0|"+cur_dir+"/main.cpp|4|5| VAL0_0,"],
['VAL0_0|'+cur_dir+r'/main.cpp|19|35| printf(":: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 5 5
["VAL0_1|"+cur_dir+"/main.cpp|5|5| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|5|5| VAL0_1,"],
['VAL0_1|'+cur_dir+r'/main.cpp|19|43| printf(":: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 6 5
["VAL0_2|"+cur_dir+"/main.cpp|6|5| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|6|5| VAL0_2,"],
['VAL0_2|'+cur_dir+r'/main.cpp|19|51| printf(":: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 7 5
["VAL0_3|"+cur_dir+"/main.cpp|7|5| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|7|5| VAL0_3,"],
['VAL0_3|'+cur_dir+r'/main.cpp|19|59| printf(":: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 11 5
["VAL1_0|"+cur_dir+"/main.cpp|11|5| VAL1_0,"],
["VAL1_0|"+cur_dir+"/main.cpp|11|5| VAL1_0,"],
[''],
# 12 5
["VAL1_1|"+cur_dir+"/main.cpp|12|5| VAL1_1,"],
["VAL1_1|"+cur_dir+"/main.cpp|12|5| VAL1_1,"],
[''],
# 13 5
["VAL1_2|"+cur_dir+"/main.cpp|13|5| VAL1_2,"],
["VAL1_2|"+cur_dir+"/main.cpp|13|5| VAL1_2,"],
[''],
# 14 5
["VAL1_3|"+cur_dir+"/main.cpp|14|5| VAL1_3,"],
["VAL1_3|"+cur_dir+"/main.cpp|14|5| VAL1_3,"],
[''],
# 17 3
["check|"+cur_dir+"/main.cpp|17|13|static void check()"],
["check|"+cur_dir+"/main.cpp|17|13|static void check()"],
["check|"+cur_dir+"/main.cpp|102|7| ::check();"],
# 19 35
["VAL0_0|"+cur_dir+"/main.cpp|4|5| VAL0_0,"],
["VAL0_0|"+cur_dir+"/main.cpp|4|5| VAL0_0,"],
['VAL0_0|'+cur_dir+r'/main.cpp|19|35| printf(":: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 19 43
["VAL0_1|"+cur_dir+"/main.cpp|5|5| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|5|5| VAL0_1,"],
['VAL0_1|'+cur_dir+r'/main.cpp|19|43| printf(":: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 19 51
["VAL0_2|"+cur_dir+"/main.cpp|6|5| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|6|5| VAL0_2,"],
['VAL0_2|'+cur_dir+r'/main.cpp|19|51| printf(":: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 19 59
["VAL0_3|"+cur_dir+"/main.cpp|7|5| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|7|5| VAL0_3,"],
['VAL0_3|'+cur_dir+r'/main.cpp|19|59| printf(":: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 22 11
["NS0|"+cur_dir+"/main.cpp|22|11|namespace NS0 {"],
["NS0|"+cur_dir+"/main.cpp|22|11|namespace NS0 {"],
[
'NS0|'+cur_dir+r'/main.cpp|98|5| NS0::C0 c00;',
'NS0|'+cur_dir+r'/main.cpp|99|5| NS0::C1 c01;',
'NS0|'+cur_dir+r'/main.cpp|103|5| NS0::check();',
],
# 24 9
["VAL0_0|"+cur_dir+"/main.cpp|24|9| VAL0_0=10,"],
["VAL0_0|"+cur_dir+"/main.cpp|24|9| VAL0_0=10,"],
['VAL0_0|'+cur_dir+r'/main.cpp|31|42| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 25 9
["VAL0_1|"+cur_dir+"/main.cpp|25|9| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|25|9| VAL0_1,"],
['VAL0_1|'+cur_dir+r'/main.cpp|31|50| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 26 9
["VAL0_2|"+cur_dir+"/main.cpp|26|9| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|26|9| VAL0_2,"],
['VAL0_2|'+cur_dir+r'/main.cpp|31|58| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 27 9
["VAL0_3|"+cur_dir+"/main.cpp|27|9| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|27|9| VAL0_3,"],
['VAL0_3|'+cur_dir+r'/main.cpp|31|66| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 29 17
["check|"+cur_dir+"/main.cpp|29|17| static void check()"],
["check|"+cur_dir+"/main.cpp|29|17| static void check()"],
['check|'+cur_dir+r'/main.cpp|103|10| NS0::check();'],
# 31 42
["VAL0_0|"+cur_dir+"/main.cpp|24|9| VAL0_0=10,"],
["VAL0_0|"+cur_dir+"/main.cpp|24|9| VAL0_0=10,"],
['VAL0_0|'+cur_dir+r'/main.cpp|31|42| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 31 50
["VAL0_1|"+cur_dir+"/main.cpp|25|9| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|25|9| VAL0_1,"],
['VAL0_1|'+cur_dir+r'/main.cpp|31|50| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 31 58
["VAL0_2|"+cur_dir+"/main.cpp|26|9| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|26|9| VAL0_2,"],
['VAL0_2|'+cur_dir+r'/main.cpp|31|58| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 31 66
["VAL0_3|"+cur_dir+"/main.cpp|27|9| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|27|9| VAL0_3,"],
['VAL0_3|'+cur_dir+r'/main.cpp|31|66| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 33 11
["C0|"+cur_dir+"/main.cpp|33|11| class C0 {"],
["C0|"+cur_dir+"/main.cpp|33|11| class C0 {"],
[
'C0|'+cur_dir+r'/main.cpp|46|23| class C1 : public C0',
'C0|'+cur_dir+r'/main.cpp|98|10| NS0::C0 c00;',
],
# 36 13
["VAL0_0|"+cur_dir+"/main.cpp|36|13| VAL0_0 = 20,"],
["VAL0_0|"+cur_dir+"/main.cpp|36|13| VAL0_0 = 20,"],
['VAL0_0|'+cur_dir+r'/main.cpp|43|50| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 37 13
["VAL0_1|"+cur_dir+"/main.cpp|37|13| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|37|13| VAL0_1,"],
['VAL0_1|'+cur_dir+r'/main.cpp|43|58| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 38 13
["VAL0_2|"+cur_dir+"/main.cpp|38|13| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|38|13| VAL0_2,"],
['VAL0_2|'+cur_dir+r'/main.cpp|43|66| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 39 13
["VAL0_3|"+cur_dir+"/main.cpp|39|13| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|39|13| VAL0_3,"],
['VAL0_3|'+cur_dir+r'/main.cpp|43|74| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 41 14
["check|"+cur_dir+"/main.cpp|41|14| void check()"],
["check|"+cur_dir+"/main.cpp|41|14| void check()"],
['check|'+cur_dir+r'/main.cpp|105|9| c00.check();'],
# 43 50
["VAL0_0|"+cur_dir+"/main.cpp|36|13| VAL0_0 = 20,"],
["VAL0_0|"+cur_dir+"/main.cpp|36|13| VAL0_0 = 20,"],
['VAL0_0|'+cur_dir+r'/main.cpp|43|50| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 43 58
["VAL0_1|"+cur_dir+"/main.cpp|37|13| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|37|13| VAL0_1,"],
['VAL0_1|'+cur_dir+r'/main.cpp|43|58| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 43 66
["VAL0_2|"+cur_dir+"/main.cpp|38|13| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|38|13| VAL0_2,"],
['VAL0_2|'+cur_dir+r'/main.cpp|43|66| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 43 74
["VAL0_3|"+cur_dir+"/main.cpp|39|13| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|39|13| VAL0_3,"],
['VAL0_3|'+cur_dir+r'/main.cpp|43|74| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 50 13
["VAL0_0|"+cur_dir+"/main.cpp|50|13| VAL0_0 = 30,"],
["VAL0_0|"+cur_dir+"/main.cpp|50|13| VAL0_0 = 30,"],
['VAL0_0|'+cur_dir+r'/main.cpp|57|50| printf("NS0::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 51 13
["VAL0_1|"+cur_dir+"/main.cpp|51|13| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|51|13| VAL0_1,"],
['VAL0_1|'+cur_dir+r'/main.cpp|57|58| printf("NS0::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 52 13
["VAL0_2|"+cur_dir+"/main.cpp|52|13| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|52|13| VAL0_2,"],
['VAL0_2|'+cur_dir+r'/main.cpp|57|66| printf("NS0::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 53 13
["VAL0_3|"+cur_dir+"/main.cpp|53|13| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|53|13| VAL0_3,"],
['VAL0_3|'+cur_dir+r'/main.cpp|57|74| printf("NS0::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 55 14
["check|"+cur_dir+"/main.cpp|55|14| void check()"],
["check|"+cur_dir+"/main.cpp|55|14| void check()"],
['check|'+cur_dir+r'/main.cpp|106|9| c01.check();'],
# 57 50
["VAL0_0|"+cur_dir+"/main.cpp|50|13| VAL0_0 = 30,"],
["VAL0_0|"+cur_dir+"/main.cpp|50|13| VAL0_0 = 30,"],
['VAL0_0|'+cur_dir+r'/main.cpp|57|50| printf("NS0::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 57 58
["VAL0_1|"+cur_dir+"/main.cpp|51|13| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|51|13| VAL0_1,"],
['VAL0_1|'+cur_dir+r'/main.cpp|57|58| printf("NS0::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 57 66
["VAL0_2|"+cur_dir+"/main.cpp|52|13| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|52|13| VAL0_2,"],
['VAL0_2|'+cur_dir+r'/main.cpp|57|66| printf("NS0::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 57 74
["VAL0_3|"+cur_dir+"/main.cpp|53|13| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|53|13| VAL0_3,"],
['VAL0_3|'+cur_dir+r'/main.cpp|57|74| printf("NS0::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 64 9
["VAL0_0|"+cur_dir+"/main.cpp|64|9| VAL0_0 = 40,"],
["VAL0_0|"+cur_dir+"/main.cpp|64|9| VAL0_0 = 40,"],
['VAL0_0|'+cur_dir+r'/main.cpp|71|42| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 65 9
["VAL0_1|"+cur_dir+"/main.cpp|65|9| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|65|9| VAL0_1,"],
['VAL0_1|'+cur_dir+r'/main.cpp|71|50| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 66 9
["VAL0_2|"+cur_dir+"/main.cpp|66|9| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|66|9| VAL0_2,"],
['VAL0_2|'+cur_dir+r'/main.cpp|71|58| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 67 9
["VAL0_3|"+cur_dir+"/main.cpp|67|9| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|67|9| VAL0_3,"],
['VAL0_3|'+cur_dir+r'/main.cpp|71|66| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 69 17
["check|"+cur_dir+"/main.cpp|69|17| static void check()"],
["check|"+cur_dir+"/main.cpp|69|17| static void check()"],
['check|'+cur_dir+r'/main.cpp|104|10| NS1::check();'],
# 71 42
["VAL0_0|"+cur_dir+"/main.cpp|64|9| VAL0_0 = 40,"],
["VAL0_0|"+cur_dir+"/main.cpp|64|9| VAL0_0 = 40,"],
['VAL0_0|'+cur_dir+r'/main.cpp|71|42| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 71 50
["VAL0_1|"+cur_dir+"/main.cpp|65|9| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|65|9| VAL0_1,"],
['VAL0_1|'+cur_dir+r'/main.cpp|71|50| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 71 58
["VAL0_2|"+cur_dir+"/main.cpp|66|9| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|66|9| VAL0_2,"],
['VAL0_2|'+cur_dir+r'/main.cpp|71|58| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 71 66
["VAL0_3|"+cur_dir+"/main.cpp|67|9| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|67|9| VAL0_3,"],
['VAL0_3|'+cur_dir+r'/main.cpp|71|66| printf("NS0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);'],
# 76 13
["VAL0_0|"+cur_dir+"/main.cpp|76|13| VAL0_0 = 50,"],
["VAL0_0|"+cur_dir+"/main.cpp|76|13| VAL0_0 = 50,"],
[
'VAL0_0|'+cur_dir+r'/main.cpp|83|50| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_0|'+cur_dir+r'/main.cpp|91|50| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 77 13
["VAL0_1|"+cur_dir+"/main.cpp|77|13| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|77|13| VAL0_1,"],
[
'VAL0_1|'+cur_dir+r'/main.cpp|83|58| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_1|'+cur_dir+r'/main.cpp|91|58| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 78 13
["VAL0_2|"+cur_dir+"/main.cpp|78|13| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|78|13| VAL0_2,"],
[
'VAL0_2|'+cur_dir+r'/main.cpp|83|66| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_2|'+cur_dir+r'/main.cpp|91|66| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 79 13
["VAL0_3|"+cur_dir+"/main.cpp|79|13| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|79|13| VAL0_3,"],
[
'VAL0_3|'+cur_dir+r'/main.cpp|83|74| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_3|'+cur_dir+r'/main.cpp|91|74| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 81 14
["check|"+cur_dir+"/main.cpp|81|14| void check()"],
["check|"+cur_dir+"/main.cpp|81|14| void check()"],
['check|'+cur_dir+r'/main.cpp|107|9| c10.check();'],
# 83 50
["VAL0_0|"+cur_dir+"/main.cpp|76|13| VAL0_0 = 50,"],
["VAL0_0|"+cur_dir+"/main.cpp|76|13| VAL0_0 = 50,"],
[
'VAL0_0|'+cur_dir+r'/main.cpp|83|50| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_0|'+cur_dir+r'/main.cpp|91|50| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 83 58
["VAL0_1|"+cur_dir+"/main.cpp|77|13| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|77|13| VAL0_1,"],
[
'VAL0_1|'+cur_dir+r'/main.cpp|83|58| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_1|'+cur_dir+r'/main.cpp|91|58| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 83 66
["VAL0_2|"+cur_dir+"/main.cpp|78|13| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|78|13| VAL0_2,"],
[
'VAL0_2|'+cur_dir+r'/main.cpp|83|66| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_2|'+cur_dir+r'/main.cpp|91|66| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 83 74
["VAL0_3|"+cur_dir+"/main.cpp|79|13| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|79|13| VAL0_3,"],
[
'VAL0_3|'+cur_dir+r'/main.cpp|83|74| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_3|'+cur_dir+r'/main.cpp|91|74| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 89 14
["check|"+cur_dir+"/main.cpp|89|14| void check()"],
["check|"+cur_dir+"/main.cpp|89|14| void check()"],
['check|'+cur_dir+r'/main.cpp|108|9| c11.check();'],
# 91 50
["VAL0_0|"+cur_dir+"/main.cpp|76|13| VAL0_0 = 50,"],
["VAL0_0|"+cur_dir+"/main.cpp|76|13| VAL0_0 = 50,"],
[
'VAL0_0|'+cur_dir+r'/main.cpp|83|50| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_0|'+cur_dir+r'/main.cpp|91|50| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 91 58
["VAL0_1|"+cur_dir+"/main.cpp|77|13| VAL0_1,"],
["VAL0_1|"+cur_dir+"/main.cpp|77|13| VAL0_1,"],
[
'VAL0_1|'+cur_dir+r'/main.cpp|83|58| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_1|'+cur_dir+r'/main.cpp|91|58| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 91 66
["VAL0_2|"+cur_dir+"/main.cpp|78|13| VAL0_2,"],
["VAL0_2|"+cur_dir+"/main.cpp|78|13| VAL0_2,"],
[
'VAL0_2|'+cur_dir+r'/main.cpp|83|66| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_2|'+cur_dir+r'/main.cpp|91|66| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 91 74
["VAL0_3|"+cur_dir+"/main.cpp|79|13| VAL0_3,"],
["VAL0_3|"+cur_dir+"/main.cpp|79|13| VAL0_3,"],
[
'VAL0_3|'+cur_dir+r'/main.cpp|83|74| printf("NS0::C0:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
'VAL0_3|'+cur_dir+r'/main.cpp|91|74| printf("NS1::C1:: %d, %d, %d, %d\n", VAL0_0, VAL0_1, VAL0_2, VAL0_3);',
],
# 102 7
["check|"+cur_dir+"/main.cpp|17|13|static void check()"],
["check|"+cur_dir+"/main.cpp|17|13|static void check()"],
["check|"+cur_dir+"/main.cpp|102|7| ::check();"],
# 103 10
["check|"+cur_dir+"/main.cpp|29|17| static void check()"],
["check|"+cur_dir+"/main.cpp|29|17| static void check()"],
['check|'+cur_dir+r'/main.cpp|103|10| NS0::check();'],
# 104 10
["check|"+cur_dir+"/main.cpp|69|17| static void check()"],
["check|"+cur_dir+"/main.cpp|69|17| static void check()"],
['check|'+cur_dir+r'/main.cpp|104|10| NS1::check();'],
# 105 9
["check|"+cur_dir+"/main.cpp|41|14| void check()"],
["check|"+cur_dir+"/main.cpp|41|14| void check()"],
['check|'+cur_dir+r'/main.cpp|105|9| c00.check();'],
# 106 9
["check|"+cur_dir+"/main.cpp|55|14| void check()"],
["check|"+cur_dir+"/main.cpp|55|14| void check()"],
['check|'+cur_dir+r'/main.cpp|106|9| c01.check();'],
# 107 9
["check|"+cur_dir+"/main.cpp|81|14| void check()"],
["check|"+cur_dir+"/main.cpp|81|14| void check()"],
['check|'+cur_dir+r'/main.cpp|107|9| c10.check();'],
# 108 9
["check|"+cur_dir+"/main.cpp|89|14| void check()"],
["check|"+cur_dir+"/main.cpp|89|14| void check()"],
['check|'+cur_dir+r'/main.cpp|108|9| c11.check();'],
# 111 6
["namedEnum|"+cur_dir+"/main.cpp|111|6|enum namedEnum {"],
["namedEnum|"+cur_dir+"/main.cpp|111|6|enum namedEnum {"],
["namedEnum|"+cur_dir+"/main.cpp|115|1|namedEnum e = VAL2_1;"],
# 112 5
["VAL2_0|"+cur_dir+"/main.cpp|112|5| VAL2_0,"],
["VAL2_0|"+cur_dir+"/main.cpp|112|5| VAL2_0,"],
[""],
# 113 5
["VAL2_1|"+cur_dir+"/main.cpp|113|5| VAL2_1,"],
["VAL2_1|"+cur_dir+"/main.cpp|113|5| VAL2_1,"],
["VAL2_1|"+cur_dir+"/main.cpp|115|15|namedEnum e = VAL2_1;"],
# 115 5
["namedEnum|"+cur_dir+"/main.cpp|111|6|enum namedEnum {"],
["namedEnum|"+cur_dir+"/main.cpp|111|6|enum namedEnum {"],
["namedEnum|"+cur_dir+"/main.cpp|115|1|namedEnum e = VAL2_1;"],
# 115 15
["VAL2_1|"+cur_dir+"/main.cpp|113|5| VAL2_1,"],
["VAL2_1|"+cur_dir+"/main.cpp|113|5| VAL2_1,"],
["VAL2_1|"+cur_dir+"/main.cpp|115|15|namedEnum e = VAL2_1;"],
]
err = 0
i = 0
for q in q_list:
err += common.test_one(q, a_list[i])
i+=1
if err == 0:
print "OK"
else:
print "ERR: %d"%(err)
exit(err)
| 44.282192 | 121 | 0.57412 | 6,316 | 32,326 | 2.700602 | 0.023116 | 0.175177 | 0.243888 | 0.317055 | 0.971683 | 0.970511 | 0.966465 | 0.960251 | 0.942018 | 0.826464 | 0 | 0.140523 | 0.147838 | 32,326 | 729 | 122 | 44.342936 | 0.478673 | 0.093671 | 0 | 0.478343 | 0 | 0.120527 | 0.62703 | 0.009088 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.007533 | null | null | 0.126177 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
f7d31360a847ceecd5a40f5828cf345dee4d87fa | 107 | py | Python | info/api/passport/__init__.py | googleliyang/flask_house_rent | 93299086058ef5e6e32759c15fba2ade21c992b5 | [
"Apache-2.0"
] | null | null | null | info/api/passport/__init__.py | googleliyang/flask_house_rent | 93299086058ef5e6e32759c15fba2ade21c992b5 | [
"Apache-2.0"
] | 4 | 2021-03-18T22:19:24.000Z | 2022-03-11T23:40:16.000Z | info/api/passport/__init__.py | googleliyang/flask_house_rent | 93299086058ef5e6e32759c15fba2ade21c992b5 | [
"Apache-2.0"
] | null | null | null | from flask import Blueprint
passport_print = Blueprint('passport_print', __name__)
from . import passport | 21.4 | 54 | 0.813084 | 13 | 107 | 6.230769 | 0.538462 | 0.419753 | 0.54321 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.121495 | 107 | 5 | 55 | 21.4 | 0.861702 | 0 | 0 | 0 | 0 | 0 | 0.12963 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.666667 | 0.666667 | 0 | 0.666667 | 0.666667 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 7 |
f7f0e954bf65a676bf7fbe930d3f2673849e3da9 | 60,655 | py | Python | tracer/probes_insertion.py | pbotros/TRACER | 269e17ca27fe0fb78c30484d8685d119caab5dbb | [
"MIT"
] | null | null | null | tracer/probes_insertion.py | pbotros/TRACER | 269e17ca27fe0fb78c30484d8685d119caab5dbb | [
"MIT"
] | null | null | null | tracer/probes_insertion.py | pbotros/TRACER | 269e17ca27fe0fb78c30484d8685d119caab5dbb | [
"MIT"
] | null | null | null | # Import libraries
import math
import os
import os.path
import numpy as np
import matplotlib
# matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
import pickle
from collections import OrderedDict
from tabulate import tabulate
import mplcursors
from scipy.spatial import distance
from six.moves import input
# fit the probe
from skspatial.objects import Line
from .ObjSave import probe_obj, save_probe_insertion
from .index_tracker import IndexTracker, IndexTracker_g, IndexTracker_pi, IndexTracker_b, IndexTracker_c, IndexTracker_pi_col
class ProbesInsertion(object):
"""
Purpose
-------------
To insert probes, pre experiment.
Inputs
-------------
atlas :
probe_folder :
"""
    def __init__(self, atlas, probe_folder):
        """Set up the interactive atlas viewer used to plan probe insertions.

        Parameters
        ----------
        atlas :
            Atlas object; this class reads atlas_data, segmentation_data,
            pixdim, labels_index/labels_name/labels_color and display arrays.
        probe_folder : str
            Existing directory where probe objects are saved/loaded (.pkl).

        Raises
        ------
        Exception
            If probe_folder does not exist.

        Side effects: prompts on stdin for the viewing plane, prints the key
        bindings, and opens the matplotlib viewer window.
        """
        self.atlas = atlas
        self.probe_folder = probe_folder
        if not os.path.exists(self.probe_folder):
            raise Exception('Please give the correct folder.')
        self.probe_colors = ['purple', 'blue', 'yellow', 'orange', 'red', 'green']
        # PROBE geometry (units: mm)
        self.max_probe_length = 10 # maximum length of probe shank is 10mm
        self.probe_widht = 0.07
        self.probe_thickness = 0.024
        self.probe_tip_length = 0.175
        self.total_electrodes = 960 # total number of recording sites
        self.electrode = 0.012 # Electrode size is 12x12 micron
        self.vert_el_dist = 0.02
        # There are 2 electrodes every 0.02 mm
        # Lists for the points clicked in atlas and histology
        self.coords_atlas = []
        # One temporary click list per probe color (order matches probe_colors)
        self.coords_probe_temp_w = []
        self.coords_probe_temp_g = []
        self.coords_probe_temp_p = []
        self.coords_probe_temp_b = []
        self.coords_probe_temp_y = []
        self.coords_probe_temp_o = []
        self.coords_probe_temp_r = []
        # Object for clicked probes
        self.coords_probe = probe_obj()
        # List of probe points
        self.p_probe = []
        # Initialize probe counter and selecter
        self.probe_counter = 0
        self.probe_selecter = 0
        self.probe_selecter_u = 0
        self.Pp = []
        # Display toggles (0 = off, 1 = on); see on_key for the key bindings
        self.flag_color = 0
        self.flag_boundaries = 0
        self.flag_names = 0
        self.flag = 0 # becomes 1 when probes are re-loaded from disk (re_load_probes)
        self.plane = str(input('Select the plane: coronal (c), sagittal (s), or horizontal (h): ')).lower()
        # Check if the input is correct
        while self.plane != 'c' and self.plane != 's' and self.plane != 'h':
            print('Error: Wrong plane name \n')
            self.plane = str(input('Select the plane: coronal (c), sagittal (s), or horizontal (h): ')).lower()
        print('\nControls: \n')
        print('--------------------------- \n')
        print('scroll: move between slices \n')
        print('g: add/remove gridlines \n')
        print('b: add/remove name of current region \n')
        print('u: add/remove viewing boundaries \n')
        print('v: add/remove atlas color \n')
        print('r: toggle mode where clicks are logged for probe \n')
        print('n: trace a new probe \n')
        print('e: save probes \n')
        print('w: enable/disable probe viewer mode for current probe \n')
        print('c: delete most recent probe point \n')
        print('--------------------------- \n')
        # Display the ATLAS
        # resolution (dots per inch derived from the voxel size in mm)
        self.dpi_atl = 25.4 / self.atlas.pixdim
        # Bregma coordinates (voxel indices 653/440/246 are the bregma position)
        self.textstr = 'Bregma (mm): c = %.3f, h = %.3f, s = %.3f \nBregma (voxels): c = 653, h = 440, s = 246' % (653 * self.atlas.pixdim, 440 * self.atlas.pixdim, 246 * self.atlas.pixdim)
        # these are matplotlib.patch.Patch properties
        self.props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
        # Figure
        self.fig, self.ax = plt.subplots(1, 1) #, figsize=(float(d1)/dpi_atl,float(d2)/dpi_atl), dpi=dpi_atl)
        # scroll cursor
        self.tracker = IndexTracker(self.ax, self.atlas.atlas_data, self.atlas.pixdim, self.plane)
        self.fig.canvas.mpl_connect('scroll_event', self.tracker.onscroll)
        # place a text box with bregma coordinates in bottom left in axes coords
        self.ax.text(0.03, 0.03, self.textstr, transform=self.ax.transAxes, fontsize=6, verticalalignment='bottom', bbox=self.props)
        # Window dimensions depend on the chosen viewing plane
        if self.plane == 'c':
            # dimensions
            self.d1 = 512
            self.d2 = 512
            self.ax.format_coord = self.format_coord
        elif self.plane == 's':
            # dimensions
            self.d2 = 1024
            self.d1 = 512
            self.ax.format_coord = self.format_coord
        elif self.plane == 'h':
            # dimensions
            self.d2 = 512
            self.d1 = 1024
            self.ax.format_coord = self.format_coord
        plt.show()
        # Fix size and location of the figure window
        # NOTE(review): setGeometry assumes a Qt backend — confirm matplotlib backend
        self.mngr = plt.get_current_fig_manager()
        self.mngr.window.setGeometry(600, 200, self.d2 * 2, self.d1 * 2)
        self.fig.canvas.mpl_connect('key_press_event', self.on_key)
def re_load_probes(self, probe_name):
print('\nLoad probe')
c_file = open(os.path.join(self.probe_folder, probe_name + '.pkl'), "rb")
tdata = pickle.load(c_file)
self.Pp.append(tdata)
c_file.close()
self.flag = 1
print('Probe loaded')
def format_coord(self, x, y):
# display the coordinates relative to the bregma when hovering with the cursor
if self.plane == 'c':
AP = self.tracker.ind * self.atlas.pixdim - 653 * self.atlas.pixdim
ML = x - 246 * self.atlas.pixdim
Z = y - 440 * self.atlas.pixdim
if ML > 0:
return 'AP=%1.4f, ML=R%1.4f, z=%1.4f' % (AP, abs(ML), Z)
else:
return 'AP=%1.4f, ML=L%1.4f, z=%1.4f' % (AP, abs(ML), Z)
elif self.plane == 's':
AP = x - 653 * self.atlas.pixdim
ML = self.tracker.ind * self.atlas.pixdim - 246 * self.atlas.pixdim
Z = y - 440 * self.atlas.pixdim
if ML > 0:
return 'AP=%1.4f, ML=R%1.4f, z=%1.4f' % (AP, abs(ML), Z)
else:
return 'AP=%1.4f, ML=L%1.4f, z=%1.4f' % (AP, abs(ML), Z)
elif self.plane == 'h':
AP = y - 653 * self.atlas.pixdim
ML = x - 246 * self.atlas.pixdim
Z = self.tracker.ind * self.atlas.pixdim - 440 * self.atlas.pixdim
if ML > 0:
return 'AP=%1.4f, ML=R%1.4f, z=%1.4f' % (AP, abs(ML), Z)
else:
return 'AP=%1.4f, ML=L%1.4f, z=%1.4f' % (AP, abs(ML), Z)
def show_annotation(self, sel):
if self.flag_names == 1:
sel.annotation.set_visible(True)
elif self.flag_names == 0:
sel.annotation.set_visible(False)
xi, yi = sel.target / self.atlas.pixdim
if self.plane == 'c':
if np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[int(math.modf(xi)[1]), self.tracker.ind, int(math.modf(yi)[1])], axis=1)).size:
self.Text = self.atlas.labels_name[np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[
int(math.modf(xi)[1]), self.tracker.ind, int(math.modf(yi)[1])], axis=1))[0, 0]]
else:
# display nothing
self.Text = ' '
elif self.plane == 's':
if np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[self.tracker.ind, int(math.modf(xi)[1]), int(math.modf(yi)[1])], axis=1)).size:
self.Text = self.atlas.labels_name[np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[
self.tracker.ind, int(math.modf(xi)[1]), int(math.modf(yi)[1])], axis=1))[0, 0]]
else:
# display nothing
self.Text = ' '
elif self.plane == 'h':
if np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[int(math.modf(xi)[1]), int(math.modf(yi)[1]), self.tracker.ind], axis=1)).size:
self.Text = self.atlas.labels_name[np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[
int(math.modf(xi)[1]), int(math.modf(yi)[1]), self.tracker.ind], axis=1))[0, 0]]
else:
# display nothing
self.Text = ' '
sel.annotation.set_text(self.Text)
def onclick_probe(self, event):
px, py = event.xdata, event.ydata
# assign global variable to access outside of function
if self.probe_counter == 0:
self.coords_probe_temp_w.append((px, py, self.tracker.ind))
self.p_probe.extend(
self.ax.plot(event.xdata, event.ydata, color=self.probe_colors[self.probe_counter], marker='o', markersize=2))
setattr(self.coords_probe, self.probe_colors[self.probe_counter], self.coords_probe_temp_w)
elif self.probe_counter == 1:
self.coords_probe_temp_g.append((px, py, self.tracker.ind))
self.p_probe.extend(
self.ax.plot(event.xdata, event.ydata, color=self.probe_colors[self.probe_counter], marker='o', markersize=2))
setattr(self.coords_probe, self.probe_colors[self.probe_counter], self.coords_probe_temp_g)
elif self.probe_counter == 2:
self.coords_probe_temp_p.append((px, py, self.tracker.ind))
self.p_probe.extend(
self.ax.plot(event.xdata, event.ydata, color=self.probe_colors[self.probe_counter], marker='o', markersize=2))
setattr(self.coords_probe, self.probe_colors[self.probe_counter], self.coords_probe_temp_p)
elif self.probe_counter == 3:
self.coords_probe_temp_b.append((px, py, self.tracker.ind))
self.p_probe.extend(
self.ax.plot(event.xdata, event.ydata, color=self.probe_colors[self.probe_counter], marker='o', markersize=2))
setattr(self.coords_probe, self.probe_colors[self.probe_counter], self.coords_probe_temp_b)
elif self.probe_counter == 4:
self.coords_probe_temp_y.append((px, py, self.tracker.ind))
self.p_probe.extend(
self.ax.plot(event.xdata, event.ydata, color=self.probe_colors[self.probe_counter], marker='o', markersize=2))
setattr(self.coords_probe, self.probe_colors[self.probe_counter], self.coords_probe_temp_y)
elif self.probe_counter == 5:
self.coords_probe_temp_o.append((px, py, self.tracker.ind))
self.p_probe.extend(
self.ax.plot(event.xdata, event.ydata, color=self.probe_colors[self.probe_counter], marker='o', markersize=2))
setattr(self.coords_probe, self.probe_colors[self.probe_counter], self.coords_probe_temp_o)
self.fig.canvas.draw()
return
    def on_key2(self, event):
        """Secondary key handler, connected while probe registration ('r') is on.

        'n' advances to the next probe color (capped at the number of
        available colors); 'c' deletes the most recently clicked probe point,
        stepping back to the previous probe when the current one is empty.
        """
        if event.key == 'n':
            # add a new probe
            if self.probe_counter + 1 < len(self.probe_colors):
                self.probe_counter += 1
                print('probe %d added (%s)' % (self.probe_counter + 1, self.probe_colors[self.probe_counter]))
            else:
                print('Cannot add more probes')
                # NOTE(review): this sets the counter one past the last valid
                # index; onclick_probe then ignores further clicks.
                self.probe_counter = len(self.probe_colors)
        elif event.key == 'c':
            print('Delete clicked probe point')
            # Only act if at least the first probe has recorded points.
            if len(getattr(self.coords_probe, self.probe_colors[0])) != 0:
                if len(getattr(self.coords_probe, self.probe_colors[self.probe_counter])) != 0:
                    getattr(self.coords_probe, self.probe_colors[self.probe_counter]).pop(-1) # remove the point from the list
                    self.p_probe[-1].remove() # remove the point from the plot
                    self.fig.canvas.draw()
                    self.p_probe.pop(-1)
                elif len(getattr(self.coords_probe, self.probe_colors[self.probe_counter])) == 0:
                    # Current probe is empty: step back and delete from the
                    # previous probe instead.
                    self.probe_counter -= 1
                    try:
                        getattr(self.coords_probe, self.probe_colors[self.probe_counter]).pop(-1) # remove the point from the list
                        self.p_probe[-1].remove() # remove the point from the plot
                        self.fig.canvas.draw()
                        self.p_probe.pop(-1)
                    except:
                        # Best-effort delete: deliberately swallows the error
                        # when there is nothing left to remove.
                        pass
def on_key(self, event):
if event.key == 'b':
# Show the names of the regions
self.cursor = mplcursors.cursor(hover=True)
self.cursor.connect('add', lambda sel: self.show_annotation(sel))
if self.flag_names == 0:
print("Show region's name on")
self.flag_names = 1
elif self.flag_names == 1:
print("Show region's name off")
self.flag_names = 0
elif event.key == 'u':
if self.flag_boundaries == 0:
print('View boundaries on')
self.tracker4 = IndexTracker_b(self.ax, self.atlas.Edges, self.atlas.pixdim, self.plane, self.tracker.ind)
# print(self.atlas.Edges[250, self.tracker.ind, 250])
self.fig.canvas.mpl_connect('scroll_event', self.tracker4.onscroll)
# self.tracker4 = IndexTracker_c(self.ax, self.atlas.cv_plot, self.atlas.pixdim, self.plane, self.tracker.ind)
# self.fig.canvas.mpl_connect('scroll_event', self.tracker4.onscroll)
plt.show()
self.flag_boundaries = 1
elif self.flag_boundaries == 1:
print('View boundaries off')
self.fig.delaxes(self.ax)
self.ax.clear()
plt.draw()
self.fig.add_axes(self.ax)
plt.draw()
self.tracker = IndexTracker(self.ax, self.atlas.atlas_data, self.atlas.pixdim, self.plane, self.tracker.ind)
print(self.atlas.atlas_data[250, self.tracker.ind, 250])
self.fig.canvas.mpl_connect('scroll_event', self.tracker.onscroll)
plt.show()
self.flag_boundaries = 0
elif event.key == 'v':
if self.flag_color == 0:
print('View colors on')
self.tracker3 = IndexTracker_c(self.ax, self.atlas.cv_plot, self.atlas.pixdim, self.plane, self.tracker.ind)
self.fig.canvas.mpl_connect('scroll_event', self.tracker3.onscroll)
plt.show()
self.flag_color = 1
elif self.flag_color == 1:
print('View colors off')
self.fig.delaxes(self.ax)
self.ax.clear()
plt.draw()
self.fig.add_axes(self.ax)
plt.draw()
self.tracker = IndexTracker(self.ax, self.atlas.atlas_data, self.atlas.pixdim, self.plane, self.tracker.ind)
self.fig.canvas.mpl_connect('scroll_event', self.tracker.onscroll)
# self.fig.canvas.mpl_connect("motion_notify_event", self.show_annotation)
plt.show()
self.flag_color = 0
elif event.key == 'r':
print('Register probe %d' % self.probe_counter)
# Call click func
self.fig.canvas.mpl_connect('button_press_event', self.onclick_probe)
self.fig.canvas.mpl_connect('key_press_event', self.on_key2)
elif event.key == 'e':
print('\n Save probe')
# Create and save slice, clicked probes
print(self.coords_probe)
print(self.probe_counter)
P = save_probe_insertion(self.coords_probe, self.plane, self.probe_counter) # Saving the object
probe_name = 'Probe%d.pkl' % self.probe_counter
file_name = os.path.join(self.probe_folder, probe_name)
a_file = open(file_name, "wb")
pickle.dump(P, a_file)
a_file.close()
print('Probe saved')
elif event.key == 'w':
# if the probe if uploaded from a file
if self.flag == 1:
# If I have several probes
for j in range(len(self.probe_colors)):
for k in range(len(self.Pp)):
try:
PC = getattr(self.Pp[k].Probe, self.probe_colors[j])
p_x = []
p_y = []
probe_slice = []
for i in range(len(PC)):
p_x.append(PC[i][0])
p_y.append(PC[i][1])
probe_slice.append(PC[i][2])
unique_slice = list(OrderedDict.fromkeys(probe_slice))
# get the probe coordinates and the region's names
probe_x = []
probe_y = []
probe_z = []
if self.Pp[k].Plane == 'c':
for i in range(len(PC)):
probe_x.append(PC[i][0])
probe_y.append(PC[i][2] * self.atlas.pixdim)
probe_z.append(PC[i][1])
elif self.Pp[k].Plane == 's':
for i in range(len(PC)):
probe_x.append(PC[i][2] * self.atlas.pixdim)
probe_y.append(PC[i][0])
probe_z.append(PC[i][1])
elif self.Pp[k].Plane == 'h':
for i in range(len(PC)):
probe_x.append(PC[i][0])
probe_y.append(PC[i][1])
probe_z.append(PC[i][2] * self.atlas.pixdim)
pts = np.array((probe_x, probe_y, probe_z)).T
line_fit = Line.best_fit(pts)
# display the probe in a separate window
self.fig_probe, self.ax_probe = plt.subplots(1, 1)
self.trackerp = IndexTracker_pi(self.ax_probe, self.atlas.atlas_data, self.atlas.pixdim, self.Pp[k].Plane, probe_slice[0], unique_slice, p_x, p_y, self.probe_colors, self.probe_selecter_u, line_fit)
self.fig_probe.canvas.mpl_connect('scroll_event', self.trackerp.onscroll)
self.ax_probe.text(0.05, 0.95, self.textstr, transform=self.ax_probe.transAxes, fontsize=6, verticalalignment='bottom', bbox=self.props)
self.ax_probe.format_coord = self.format_coord
self.ax_probe.set_title("Probe %d viewer" % (self.probe_selecter_u + 1))
plt.show()
self.mngr_probe = plt.get_current_fig_manager()
self.mngr_probe.window.setGeometry(650, 250, self.d2 * 2, self.d1 * 2)
# get the probe coordinates and the region's names
# if no inclination in z direction
if line_fit.direction[2] == 0:
# if there is NOT inclination in the x direction
if line_fit.direction[0] == 0:
# line equations, to derive the send point of the line (aka probe)
z2 = pts[0,2]
x2 = pts[-1,0]
y2 = pts[0,1]
deg_lat = math.degrees(math.atan(line_fit.direction[0]))
deg_ant = math.degrees(math.atan(line_fit.direction[1]))
# position_at_bregma_depth
z0 = 440 * self.atlas.pixdim # correspond at the position of the bregma DV=0
x0 = pts[0,0]
y0 = pts[-1,1]
ML_position = (x0 - 246 * self.atlas.pixdim)
AP_position = (y0 - 653 * self.atlas.pixdim)
X0 = np.array([x0, y0, z0])
X2 = np.array([x2, y2, z2])
# start point for visualization (the first clicked point)
z1 = z2
x1 = pts[0,0]
y1 = pts[0,1]
X1 = np.array([x1, y1, z1])
# end point minus tip length
d = (self.probe_tip_length)
xt = x2
yt = y2-d
zt = z2
Xt = np.array([xt, yt, zt])
# get lenght of the probe
dist = np.linalg.norm(X0 - X2)
dist_check = np.linalg.norm(X0 - Xt)
# check kthat the new end point is before the end of the tip and not after
if dist_check > dist:
xt = x2
yt = y2+d
zt = z2
Xt = np.array([xt, yt, zt])
regions = []
point_along_line = []
s = int(math.modf(X1[1] / self.atlas.pixdim)[1]) # starting point
f = int(math.modf(Xt[1] / self.atlas.pixdim)[1]) # ending point
for y in range(min(s,f), max(s,f)):
x = pts[0,0] / self.atlas.pixdim
z = pts[0,2] / self.atlas.pixdim
if int(math.modf(x)[1]) > 512 or int(math.modf(y)[1]) > 1024 or int(math.modf(z)[1]) > 512:
regions.append('Clear Label')
else:
regions.append(self.atlas.labels_name[np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[int(math.modf(x)[1]),int(math.modf(y)[1]),int(math.modf(z)[1])], axis=1))[0,0]])
point_along_line.append([x,y,z])
# if there is inclination in the x direction
else:
# line equations, to derive the send point of the line (aka probe)
z2 = pts[0,2]
x2 = pts[-1,0]
y2 = line_fit.point[1] + ((x2 - line_fit.point[0]) / line_fit.direction[0])*line_fit.direction[1]
deg_lat = math.degrees(math.atan(line_fit.direction[0]))
deg_ant = math.degrees(math.atan(line_fit.direction[1]))
# position_at_bregma_depth
z0 = 440 * self.atlas.pixdim # correspond at the position of the bregma DV=0
x0 = pts[0,0]
y0 = line_fit.point[1]+((x0-line_fit.point[0])/line_fit.direction[0])*line_fit.direction[1]
ML_position = (x0-246 * self.atlas.pixdim)
AP_position = (y0-653 * self.atlas.pixdim)
X0 = np.array([x0,y0,z0])
X2 = np.array([x2,y2,z2])
# start point for visualization (the first clicked point)
z1 = z2
x1 = pts[0,0]
y1 = line_fit.point[1]+((x1-line_fit.point[0])/line_fit.direction[0])*line_fit.direction[1]
X1 = np.array([x1,y1,z1])
# end point minus tip length
dq = (self.probe_tip_length)**2
div = 1 + (line_fit.direction[1]/line_fit.direction[0])**2
xt = x2 + math.sqrt(dq/div)
yt = line_fit.point[1]+((xt-line_fit.point[0])/line_fit.direction[0])*line_fit.direction[1]
zt = z2
Xt = np.array([xt,yt,zt])
# get lenght of the probe
dist = np.linalg.norm(X0-X2)
dist_check = np.linalg.norm(X0-Xt)
# check kthat the new end point is before the end of the tip and not after
if dist_check > dist:
xt = x2 - math.sqrt(dq/div)
yt = line_fit.point[1]+((xt-line_fit.point[0])/line_fit.direction[0])*line_fit.direction[1]
zt = z2
Xt = np.array([xt,yt,zt])
regions = []
point_along_line = []
s = int(math.modf(X1[0] / self.atlas.pixdim)[1]) # starting point
f = int(math.modf(Xt[0] / self.atlas.pixdim)[1]) # ending point
for x in range(min(s,f), max(s,f)):
y = line_fit.point[1]/self.atlas.pixdim+((x-line_fit.point[0]/self.atlas.pixdim)/line_fit.direction[0])*line_fit.direction[1]
z = pts[0,2] / self.atlas.pixdim
if int(math.modf(x)[1]) > 512 or int(math.modf(y)[1]) > 1024 or int(math.modf(z)[1]) > 512:
regions.append('Clear Label')
else:
regions.append(self.atlas.labels_name[np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[int(math.modf(x)[1]),int(math.modf(y)[1]),int(math.modf(z)[1])], axis=1))[0,0]])
point_along_line.append([x,y,z])
else:
# line equations, to derive the end point of the line (aka probe)
# the last of the clicked points represent the end point of the line
z2 = pts[-1,2]
x2 = line_fit.point[0]+((z2-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[0]
y2 = line_fit.point[1]+((z2-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[1]
deg_lat = math.degrees(math.atan(line_fit.direction[0]))
deg_ant = math.degrees(math.atan(line_fit.direction[1]))
# position_at_bregma_depth
z0 = 440 * self.atlas.pixdim # correspond at the position of the bregma DV=0
x0 = line_fit.point[0]+((z0-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[0]
y0 = line_fit.point[1]+((z0-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[1]
ML_position = (x0 - 246 * self.atlas.pixdim)
AP_position = (y0 - 653 * self.atlas.pixdim)
X0 = np.array([x0,y0,z0])
X2 = np.array([x2,y2,z2])
# start point for visualization (the first clicked point)
z1 = pts[0,2]
x1 = line_fit.point[0]+((z1-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[0]
y1 = line_fit.point[1]+((z1-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[1]
X1 = np.array([x1,y1,z1])
# end point minus tip length
dq = (self.probe_tip_length)**2
div = 1 + (line_fit.direction[0]/line_fit.direction[2])**2 + (line_fit.direction[1]/line_fit.direction[2])**2
zt = z2 + math.sqrt(dq/div)
xt = line_fit.point[0]+((zt-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[0]
yt = line_fit.point[1]+((zt-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[1]
Xt = np.array([xt,yt,zt])
# get lenght of the probe
dist = np.linalg.norm(X0-X2)
dist_check = np.linalg.norm(X0-Xt)
# check kthat the new end point is before the end of the tip and not after
if dist_check > dist:
zt = z2 - math.sqrt(dq/div)
xt = line_fit.point[0]+((zt-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[0]
yt = line_fit.point[1]+((zt-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[1]
Xt = np.array([xt,yt,zt])
regions = []
point_along_line = []
s = int(math.modf(X1[2] / self.atlas.pixdim)[1]) # starting point
f = int(math.modf(Xt[2] / self.atlas.pixdim)[1]) # ending point
for z in range(min(s,f),max(s,f)):
x = line_fit.point[0]/self.atlas.pixdim+((z-line_fit.point[2]/self.atlas.pixdim)/line_fit.direction[2])*line_fit.direction[0]
y = line_fit.point[1]/self.atlas.pixdim+((z-line_fit.point[2]/self.atlas.pixdim)/line_fit.direction[2])*line_fit.direction[1]
regions.append(self.atlas.labels_name[np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[int(math.modf(x)[1]),int(math.modf(y)[1]),int(math.modf(z)[1])], axis=1))[0,0]])
point_along_line.append([x,y,z])
# avoid repetions and reverse the order
regioni = list(OrderedDict.fromkeys(regions))[::-1]
if 'Clear Label' in regioni:
regioni.remove('Clear Label')
num_el = []
indici = []
for re in regioni:
# store the index o the region to print only the color of the regions of interest
indici.append(self.atlas.labels_name.index(re))
# in the case in dont exit and then enter again the region
position = [i for i,x in enumerate(regions) if x == re]
# if there is only one point in the region
if len(position) == 1:
regional_dist = self.atlas.pixdim
else:
# first point along the line in the region
start = [element * self.atlas.pixdim for element in point_along_line[position[0]]]
# last point along the line in the region
end = [element * self.atlas.pixdim for element in point_along_line[position[-1]]]
# length of the part of the probe in the region
regional_dist = distance.euclidean(start, end)
# Number of electrodes in the region
num_el.append(round(regional_dist/self.vert_el_dist)*2)
# print insertion coordinates
print('\n---Estimated probe insertion---')
if ML_position > 0:
testo = ' ---Estimated probe insertion--- \nEntry position at DV = 0: AP = %.2f mm, ML = R%.2f mm \nInsertion distance from the above position: %.2f mm \n%.2f degrees in the anterior direction \n%.2f degrees in the lateral direction ' %(AP_position, abs(ML_position), dist, deg_ant, deg_lat)
print('Entry position at DV = 0: AP = %.2f mm, ML = R%.2f mm' % (AP_position, abs(ML_position)))
else:
testo = ' ---Estimated probe insertion--- \nEntry position at DV = 0: AP = %.2f mm, ML = L%.2f mm \nInsertion distance from the above position: %.2f mm \n%.2f degrees in the anterior direction \n%.2f degrees in the lateral direction ' %(AP_position, abs(ML_position), dist, deg_ant, deg_lat)
print('Entry position at DV = 0: AP = %.2f mm, ML = L%.2f fmm'
% (AP_position, abs(ML_position)))
print('Insertion distance from the above position: %.2f mm' % dist)
print('%.2f degrees in the anterior direction' % deg_ant)
print('%.2f degrees in the lateral direction\n' % deg_lat)
# print regions and channels
LL = [regioni, num_el]
headers = [' Regions traversed', 'Channels']
numpy_array = np.array(LL)
transpose_ll = numpy_array.T
transpose_list = transpose_ll.tolist()
print(tabulate(transpose_list, headers, floatfmt=".2f"))
if self.plane == 'c':
regioni.insert(0,' ---Regions traversed---')
if len(regioni)>16:
self.ax_probe.text(0.01, 0.26, testo, transform=self.ax_probe.transAxes, fontsize=6.5 ,verticalalignment='top', color = 'w')
B = regioni[:len(regioni)//2]
C = regioni[len(regioni)//2:]
self.ax_probe.text(0.41, 0.26, "\n".join(B), transform=self.ax_probe.transAxes, fontsize=6.5 ,verticalalignment='top', color = 'w')
self.ax_probe.text(0.76, 0.26, "\n".join(C), transform=self.ax_probe.transAxes, fontsize=6.5 ,verticalalignment='top', color = 'w')
else:
self.ax_probe.text(0.01, 0.26, testo, transform=self.ax_probe.transAxes, fontsize=9 ,verticalalignment='top', color = 'w')
self.ax_probe.text(0.51, 0.26, "\n".join(regioni), transform=self.ax_probe.transAxes, fontsize=9 ,verticalalignment='top', color = 'w')
elif self.plane == 's':
self.ax_probe.text(0.15, 0.20, testo, transform=self.ax_probe.transAxes, fontsize=11 ,verticalalignment='top', color = 'w')
regioni.insert(0,' ---Regions traversed---')
# if there are too many regions to print
if len(regioni) > 7:
B = regioni[:len(regioni)//2]
C = regioni[len(regioni)//2:]
self.ax_probe.text(0.5, 0.25, "\n".join(B), transform=self.ax_probe.transAxes, fontsize=9.5 ,verticalalignment='top', color = 'w')
self.ax_probe.text(0.74, 0.25, "\n".join(C), transform=self.ax_probe.transAxes, fontsize=9.5 ,verticalalignment='top', color = 'w')
else:
self.ax_probe.text(0.51, 0.25, "\n".join(regioni), transform=self.ax_probe.transAxes, fontsize=11 ,verticalalignment='top', color = 'w')
elif self.plane == 'h':
regioni.insert(0,' ---Regions traversed---')
# if there are too many regions to print
if len(regioni) > 7:
self.ax_probe.text(0.17, 0.22, testo, transform=self.ax_probe.transAxes, fontsize=8 ,verticalalignment='top', color = 'w')
B = regioni[:len(regioni)//2]
C = regioni[len(regioni)//2:]
self.ax_probe.text(0.01, 0.15, "\n".join(B), transform=self.ax_probe.transAxes, fontsize=6.5 ,verticalalignment='top', color = 'w')
self.ax_probe.text(0.49, 0.15, "\n".join(C), transform=self.ax_probe.transAxes, fontsize=6.4 ,verticalalignment='top', color = 'w')
else:
self.ax_probe.text(0.17, 0.22, testo, transform=self.ax_probe.transAxes, fontsize=9 ,verticalalignment='top', color = 'w')
self.ax_probe.text(0.17, 0.13, "\n".join(regioni), transform=self.ax_probe.transAxes, fontsize=9 ,verticalalignment='top', color = 'w')
# here I only color the region of interest
for i in range(len(self.atlas.labels_index)):
if i in indici:
coord = np.where(self.atlas.segmentation_data == self.atlas.labels_index[i][0])
self.atlas.cv_plot_display[coord[0],coord[1],coord[2],:] = self.atlas.labels_color[i]
# Plot
self.fig_color, self.ax_color = plt.subplots(1, 1) # to plot the region interested with colors
print(self.Pp[k].Plane)
IndexTracker_pi_col(self.ax_color, self.atlas.cv_plot_display/255, self.atlas.Edges, self.atlas.pixdim, self.Pp[k].Plane, probe_slice[0], p_x, p_y, line_fit)
plt.show()
self.mngr_col = plt.get_current_fig_manager()
self.mngr_col.window.setGeometry(650, 250, self.d2 * 2, self.d1 * 2)
self.probe_selecter_u += 1
except:
pass
else:
try:
print('\nProbe %d view mode' % (self.probe_selecter + 1))
L = getattr(self.coords_probe, self.probe_colors[self.probe_selecter])
p_x = []
p_y = []
probe_slice = []
for i in range(len(L)):
p_x.append(L[i][0])
p_y.append(L[i][1])
probe_slice.append(L[i][2])
unique_slice = list(OrderedDict.fromkeys(probe_slice))
# get the probe coordinates and the region's names
probe_x = []
probe_y = []
probe_z = []
if self.plane == 'c':
for i in range(len(L)):
probe_x.append(L[i][0])
probe_y.append(L[i][2] * self.atlas.pixdim)
probe_z.append(L[i][1])
elif self.plane == 's':
for i in range(len(L)):
probe_x.append(L[i][2] * self.atlas.pixdim)
probe_y.append(L[i][0])
probe_z.append(L[i][1])
elif self.plane == 'h':
for i in range(len(L)):
probe_x.append(L[i][0])
probe_y.append(L[i][1])
probe_z.append(L[i][2] * self.atlas.pixdim)
pts = np.array((probe_x, probe_y, probe_z)).T
# fit the probe
line_fit = Line.best_fit(pts)
# display the probe in a separate window
self.fig_probe, self.ax_probe = plt.subplots(1, 1)
self.trackerp = IndexTracker_pi(self.ax_probe, self.atlas.atlas_data, self.atlas.pixdim, self.plane, self.tracker.ind, unique_slice, p_x, p_y, self.probe_colors, self.probe_selecter, line_fit)
self.fig_probe.canvas.mpl_connect('scroll_event', self.trackerp.onscroll)
self.ax_probe.text(0.05, 0.95, self.textstr, transform=self.ax_probe.transAxes, fontsize=6, verticalalignment='bottom', bbox=self.props)
self.ax_probe.format_coord = self.format_coord
self.ax_probe.set_title("Probe %d viewer" % (self.probe_selecter + 1))
plt.show()
self.mngr_probe = plt.get_current_fig_manager()
self.mngr_probe.window.setGeometry(650, 250, self.d2 * 2, self.d1 * 2)
# if no inclination in z direction
if line_fit.direction[2] == 0:
# if there is NOT inclination in the x direction
if line_fit.direction[0] == 0:
# line equations, to derive the send point of the line (aka probe)
z2 = pts[0,2]
x2 = pts[-1,0]
y2 = pts[0,1]
deg_lat = math.degrees(math.atan(line_fit.direction[0]))
deg_ant = math.degrees(math.atan(line_fit.direction[1]))
# position_at_bregma_depth
z0 = 440 * self.atlas.pixdim # correspond at the position of the bregma DV=0
x0 = pts[0,0]
y0 = pts[-1,1]
ML_position = (x0 - 246 * self.atlas.pixdim)
AP_position = (y0 - 653 * self.atlas.pixdim)
X0 = np.array([x0,y0,z0])
X2 = np.array([x2,y2,z2])
# start point for visualization (the first clicked point)
z1 = z2
x1 = pts[0,0]
y1 = pts[0,1]
X1 = np.array([x1,y1,z1])
# end point minus tip length
d = (self.probe_tip_length)
xt = x2
yt = y2-d
zt = z2
Xt = np.array([xt,yt,zt])
# get lenght of the probe
dist = np.linalg.norm(X0-X2)
dist_check = np.linalg.norm(X0-Xt)
# check kthat the new end point is before the end of the tip and not after
if dist_check > dist:
xt = x2
yt = y2+d
zt = z2
Xt = np.array([xt,yt,zt])
regions = []
point_along_line = []
s = int(math.modf(X1[1] / self.atlas.pixdim)[1]) # starting point
f = int(math.modf(Xt[1] / self.atlas.pixdim)[1]) # ending point
for y in range(min(s,f), max(s,f)):
x = pts[0,0] / self.atlas.pixdim
z = pts[0,2] / self.atlas.pixdim
if int(math.modf(x)[1]) > 512 or int(math.modf(y)[1]) > 1024 or int(math.modf(z)[1]) > 512:
regions.append('Clear Label')
else:
regions.append(self.atlas.labels_name[np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[int(math.modf(x)[1]),int(math.modf(y)[1]),int(math.modf(z)[1])], axis=1))[0,0]])
point_along_line.append([x,y,z])
# if there is inclination in the x direction
else:
# line equations, to derive the send point of the line (aka probe)
z2 = pts[0,2]
x2 = pts[-1,0]
y2 = line_fit.point[1]+((x2-line_fit.point[0])/line_fit.direction[0])*line_fit.direction[1]
deg_lat = math.degrees(math.atan(line_fit.direction[0]))
deg_ant = math.degrees(math.atan(line_fit.direction[1]))
# position_at_bregma_depth
z0 = 440 * self.atlas.pixdim # correspond at the position of the bregma DV=0
x0 = pts[0,0]
y0 = line_fit.point[1]+((x0-line_fit.point[0])/line_fit.direction[0])*line_fit.direction[1]
ML_position = (x0-246 * self.atlas.pixdim)
AP_position = (y0-653 * self.atlas.pixdim)
X0 = np.array([x0,y0,z0])
X2 = np.array([x2,y2,z2])
# start point for visualization (the first clicked point)
z1 = z2
x1 = pts[0,0]
y1 = line_fit.point[1]+((x1-line_fit.point[0])/line_fit.direction[0])*line_fit.direction[1]
X1 = np.array([x1,y1,z1])
# end point minus tip length
dq = (self.probe_tip_length)**2
div = 1 + (line_fit.direction[1]/line_fit.direction[0])**2
xt = x2 + math.sqrt(dq/div)
yt = line_fit.point[1]+((xt-line_fit.point[0])/line_fit.direction[0])*line_fit.direction[1]
zt = z2
Xt = np.array([xt,yt,zt])
# get lenght of the probe
dist = np.linalg.norm(X0-X2)
dist_check = np.linalg.norm(X0-Xt)
# check kthat the new end point is before the end of the tip and not after
if dist_check > dist:
xt = x2 - math.sqrt(dq/div)
yt = line_fit.point[1]+((xt-line_fit.point[0])/line_fit.direction[0])*line_fit.direction[1]
zt = z2
Xt = np.array([xt,yt,zt])
regions = []
point_along_line = []
s = int(math.modf(X1[0]/self.atlas.pixdim)[1]) # starting point
f = int(math.modf(Xt[0]/self.atlas.pixdim)[1]) # ending point
for x in range(min(s,f), max(s,f)):
y = line_fit.point[1]/self.atlas.pixdim+((x-line_fit.point[0]/self.atlas.pixdim)/line_fit.direction[0])*line_fit.direction[1]
z = pts[0,2] / self.atlas.pixdim
if int(math.modf(x)[1]) > 512 or int(math.modf(y)[1]) > 1024 or int(math.modf(z)[1]) > 512:
regions.append('Clear Label')
else:
regions.append(self.atlas.labels_name[np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[int(math.modf(x)[1]),int(math.modf(y)[1]),int(math.modf(z)[1])], axis=1))[0,0]])
point_along_line.append([x,y,z])
else:
# line equations, to derive the point of the line (aka probe)
# the last of the clicked points represent the end point of the line
z2 = pts[-1,2]
x2 = line_fit.point[0]+((z2-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[0]
y2 = line_fit.point[1]+((z2-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[1]
deg_lat = math.degrees(math.atan(line_fit.direction[0]))
deg_ant = math.degrees(math.atan(line_fit.direction[1]))
# position_at_bregma_depth
z0 = 440 * self.atlas.pixdim # correspond at the position of the bregma DV=0
x0 = line_fit.point[0]+((z0-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[0]
y0 = line_fit.point[1]+((z0-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[1]
ML_position = (x0 - 246 * self.atlas.pixdim)
AP_position = (y0 - 653 * self.atlas.pixdim)
X0 = np.array([x0,y0,z0])
X2 = np.array([x2,y2,z2])
# start point for visualization (the first clicked point)
z1 = pts[0,2]
x1 = line_fit.point[0]+((z1-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[0]
y1 = line_fit.point[1]+((z1-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[1]
X1 = np.array([x1,y1,z1])
# end point minus tip length
dq = (self.probe_tip_length)**2
div = 1 + (line_fit.direction[0]/line_fit.direction[2])**2 + (line_fit.direction[1]/line_fit.direction[2])**2
zt = z2 + math.sqrt(dq/div)
xt = line_fit.point[0]+((zt-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[0]
yt = line_fit.point[1]+((zt-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[1]
Xt = np.array([xt,yt,zt])
# get lenght of the probe
dist = np.linalg.norm(X0-X2)
dist_check = np.linalg.norm(X0-Xt)
# check kthat the new end point is before the end of the tip and not after
if dist_check > dist:
zt = z2 - math.sqrt(dq/div)
xt = line_fit.point[0]+((zt-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[0]
yt = line_fit.point[1]+((zt-line_fit.point[2])/line_fit.direction[2])*line_fit.direction[1]
Xt = np.array([xt,yt,zt])
regions = []
point_along_line = []
s = int(math.modf(X1[2] / self.atlas.pixdim)[1]) # starting point
f = int(math.modf(Xt[2] / self.atlas.pixdim)[1]) # ending point
for z in range(min(s,f),max(s,f)):
x = line_fit.point[0]/self.atlas.pixdim+((z-line_fit.point[2]/self.atlas.pixdim)/line_fit.direction[2])*line_fit.direction[0]
y = line_fit.point[1]/self.atlas.pixdim+((z-line_fit.point[2]/self.atlas.pixdim)/line_fit.direction[2])*line_fit.direction[1]
if int(math.modf(x)[1]) > 512 or int(math.modf(y)[1]) > 1024 or int(math.modf(z)[1]) > 512:
regions.append('Clear Label')
else:
regions.append(self.atlas.labels_name[np.argwhere(np.all(self.atlas.labels_index == self.atlas.segmentation_data[int(math.modf(x)[1]),int(math.modf(y)[1]),int(math.modf(z)[1])], axis=1))[0,0]])
point_along_line.append([x,y,z])
# avoid repetions and reverse the order
regioni = list(OrderedDict.fromkeys(regions))[::-1]
if 'Clear Label' in regioni:
regioni.remove('Clear Label')
num_el = []
indici = []
for re in regioni:
# store the index o the region to print only the color of the regions of interest
indici.append(self.atlas.labels_name.index(re))
# in the case in dont exit and then enter again the region
position = [i for i,x in enumerate(regions) if x == re]
# if there is only one point in the region
if len(position) == 1:
regional_dist = self.atlas.pixdim
else:
# first point along the line in the region
start = [element * self.atlas.pixdim for element in point_along_line[position[0]]]
# last point along the line in the region
end = [element * self.atlas.pixdim for element in point_along_line[position[-1]]]
# length of the part of the probe in the region
regional_dist = distance.euclidean(start, end)
# Number of electrodes in the region
num_el.append(round(regional_dist/self.vert_el_dist)*2)
# print insertion coordinates
print('\n---Estimated probe insertion---')
if ML_position > 0:
testo = ' ---Estimated probe insertion--- \nEntry position at DV = 0: AP = %.2f mm, ML = R%.2f mm \nInsertion distance from the above position: %.2f mm \n%.2f degrees in the anterior direction \n%.2f degrees in the lateral direction ' % (AP_position, abs(ML_position), dist, deg_ant, deg_lat)
print('Entry position at DV = 0: AP = %.2f mm, ML = R%.2f mm' % (AP_position, abs(ML_position)))
else:
testo = ' ---Estimated probe insertion--- \nEntry position at DV = 0: AP = %.2f mm, ML = L%.2f mm \nInsertion distance from the above position: %.2f mm \n%.2f degrees in the anterior direction \n%.2f degrees in the lateral direction ' % (AP_position, abs(ML_position), dist, deg_ant, deg_lat)
print('Entry position at DV = 0: AP = %.2f mm, ML = L%.2f fmm' % (AP_position, abs(ML_position)))
print('Insertion distance from the above position: %.2f mm' % dist)
print('%.2f degrees in the anterior direction' % deg_ant)
print('%.2f degrees in the lateral direction\n' % deg_lat)
# print regions and number of channels
LL = [regioni, num_el]
headers = [' Regions traversed', 'Channels']
numpy_array = np.array(LL)
transpose_ll = numpy_array.T
transpose_list = transpose_ll.tolist()
print(tabulate(transpose_list, headers, floatfmt=".2f"))
if self.plane == 'c':
# list of regions
regioni.insert(0,' ---Regions traversed---')
if len(regioni) > 16:
self.ax_probe.text(0.01, 0.26, testo, transform=self.ax_probe.transAxes, fontsize=6.5, verticalalignment='top', color='w')
B = regioni[:len(regioni)//2]
C = regioni[len(regioni)//2:]
self.ax_probe.text(0.41, 0.26, "\n".join(B), transform=self.ax_probe.transAxes, fontsize=6.5, verticalalignment='top', color='w')
self.ax_probe.text(0.76, 0.26, "\n".join(C), transform=self.ax_probe.transAxes, fontsize=6.5, verticalalignment='top', color='w')
else:
self.ax_probe.text(0.01, 0.26, testo, transform=self.ax_probe.transAxes, fontsize=9, verticalalignment='top', color='w')
self.ax_probe.text(0.51, 0.26, "\n".join(regioni), transform=self.ax_probe.transAxes, fontsize=9, verticalalignment='top', color='w')
elif self.plane == 's':
self.ax_probe.text(0.15, 0.20, testo, transform=self.ax_probe.transAxes, fontsize=11, verticalalignment='top', color='w')
regioni.insert(0,' ---Regions traversed---')
# if there are too many regions to print
if len(regioni) > 7:
B = regioni[:len(regioni)//2]
C = regioni[len(regioni)//2:]
self.ax_probe.text(0.5, 0.25, "\n".join(B), transform=self.ax_probe.transAxes, fontsize=9.5, verticalalignment='top', color='w')
self.ax_probe.text(0.74, 0.25, "\n".join(C), transform=self.ax_probe.transAxes, fontsize=9.5, verticalalignment='top', color='w')
else:
self.ax_probe.text(0.51, 0.25, "\n".join(regioni), transform=self.ax_probe.transAxes, fontsize=11, verticalalignment='top', color='w')
elif self.plane == 'h':
regioni.insert(0,' ---Regions traversed---')
# if there are too many regions to print
if len(regioni) > 7:
self.ax_probe.text(0.17, 0.22, testo, transform=self.ax_probe.transAxes, fontsize=8, verticalalignment='top', color='w')
B = regioni[:len(regioni) // 2]
C = regioni[len(regioni) // 2:]
self.ax_probe.text(0.01, 0.15, "\n".join(B), transform=self.ax_probe.transAxes, fontsize=6.5, verticalalignment='top', color='w')
self.ax_probe.text(0.49, 0.15, "\n".join(C), transform=self.ax_probe.transAxes, fontsize=6.4, verticalalignment='top', color='w')
else:
self.ax_probe.text(0.17, 0.22, testo, transform=self.ax_probe.transAxes, fontsize=9, verticalalignment='top', color='w')
self.ax_probe.text(0.17, 0.13, "\n".join(regioni), transform=self.ax_probe.transAxes, fontsize=9, verticalalignment='top', color='w')
# here I only color the region of interest
for i in range(len(self.atlas.labels_index)):
if i in indici:
coord = np.where(self.atlas.segmentation_data == self.atlas.labels_index[i][0])
self.atlas.cv_plot_display[coord[0],coord[1],coord[2],:] = self.atlas.labels_color[i]
# Plot
self.fig_color, self.ax_color = plt.subplots(1, 1) # to plot the region interested with colors
IndexTracker_pi_col(self.ax_color, self.atlas.cv_plot_display / 255, self.atlas.Edges, self.atlas.pixdim, self.plane, self.tracker.ind, p_x, p_y, line_fit)
plt.show()
self.mngr_col = plt.get_current_fig_manager()
self.mngr_col.window.setGeometry(650, 250, self.d2 * 2, self.d1 * 2)
self.probe_selecter += 1
except:
print('No more probes to visualize')
pass
| 65.080472 | 334 | 0.480834 | 7,208 | 60,655 | 3.929245 | 0.067148 | 0.043994 | 0.056493 | 0.027611 | 0.853365 | 0.832639 | 0.8224 | 0.815726 | 0.808312 | 0.800756 | 0 | 0.036402 | 0.40488 | 60,655 | 931 | 335 | 65.150376 | 0.748206 | 0.090265 | 0 | 0.722074 | 0 | 0.006649 | 0.062143 | 0.000982 | 0 | 0 | 0 | 0 | 0 | 1 | 0.009309 | false | 0.003989 | 0.019947 | 0 | 0.039894 | 0.06383 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7903f0e8cbf52ac530b12e8b6192b08a3c4a90f1 | 82,016 | py | Python | test/integration/component/test_stopped_vm.py | ksowmya/cloudstack-1 | f8f779158da056be7da669884ae4ddd109cec044 | [
"Apache-2.0"
] | 1 | 2020-03-27T22:21:20.000Z | 2020-03-27T22:21:20.000Z | test/integration/component/test_stopped_vm.py | ksowmya/cloudstack-1 | f8f779158da056be7da669884ae4ddd109cec044 | [
"Apache-2.0"
] | null | null | null | test/integration/component/test_stopped_vm.py | ksowmya/cloudstack-1 | f8f779158da056be7da669884ae4ddd109cec044 | [
"Apache-2.0"
] | 1 | 2019-12-26T07:16:06.000Z | 2019-12-26T07:16:06.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" P1 for stopped Virtual Maschine life cycle
"""
#Import Local Modules
import marvin
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.remoteSSHClient import remoteSSHClient
from marvin.integration.lib.utils import *
from marvin.integration.lib.base import *
from marvin.integration.lib.common import *
#Import System modules
import time
class Services:
    """Configuration data for the stopped-VM life cycle tests.

    Exposes a single ``services`` dict whose sub-dicts describe the test
    account, virtual machine, offerings, data volume, ISO and template.
    """

    def __init__(self):
        account = {
            "email": "test@test.com",
            "firstname": "Test",
            "lastname": "User",
            # create-account appends random characters, so the generated
            # username is unique on every run
            "username": "test",
            "password": "password",
        }
        virtual_machine = {
            "displayname": "testserver",
            # credentials used for SSH access to the VM
            "username": "root",
            "password": "password",
            "ssh_port": 22,
            "hypervisor": 'XenServer',
            "privateport": 22,
            "publicport": 22,
            "protocol": 'TCP',
        }
        service_offering = {
            "name": "Tiny Instance",
            "displaytext": "Tiny Instance",
            "cpunumber": 1,
            "cpuspeed": 100,    # MHz
            "memory": 128,      # MB
        }
        disk_offering = {
            "displaytext": "Tiny volume",
            "name": "Tiny volume",
            "disksize": 1
        }
        volume = {
            "diskname": "DataDisk",
            "url": 'http://download.cloud.com/releases/2.0.0/UbuntuServer-10-04-64bit.vhd.bz2',
            "format": 'VHD'
        }
        # ISO settings for the attach/detach ISO tests
        iso = {
            "displaytext": "Test ISO",
            "name": "testISO",
            # source URL where the ISO is located
            "url": "http://people.apache.org/~tsp/dummy.iso",
            "ostype": 'CentOS 5.3 (64-bit)',
            "mode": 'HTTP_DOWNLOAD',    # downloading an existing ISO
        }
        template = {
            "url": "http://download.cloud.com/releases/2.0.0/UbuntuServer-10-04-64bit.vhd.bz2",
            "hypervisor": 'XenServer',
            "format": 'VHD',
            "isfeatured": True,
            "ispublic": True,
            "isextractable": True,
            "displaytext": "Cent OS Template",
            "name": "Cent OS Template",
            "ostype": 'CentOS 5.3 (64-bit)',
            "templatefilter": 'self',
            "passwordenabled": True,
        }
        self.services = {
            "account": account,
            "virtual_machine": virtual_machine,
            "service_offering": service_offering,
            "disk_offering": disk_offering,
            "volume": volume,
            "iso": iso,
            "template": template,
            "sleep": 60,
            "timeout": 10,
            "ostype": 'CentOS 5.3 (64-bit)',
        }
class TestDeployVM(cloudstackTestCase):
@classmethod
def setUpClass(cls):
    """One-time setup: resolve zone/domain/template and create the offerings
    shared by every test case in this class."""
    cls.api_client = super(
                           TestDeployVM,
                           cls
                           ).getClsTestClient().getApiClient()
    cls.services = Services().services

    # Look up the infrastructure entities the tests run against.
    cls.domain = get_domain(cls.api_client, cls.services)
    cls.zone = get_zone(cls.api_client, cls.services)
    cls.template = get_template(
        cls.api_client,
        cls.zone.id,
        cls.services["ostype"]
    )

    # Offerings reused across all tests; torn down by tearDownClass.
    cls.service_offering = ServiceOffering.create(
        cls.api_client,
        cls.services["service_offering"]
    )
    cls.disk_offering = DiskOffering.create(
        cls.api_client,
        cls.services["disk_offering"]
    )
    cls._cleanup = [
        cls.service_offering,
        cls.disk_offering,
    ]
@classmethod
def tearDownClass(cls):
    """Release the class-level resources created in setUpClass."""
    try:
        cleanup_resources(cls.api_client, cls._cleanup)
    except Exception as err:
        # Re-raise so a broken cleanup is visible in the test report.
        raise Exception("Warning: Exception during cleanup : %s" % err)
def setUp(self):
    """Per-test setup: fresh API clients, zone-bound service data and a
    throw-away account every test runs under."""
    self.apiclient = self.testClient.getApiClient()
    self.dbclient = self.testClient.getDbConnection()

    services = Services().services
    # Bind the deployment data to the zone/template resolved in setUpClass.
    services["virtual_machine"]["zoneid"] = self.zone.id
    services["virtual_machine"]["template"] = self.template.id
    services["iso"]["zoneid"] = self.zone.id
    self.services = services

    self.account = Account.create(
        self.apiclient,
        self.services["account"],
        domainid=self.domain.id
    )
    # Resources appended here are removed in tearDown.
    self.cleanup = [self.account]
def tearDown(self):
    """Best-effort removal of everything this test created."""
    try:
        self.debug("Cleaning up the resources")
        cleanup_resources(self.apiclient, self.cleanup)
        self.debug("Cleanup complete!")
    except Exception as err:
        # Cleanup failures are logged but must not fail the run itself.
        self.debug("Warning! Exception in tearDown: %s" % err)
@attr(tags = ["advanced", "eip", "advancedns", "basic", "sg"])
def test_01_deploy_vm_no_startvm(self):
"""Test Deploy Virtual Machine with no startVM parameter
"""
# Validate the following:
# 1. deploy Vm without specifying the startvm parameter
# 2. Should be able to login to the VM.
# 3. listVM command should return the deployed VM.State of this VM
# should be "Running".
self.debug("Deploying instance in the account: %s" %
self.account.name)
self.virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
diskofferingid=self.disk_offering.id,
mode=self.zone.networktype
)
self.debug("Deployed instance in account: %s" %
self.account.name)
list_vm_response = list_virtual_machines(
self.apiclient,
id=self.virtual_machine.id
)
self.debug(
"Verify listVirtualMachines response for virtual machine: %s" \
% self.virtual_machine.id
)
self.assertEqual(
isinstance(list_vm_response, list),
True,
"Check list response returns a valid list"
)
vm_response = list_vm_response[0]
self.assertEqual(
vm_response.state,
"Running",
"VM should be in Running state after deployment"
)
return
@attr(tags = ["advanced", "eip", "advancedns", "basic", "sg"])
def test_02_deploy_vm_startvm_true(self):
"""Test Deploy Virtual Machine with startVM=true parameter
"""
# Validate the following:
# 1. deploy Vm with the startvm=true
# 2. Should be able to login to the VM.
# 3. listVM command should return the deployed VM.State of this VM
# should be "Running".
self.debug("Deploying instance in the account: %s" %
self.account.name)
self.virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
startvm=True,
diskofferingid=self.disk_offering.id,
mode=self.zone.networktype
)
self.debug("Deployed instance in account: %s" %
self.account.name)
list_vm_response = list_virtual_machines(
self.apiclient,
id=self.virtual_machine.id
)
self.debug(
"Verify listVirtualMachines response for virtual machine: %s" \
% self.virtual_machine.id
)
self.assertEqual(
isinstance(list_vm_response, list),
True,
"Check list response returns a valid list"
)
vm_response = list_vm_response[0]
self.assertEqual(
vm_response.state,
"Running",
"VM should be in Running state after deployment"
)
return
@attr(tags = ["advanced", "eip", "advancedns", "basic", "sg"])
def test_03_deploy_vm_startvm_false(self):
"""Test Deploy Virtual Machine with startVM=false parameter
"""
# Validate the following:
# 1. deploy Vm with the startvm=false
# 2. Should not be able to login to the VM.
# 3. listVM command should return the deployed VM.State of this VM
# should be "Stopped".
# 4. Check listRouters call for that account. List routers should
# return empty response
self.debug("Deploying instance in the account: %s" %
self.account.name)
self.virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
startvm=False,
diskofferingid=self.disk_offering.id,
)
self.debug("Deployed instance in account: %s" %
self.account.name)
list_vm_response = list_virtual_machines(
self.apiclient,
id=self.virtual_machine.id
)
self.debug(
"Verify listVirtualMachines response for virtual machine: %s" \
% self.virtual_machine.id
)
self.assertEqual(
isinstance(list_vm_response, list),
True,
"Check list response returns a valid list"
)
vm_response = list_vm_response[0]
self.assertEqual(
vm_response.state,
"Stopped",
"VM should be in Stopped state after deployment with startvm=false"
)
routers = Router.list(
self.apiclient,
account=self.account.name,
domainid=self.account.domainid,
listall=True
)
self.assertEqual(
routers,
None,
"List routers should return empty response"
)
self.debug("Destroying instance: %s" % self.virtual_machine.name)
self.virtual_machine.delete(self.apiclient)
self.debug("Instance is destroyed!")
self.debug(
"Verify listVirtualMachines response for virtual machine: %s" \
% self.virtual_machine.id
)
self.debug("Instance destroyed..waiting till expunge interval")
interval = list_configurations(
self.apiclient,
name='expunge.interval'
)
delay = list_configurations(
self.apiclient,
name='expunge.delay'
)
# Sleep to ensure that all resources are deleted
time.sleep((int(interval[0].value) + int(delay[0].value)))
list_vm_response = list_virtual_machines(
self.apiclient,
id=self.virtual_machine.id
)
self.assertEqual(
list_vm_response,
None,
"Check list response returns a valid list"
)
return
@attr(tags = ["advanced", "eip", "advancedns", "basic", "sg"])
def test_04_deploy_startvm_false_attach_volume(self):
"""Test Deploy Virtual Machine with startVM=false and attach volume
"""
# Validate the following:
# 1. deploy Vm with the startvm=false. Attach volume to the instance
# 2. listVM command should return the deployed VM.State of this VM
# should be "Stopped".
# 3. Attach volume should be successful
self.debug("Deploying instance in the account: %s" %
self.account.name)
self.virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
startvm=False,
diskofferingid=self.disk_offering.id,
)
self.debug("Deployed instance in account: %s" %
self.account.name)
list_vm_response = list_virtual_machines(
self.apiclient,
id=self.virtual_machine.id
)
self.debug(
"Verify listVirtualMachines response for virtual machine: %s" \
% self.virtual_machine.id
)
self.assertEqual(
isinstance(list_vm_response, list),
True,
"Check list response returns a valid list"
)
vm_response = list_vm_response[0]
self.assertEqual(
vm_response.state,
"Stopped",
"VM should be in Stopped state after deployment with startvm=false"
)
self.debug("Creating a volume in account: %s" %
self.account.name)
volume = Volume.create(
self.apiclient,
self.services["volume"],
zoneid=self.zone.id,
account=self.account.name,
domainid=self.account.domainid,
diskofferingid=self.disk_offering.id
)
self.debug("Created volume in account: %s" % self.account.name)
self.debug("Attaching volume to instance: %s" %
self.virtual_machine.name)
try:
self.virtual_machine.attach_volume(self.apiclient, volume)
except Exception as e:
self.fail("Attach volume failed!")
return
@attr(tags = ["advanced", "eip", "advancedns", "basic", "sg"])
def test_05_deploy_startvm_false_change_so(self):
"""Test Deploy Virtual Machine with startVM=false and change service offering
"""
# Validate the following:
# 1. deploy Vm with the startvm=false. Attach volume to the instance
# 2. listVM command should return the deployed VM.State of this VM
# should be "Stopped".
# 4. Change service offering
self.debug("Deploying instance in the account: %s" %
self.account.name)
self.virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
startvm=False,
)
self.debug("Deployed instance in account: %s" %
self.account.name)
list_vm_response = list_virtual_machines(
self.apiclient,
id=self.virtual_machine.id
)
self.debug(
"Verify listVirtualMachines response for virtual machine: %s" \
% self.virtual_machine.id
)
self.assertEqual(
isinstance(list_vm_response, list),
True,
"Check list response returns a valid list"
)
vm_response = list_vm_response[0]
self.assertEqual(
vm_response.state,
"Stopped",
"VM should be in Stopped state after deployment with startvm=false"
)
medium_service_off = ServiceOffering.create(
self.apiclient,
self.services["service_offering"]
)
self.cleanup.append(medium_service_off)
self.debug("Changing service offering for instance: %s" %
self.virtual_machine.name)
try:
self.virtual_machine.change_service_offering(
self.apiclient,
medium_service_off.id
)
except Exception as e:
self.fail("Change service offering failed: %s" % e)
self.debug("Starting the instance: %s" % self.virtual_machine.name)
self.virtual_machine.start(self.apiclient)
self.debug("Instance: %s started" % self.virtual_machine.name)
listedvm = VirtualMachine.list(self.apiclient, id=self.virtual_machine.id)
self.assert_(isinstance(listedvm, list))
self.assert_(len(listedvm) > 0)
self.assertEqual(listedvm[0].serviceofferingid, medium_service_off.id, msg="VM did not change service offering")
return
@attr(tags = ["advanced", "eip", "advancedns", "basic", "sg"])
def test_06_deploy_startvm_attach_detach(self):
"""Test Deploy Virtual Machine with startVM=false and
attach detach volumes
"""
# Validate the following:
# 1. deploy Vm with the startvm=false. Attach volume to the instance
# 2. listVM command should return the deployed VM.State of this VM
# should be "Stopped".
# 3. Attach volume should be successful
# 4. Detach volume from instance. Detach should be successful
self.debug("Deploying instance in the account: %s" %
self.account.name)
self.virtual_machine = VirtualMachine.create(
self.apiclient,
self.services["virtual_machine"],
accountid=self.account.name,
domainid=self.account.domainid,
serviceofferingid=self.service_offering.id,
startvm=False,
diskofferingid=self.disk_offering.id,
)
self.debug("Deployed instance in account: %s" %
self.account.name)
list_vm_response = list_virtual_machines(
self.apiclient,
id=self.virtual_machine.id
)
self.debug(
"Verify listVirtualMachines response for virtual machine: %s" \
% self.virtual_machine.id
)
self.assertEqual(
isinstance(list_vm_response, list),
True,
"Check list response returns a valid list"
)
vm_response = list_vm_response[0]
self.assertEqual(
vm_response.state,
"Stopped",
"VM should be in Stopped state after deployment with startvm=false"
)
self.debug("Creating a volume in account: %s" %
self.account.name)
volume = Volume.create(
self.apiclient,
self.services["volume"],
zoneid=self.zone.id,
account=self.account.name,
domainid=self.account.domainid,
diskofferingid=self.disk_offering.id
)
self.debug("Created volume in account: %s" % self.account.name)
self.debug("Attaching volume to instance: %s" %
self.virtual_machine.name)
try:
self.virtual_machine.attach_volume(self.apiclient, volume)
except Exception as e:
self.fail("Attach volume failed!")
self.debug("Detaching the disk: %s" % volume.name)
self.virtual_machine.detach_volume(self.apiclient, volume)
self.debug("Datadisk %s detached!" % volume.name)
volumes = Volume.list(
self.apiclient,
virtualmachineid=self.virtual_machine.id,
type='DATADISK',
id=volume.id,
listall=True
)
self.assertEqual(
volumes,
None,
"List Volumes should not list any volume for instance"
)
return
@attr(tags=["advanced", "eip", "advancedns", "basic", "sg"])
def test_07_deploy_startvm_attach_iso(self):
    """Test Deploy Virtual Machine with startVM=false and attach ISO."""
    # Validate the following:
    # 1. deploy Vm with the startvm=false. Attach volume to the instance
    # 2. listVM command should return the deployed VM. State of this VM
    #    should be "Stopped".
    # 3. Attach ISO to the instance. Attach ISO should be successful
    self.debug("Deploying instance in the account: %s" % self.account.name)
    self.virtual_machine = VirtualMachine.create(
        self.apiclient,
        self.services["virtual_machine"],
        accountid=self.account.name,
        domainid=self.account.domainid,
        serviceofferingid=self.service_offering.id,
        startvm=False,
        diskofferingid=self.disk_offering.id,
    )
    self.debug("Deployed instance in account: %s" % self.account.name)
    list_vm_response = list_virtual_machines(
        self.apiclient,
        id=self.virtual_machine.id
    )
    self.debug(
        "Verify listVirtualMachines response for virtual machine: %s"
        % self.virtual_machine.id
    )
    self.assertEqual(
        isinstance(list_vm_response, list),
        True,
        "Check list response returns a valid list"
    )
    vm_response = list_vm_response[0]
    self.assertEqual(
        vm_response.state,
        "Stopped",
        "VM should be in Stopped state after deployment with startvm=false"
    )
    self.debug("Registering a ISO in account: %s" % self.account.name)
    iso = Iso.create(
        self.apiclient,
        self.services["iso"],
        account=self.account.name,
        domainid=self.account.domainid
    )
    self.debug("Successfully created ISO with ID: %s" % iso.id)
    try:
        iso.download(self.apiclient)
        self.cleanup.append(iso)
    except Exception as e:
        self.fail("Exception while downloading ISO %s: %s" % (iso.id, e))
    self.debug("Attach ISO with ID: %s to VM ID: %s" % (
        iso.id,
        self.virtual_machine.id
    ))
    try:
        self.virtual_machine.attach_iso(self.apiclient, iso)
    except Exception as e:
        # Include the underlying exception so a failure is diagnosable
        # from the test report (the original message discarded it).
        self.fail("Attach ISO failed with exception: %s" % e)
    vms = VirtualMachine.list(
        self.apiclient,
        id=self.virtual_machine.id,
        listall=True
    )
    self.assertEqual(
        isinstance(vms, list),
        True,
        "List vms should return a valid list"
    )
    vm = vms[0]
    self.assertEqual(
        vm.isoid,
        iso.id,
        "The ISO status should be reflected in list Vm call"
    )
    return
@attr(tags=["advanced", "eip", "advancedns", "basic", "sg"])
def test_08_deploy_attached_volume(self):
    """Test Deploy Virtual Machine with startVM=false and attach volume
    already attached to different machine.
    """
    # Validate the following:
    # 1. deploy Vm with the startvm=false. Attach volume to the instance
    # 2. listVM command should return the deployed VM. State of this VM
    #    should be "Stopped".
    # 3. Create an instance with datadisk attached to it. Detach DATADISK
    # 4. Attach the volume to first virtual machine.
    self.debug("Deploying instance in the account: %s" % self.account.name)
    self.virtual_machine_1 = VirtualMachine.create(
        self.apiclient,
        self.services["virtual_machine"],
        accountid=self.account.name,
        domainid=self.account.domainid,
        serviceofferingid=self.service_offering.id,
        startvm=False,
    )
    self.debug("Deployed instance in account: %s" % self.account.name)
    list_vm_response = list_virtual_machines(
        self.apiclient,
        id=self.virtual_machine_1.id
    )
    self.debug(
        "Verify listVirtualMachines response for virtual machine: %s"
        % self.virtual_machine_1.id
    )
    self.assertEqual(
        isinstance(list_vm_response, list),
        True,
        "Check list response returns a valid list"
    )
    vm_response = list_vm_response[0]
    self.assertEqual(
        vm_response.state,
        "Stopped",
        "VM should be in Stopped state after deployment with startvm=false"
    )
    # Second instance is started normally so its DATADISK gets created.
    self.debug("Deploying instance in the account: %s" % self.account.name)
    self.virtual_machine_2 = VirtualMachine.create(
        self.apiclient,
        self.services["virtual_machine"],
        accountid=self.account.name,
        domainid=self.account.domainid,
        serviceofferingid=self.service_offering.id,
        diskofferingid=self.disk_offering.id
    )
    self.debug("Deployed instance in account: %s" % self.account.name)
    list_vm_response = list_virtual_machines(
        self.apiclient,
        id=self.virtual_machine_2.id
    )
    self.debug(
        "Verify listVirtualMachines response for virtual machine: %s"
        % self.virtual_machine_2.id
    )
    self.assertEqual(
        isinstance(list_vm_response, list),
        True,
        "Check list response returns a valid list"
    )
    vm_response = list_vm_response[0]
    self.assertEqual(
        vm_response.state,
        "Running",
        # Fixed message: this VM is deployed without startvm=false and
        # is expected to be Running.
        "VM should be in Running state after deployment"
    )
    self.debug(
        "Fetching DATADISK details for instance: %s" %
        self.virtual_machine_2.name)
    volumes = Volume.list(
        self.apiclient,
        type='DATADISK',
        account=self.account.name,
        domainid=self.account.domainid,
        listall=True
    )
    self.assertEqual(
        isinstance(volumes, list),
        True,
        "List volumes should return a valid list"
    )
    volume = volumes[0]
    self.debug("Detaching the disk: %s" % volume.name)
    try:
        self.virtual_machine_2.detach_volume(self.apiclient, volume)
        self.debug("Datadisk %s detached!" % volume.name)
    except Exception as e:
        self.fail("Detach volume failed with %s!" % e)
    self.debug("Attaching volume to instance: %s" %
               self.virtual_machine_1.name)
    try:
        self.virtual_machine_1.attach_volume(self.apiclient, volume)
    except Exception as e:
        self.fail("Attach volume failed with %s!" % e)
    volumes = Volume.list(
        self.apiclient,
        virtualmachineid=self.virtual_machine_1.id,
        type='DATADISK',
        id=volume.id,
        listall=True
    )
    self.assertNotEqual(
        volumes,
        None,
        # Fixed message: the volume was just attached, so it must be listed.
        "List Volumes should list the volume attached to the instance"
    )
    return
@attr(tags=["advanced", "eip", "advancedns", "basic", "sg"])
def test_09_stop_vm_migrate_vol(self):
    """Test Stopped Virtual Machine's ROOT volume migration."""
    # Validate the following:
    # 1. deploy Vm with startvm=true
    # 2. Should not be able to login to the VM.
    # 3. listVM command should return the deployed VM. State of this VM
    #    should be "Running".
    # 4. Stop the vm
    # 5. list primary storages in the cluster, should be more than one
    # 6. Migrate volume to another available primary storage
    clusters = Cluster.list(
        self.apiclient,
        zoneid=self.zone.id
    )
    self.assertEqual(
        isinstance(clusters, list),
        True,
        "Check list response returns a valid list"
    )
    # Find a cluster with at least two primary storage pools; skip the
    # test if none exists (migration needs a second pool as target).
    self.cluster_id = None
    for cluster in clusters:
        storage_pools = StoragePool.list(
            self.apiclient,
            clusterid=cluster.id
        )
        if storage_pools is not None and len(storage_pools) > 1:
            self.cluster_id = cluster.id
            break
    if self.cluster_id is None:
        self.skipTest(
            "No cluster with more than one primary storage pool to "
            "perform migrate volume test")
    hosts = Host.list(
        self.apiclient,
        clusterid=self.cluster_id
    )
    self.assertEqual(
        isinstance(hosts, list),
        True,
        "Check list response returns a valid list"
    )
    host = hosts[0]
    self.debug("Deploying instance on host: %s" % host.id)
    self.debug("Deploying instance in the account: %s" % self.account.name)
    self.virtual_machine = VirtualMachine.create(
        self.apiclient,
        self.services["virtual_machine"],
        accountid=self.account.name,
        domainid=self.account.domainid,
        serviceofferingid=self.service_offering.id,
        diskofferingid=self.disk_offering.id,
        hostid=host.id,
        mode=self.zone.networktype
    )
    self.debug("Deployed instance in account: %s" % self.account.name)
    list_vm_response = list_virtual_machines(
        self.apiclient,
        id=self.virtual_machine.id
    )
    self.debug(
        "Verify listVirtualMachines response for virtual machine: %s"
        % self.virtual_machine.id
    )
    self.assertEqual(
        isinstance(list_vm_response, list),
        True,
        "Check list response returns a valid list"
    )
    vm_response = list_vm_response[0]
    self.assertEqual(
        vm_response.state,
        "Running",
        "VM should be in Running state after deployment"
    )
    self.debug("Stopping instance: %s" % self.virtual_machine.name)
    self.virtual_machine.stop(self.apiclient)
    self.debug("Instance is stopped!")
    self.debug(
        "Verify listVirtualMachines response for virtual machine: %s"
        % self.virtual_machine.id
    )
    list_vm_response = list_virtual_machines(
        self.apiclient,
        id=self.virtual_machine.id
    )
    self.assertEqual(
        isinstance(list_vm_response, list),
        True,
        "Check list response returns a valid list"
    )
    vm_response = list_vm_response[0]
    self.assertEqual(
        vm_response.state,
        "Stopped",
        "VM should be in Stopped state after stopping vm"
    )
    volumes = Volume.list(
        self.apiclient,
        virtualmachineid=self.virtual_machine.id,
        type='ROOT',
        listall=True
    )
    self.assertEqual(
        isinstance(volumes, list),
        True,
        "Check volume list response returns a valid list"
    )
    vol_response = volumes[0]
    # Get the storage name in which the volume currently resides
    storage_name = vol_response.storage
    storage_pools = StoragePool.list(
        self.apiclient,
        clusterid=self.cluster_id
    )
    # Pick any pool in the cluster other than the current one as the
    # migration target.
    for spool in storage_pools:
        if spool.name != storage_name:
            self.storage_id = spool.id
            self.storage_name = spool.name
            break
    self.debug("Migrating volume to storage pool: %s" % self.storage_name)
    Volume.migrate(
        self.apiclient,
        storageid=self.storage_id,
        volumeid=vol_response.id
    )
    volume = Volume.list(
        self.apiclient,
        virtualmachineid=self.virtual_machine.id,
        type='ROOT',
        listall=True
    )
    self.assertEqual(
        volume[0].storage,
        self.storage_name,
        "Check volume migration response")
    return
class TestDeployHaEnabledVM(cloudstackTestCase):
    """Tests for deploying HA-enabled virtual machines with/without startvm."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestDeployHaEnabledVM,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        # Create service, disk offerings etc (HA-enabled offering)
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"],
            offerha=True
        )
        cls.disk_offering = DiskOffering.create(
            cls.api_client,
            cls.services["disk_offering"]
        )
        # Cleanup
        cls._cleanup = [
            cls.service_offering,
            cls.disk_offering,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.services = Services().services
        self.services["virtual_machine"]["zoneid"] = self.zone.id
        self.services["virtual_machine"]["template"] = self.template.id
        self.services["iso"]["zoneid"] = self.zone.id
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=self.domain.id
        )
        self.cleanup = [self.account]
        return

    def tearDown(self):
        try:
            self.debug("Cleaning up the resources")
            cleanup_resources(self.apiclient, self.cleanup)
            self.debug("Cleanup complete!")
        except Exception as e:
            self.debug("Warning! Exception in tearDown: %s" % e)

    @attr(tags=["advanced", "eip", "advancedns", "basic", "sg"])
    def test_01_deploy_ha_vm_startvm_false(self):
        """Test Deploy HA enabled Virtual Machine with startvm=false."""
        # Validate the following:
        # 1. deploy HA enabled Vm with the startvm parameter = false
        # 2. listVM command should return the deployed VM. State of this VM
        #    should be "Stopped".
        self.debug("Deploying instance in the account: %s" %
                   self.account.name)
        self.virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            diskofferingid=self.disk_offering.id,
            startvm=False
        )
        self.debug("Deployed instance in account: %s" % self.account.name)
        list_vm_response = list_virtual_machines(
            self.apiclient,
            id=self.virtual_machine.id
        )
        self.debug(
            "Verify listVirtualMachines response for virtual machine: %s"
            % self.virtual_machine.id
        )
        self.assertEqual(
            isinstance(list_vm_response, list),
            True,
            "Check list response returns a valid list"
        )
        vm_response = list_vm_response[0]
        self.assertEqual(
            vm_response.state,
            "Stopped",
            "VM should be in Stopped state after deployment"
        )
        return

    @attr(tags=["advanced", "eip", "advancedns", "basic", "sg"])
    def test_02_deploy_ha_vm_from_iso(self):
        """Test Deploy HA enabled Virtual Machine from ISO."""
        # Validate the following:
        # 1. deploy HA enabled Vm using ISO with the startvm parameter=true
        # 2. listVM command should return the deployed VM. State of this VM
        #    should be "Running".
        self.iso = Iso.create(
            self.apiclient,
            self.services["iso"],
            account=self.account.name,
            domainid=self.account.domainid
        )
        try:
            # Download the ISO
            self.iso.download(self.apiclient)
            self.cleanup.append(self.iso)
        except Exception as e:
            raise Exception("Exception while downloading ISO %s: %s"
                            % (self.iso.id, e))
        self.debug("Registered ISO: %s" % self.iso.name)
        self.debug("Deploying instance in the account: %s" %
                   self.account.name)
        self.virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            templateid=self.iso.id,
            serviceofferingid=self.service_offering.id,
            diskofferingid=self.disk_offering.id,
            startvm=True
        )
        self.debug("Deployed instance in account: %s" % self.account.name)
        list_vm_response = list_virtual_machines(
            self.apiclient,
            id=self.virtual_machine.id
        )
        self.debug(
            "Verify listVirtualMachines response for virtual machine: %s"
            % self.virtual_machine.id
        )
        self.assertEqual(
            isinstance(list_vm_response, list),
            True,
            "Check list response returns a valid list"
        )
        vm_response = list_vm_response[0]
        self.assertEqual(
            vm_response.state,
            "Running",
            "VM should be in Running state after deployment"
        )
        return

    @attr(tags=["advanced", "eip", "advancedns", "basic", "sg"])
    def test_03_deploy_ha_vm_iso_startvm_false(self):
        """Test Deploy HA enabled Virtual Machine from ISO with
        startvm=false.
        """
        # Validate the following:
        # 1. deploy HA enabled Vm using ISO with the startvm parameter=false
        # 2. listVM command should return the deployed VM. State of this VM
        #    should be "Stopped".
        self.debug("Deploying instance in the account: %s" %
                   self.account.name)
        self.virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            diskofferingid=self.disk_offering.id,
            startvm=False
        )
        self.debug("Deployed instance in account: %s" % self.account.name)
        list_vm_response = list_virtual_machines(
            self.apiclient,
            id=self.virtual_machine.id
        )
        self.debug(
            "Verify listVirtualMachines response for virtual machine: %s"
            % self.virtual_machine.id
        )
        self.assertEqual(
            isinstance(list_vm_response, list),
            True,
            "Check list response returns a valid list"
        )
        vm_response = list_vm_response[0]
        self.assertEqual(
            vm_response.state,
            "Stopped",
            # Fixed message: this test expects Stopped (startvm=false).
            "VM should be in Stopped state after deployment"
        )
        return
class TestRouterStateAfterDeploy(cloudstackTestCase):
    """Tests verifying virtual router state around VM deployment/destroy."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestRouterStateAfterDeploy,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        # Create service offerings, disk offerings etc
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls.disk_offering = DiskOffering.create(
            cls.api_client,
            cls.services["disk_offering"]
        )
        # Cleanup
        cls._cleanup = [
            cls.service_offering,
            cls.disk_offering,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.services = Services().services
        self.services["virtual_machine"]["zoneid"] = self.zone.id
        self.services["virtual_machine"]["template"] = self.template.id
        self.services["iso"]["zoneid"] = self.zone.id
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=self.domain.id
        )
        self.cleanup = [self.account]
        return

    def tearDown(self):
        try:
            self.debug("Cleaning up the resources")
            cleanup_resources(self.apiclient, self.cleanup)
            self.debug("Cleanup complete!")
        except Exception as e:
            self.debug("Warning! Exception in tearDown: %s" % e)

    @attr(tags=["advanced", "eip", "advancedns", "basic", "sg"])
    def test_01_deploy_vm_no_startvm(self):
        """Test router state when VMs are deployed with startvm=false/true."""
        # Validate the following:
        # 1. deploy Vm with startvm=false: no router should be created
        # 2. deploy Vm with startvm=true: router should come up Running
        # 3. destroy the running VM and wait for expunge
        self.debug("Deploying instance in the account: %s" %
                   self.account.name)
        self.virtual_machine_1 = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            diskofferingid=self.disk_offering.id,
            startvm=False
        )
        self.debug("Deployed instance in account: %s" % self.account.name)
        list_vm_response = list_virtual_machines(
            self.apiclient,
            id=self.virtual_machine_1.id
        )
        self.debug(
            "Verify listVirtualMachines response for virtual machine: %s"
            % self.virtual_machine_1.id
        )
        self.assertEqual(
            isinstance(list_vm_response, list),
            True,
            "Check list response returns a valid list"
        )
        vm_response = list_vm_response[0]
        self.assertEqual(
            vm_response.state,
            "Stopped",
            "VM should be in stopped state after deployment"
        )
        self.debug("Checking the router state after VM deployment")
        routers = Router.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        # No VM has been started yet, so no router should exist.
        self.assertEqual(
            routers,
            None,
            "List routers should return empty response"
        )
        self.debug(
            "Deploying another instance (startvm=true) in the account: %s" %
            self.account.name)
        self.virtual_machine_2 = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            diskofferingid=self.disk_offering.id,
            startvm=True
        )
        self.debug("Deployed instance in account: %s" % self.account.name)
        list_vm_response = list_virtual_machines(
            self.apiclient,
            id=self.virtual_machine_2.id
        )
        self.debug(
            "Verify listVirtualMachines response for virtual machine: %s"
            % self.virtual_machine_2.id
        )
        self.assertEqual(
            isinstance(list_vm_response, list),
            True,
            "Check list response returns a valid list"
        )
        vm_response = list_vm_response[0]
        self.assertEqual(
            vm_response.state,
            "Running",
            "VM should be in Running state after deployment"
        )
        self.debug("Checking the router state after VM deployment")
        routers = Router.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(routers, list),
            True,
            "List routers should not return empty response"
        )
        for router in routers:
            self.debug("Router state: %s" % router.state)
            self.assertEqual(
                router.state,
                "Running",
                "Router should be in running state when instance is "
                "running in the account"
            )
        self.debug("Destroying the running VM:%s" %
                   self.virtual_machine_2.name)
        self.virtual_machine_2.delete(self.apiclient)
        self.debug("Instance destroyed..waiting till expunge interval")
        interval = list_configurations(
            self.apiclient,
            name='expunge.interval'
        )
        delay = list_configurations(
            self.apiclient,
            name='expunge.delay'
        )
        # Sleep to ensure that all resources are deleted
        time.sleep((int(interval[0].value) + int(delay[0].value)) * 2)
        self.debug("Checking the router state after VM expunge")
        routers = Router.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        # NOTE(review): the stopped VM (virtual_machine_1) still exists in
        # the account, so the router is expected to persist. The original
        # message claimed the router "should get deleted", contradicting
        # the assertNotEqual below; message corrected to match.
        self.assertNotEqual(
            routers,
            None,
            "Router should still exist while other VMs remain in the account"
        )
        return
class TestDeployVMBasicZone(cloudstackTestCase):
    """Scaffolding for basic-zone deployment tests (no test methods here)."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestDeployVMBasicZone,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        # Create service offerings, disk offerings etc
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls.disk_offering = DiskOffering.create(
            cls.api_client,
            cls.services["disk_offering"]
        )
        # Cleanup
        cls._cleanup = [
            cls.service_offering,
            cls.disk_offering,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.services = Services().services
        self.services["virtual_machine"]["zoneid"] = self.zone.id
        self.services["iso"]["zoneid"] = self.zone.id
        self.services["virtual_machine"]["template"] = self.template.id
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=self.domain.id
        )
        self.cleanup = [self.account]
        return

    def tearDown(self):
        try:
            self.debug("Cleaning up the resources")
            cleanup_resources(self.apiclient, self.cleanup)
            self.debug("Cleanup complete!")
        except Exception as e:
            self.debug("Warning! Exception in tearDown: %s" % e)
class TestDeployVMFromTemplate(cloudstackTestCase):
    """Tests deploying a VM from a registered template with startvm=false."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestDeployVMFromTemplate,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        # Create service, disk offerings etc
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"],
            offerha=True
        )
        cls.disk_offering = DiskOffering.create(
            cls.api_client,
            cls.services["disk_offering"]
        )
        # Cleanup
        cls._cleanup = [
            cls.service_offering,
            cls.disk_offering,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.services = Services().services
        self.services["virtual_machine"]["zoneid"] = self.zone.id
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            domainid=self.domain.id
        )
        # Register a fresh (password-enabled) template per test.
        self.template = Template.register(
            self.apiclient,
            self.services["template"],
            zoneid=self.zone.id,
            account=self.account.name,
            domainid=self.account.domainid
        )
        try:
            self.template.download(self.apiclient)
        except Exception as e:
            raise Exception("Template download failed: %s" % e)
        self.cleanup = [self.account]
        return

    def tearDown(self):
        try:
            self.debug("Cleaning up the resources")
            cleanup_resources(self.apiclient, self.cleanup)
            self.debug("Cleanup complete!")
        except Exception as e:
            self.debug("Warning! Exception in tearDown: %s" % e)

    @attr(tags=["advanced", "eip", "advancedns", "basic", "sg"])
    def test_deploy_vm_password_enabled(self):
        """Test Deploy Virtual Machine with startVM=false & enabledpassword
        in template.
        """
        # Validate the following:
        # 1. Create the password enabled template
        # 2. Deploy Vm with this template and passing startvm=false
        # 3. Start VM. Deploy VM should be successful and it should be in Up
        #    and running state
        self.debug("Deploying instance in the account: %s" %
                   self.account.name)
        self.virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            templateid=self.template.id,
            startvm=False,
        )
        self.debug("Deployed instance in account: %s" % self.account.name)
        list_vm_response = list_virtual_machines(
            self.apiclient,
            id=self.virtual_machine.id
        )
        self.debug(
            "Verify listVirtualMachines response for virtual machine: %s"
            % self.virtual_machine.id
        )
        self.assertEqual(
            isinstance(list_vm_response, list),
            True,
            "Check list response returns a valid list"
        )
        vm_response = list_vm_response[0]
        self.assertEqual(
            vm_response.state,
            "Stopped",
            "VM should be in stopped state after deployment"
        )
        self.debug("Starting the instance: %s" % self.virtual_machine.name)
        self.virtual_machine.start(self.apiclient)
        self.debug("Started the instance: %s" % self.virtual_machine.name)
        list_vm_response = list_virtual_machines(
            self.apiclient,
            id=self.virtual_machine.id
        )
        self.debug(
            "Verify listVirtualMachines response for virtual machine: %s"
            % self.virtual_machine.id
        )
        self.assertEqual(
            isinstance(list_vm_response, list),
            True,
            "Check list response returns a valid list"
        )
        vm_response = list_vm_response[0]
        self.assertEqual(
            vm_response.state,
            "Running",
            "VM should be in running state after deployment"
        )
        return
class TestVMAccountLimit(cloudstackTestCase):
    """Tests that per-account VM resource limits are enforced."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestVMAccountLimit,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        # Create Account, VMs etc
        cls.account = Account.create(
            cls.api_client,
            cls.services["account"],
            domainid=cls.domain.id
        )
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls._cleanup = [
            cls.service_offering,
            cls.account
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created instance, volumes and snapshots
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "eip", "advancedns", "basic", "sg"])
    def test_vm_per_account(self):
        """Test VM limit per account."""
        # Validate the following
        # 1. Set the resource limit for VM per account.
        # 2. Deploy VMs more than limit in that account.
        # 3. API should error out
        self.debug(
            "Updating instance resource limit for account: %s" %
            self.account.name)
        # Set usage_vm=1 for Account 1
        update_resource_limit(
            self.apiclient,
            0,  # Instance
            account=self.account.name,
            domainid=self.account.domainid,
            max=1
        )
        self.debug(
            "Deploying VM instance in account: %s" %
            self.account.name)
        virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            templateid=self.template.id,
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            startvm=False
        )
        # Verify VM state
        self.assertEqual(
            virtual_machine.state,
            'Stopped',
            # Fixed message: startvm=False means the VM must be Stopped.
            "VM state should be Stopped after deployment with startvm=false"
        )
        # Exception should be raised for second instance (account_1)
        with self.assertRaises(Exception):
            VirtualMachine.create(
                self.apiclient,
                self.services["virtual_machine"],
                templateid=self.template.id,
                accountid=self.account.name,
                domainid=self.account.domainid,
                serviceofferingid=self.service_offering.id,
                startvm=False
            )
        return
class TestUploadAttachVolume(cloudstackTestCase):
    """Tests uploading a volume and attaching it to a stopped VM."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestUploadAttachVolume,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        # Create Account, VMs etc
        cls.account = Account.create(
            cls.api_client,
            cls.services["account"],
            domainid=cls.domain.id
        )
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls._cleanup = [
            cls.service_offering,
            cls.account
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []
        return

    def tearDown(self):
        try:
            # Clean up, terminate the created instance, volumes and snapshots
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "eip", "advancedns", "basic", "sg"])
    def test_upload_attach_volume(self):
        """Test Upload volume and attach to VM in stopped state."""
        # Validate the following
        # 1. Upload the volume using uploadVolume API call
        # 2. Deploy VM with startvm=false.
        # 3. Attach the volume to the deployed VM in step 2
        self.debug(
            "Uploading the volume: %s" %
            self.services["volume"]["diskname"])
        try:
            volume = Volume.upload(
                self.apiclient,
                self.services["volume"],
                zoneid=self.zone.id,
                account=self.account.name,
                domainid=self.account.domainid
            )
            self.debug("Uploading the volume: %s" % volume.name)
            volume.wait_for_upload(self.apiclient)
            # Fixed: original debug call had a %s placeholder but no
            # argument, which would have logged the literal placeholder.
            self.debug("Volume: %s uploaded successfully" % volume.name)
        except Exception as e:
            self.fail("Failed to upload the volume: %s" % e)
        self.debug(
            "Deploying VM instance in account: %s" %
            self.account.name)
        virtual_machine = VirtualMachine.create(
            self.apiclient,
            self.services["virtual_machine"],
            templateid=self.template.id,
            accountid=self.account.name,
            domainid=self.account.domainid,
            serviceofferingid=self.service_offering.id,
            startvm=False
        )
        # Verify VM state
        self.assertEqual(
            virtual_machine.state,
            'Stopped',
            "VM state should be Stopped after deployment with startvm=false"
        )
        virtual_machine.attach_volume(self.apiclient, volume)
        return
class TestDeployOnSpecificHost(cloudstackTestCase):
    """Tests that an admin can deploy a VM on a specific host."""

    @classmethod
    def setUpClass(cls):
        cls.api_client = super(
            TestDeployOnSpecificHost,
            cls
        ).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.services["virtual_machine"]["template"] = cls.template.id
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls._cleanup = [
            cls.service_offering,
        ]
        return

    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        # Admin account is required to pass hostid to deployVirtualMachine.
        self.account = Account.create(
            self.apiclient,
            self.services["account"],
            admin=True,
            domainid=self.domain.id
        )
        self.cleanup = []
        return

    def tearDown(self):
        try:
            self.account.delete(self.apiclient)
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return

    @attr(tags=["advanced", "advancedns", "simulator",
                "api", "basic", "eip", "sg"])
    def test_deployVmOnGivenHost(self):
        """Test deploy VM on specific host."""
        # Steps for validation
        # 1. as admin list available hosts that are Up
        # 2. deployVM with hostid=above host
        # 3. listVirtualMachines
        # 4. destroy VM
        # Validate the following
        # 1. listHosts returns at least one host in Up state
        # 2. VM should be in Running
        # 3. VM should be on the host that it was deployed on
        hosts = Host.list(
            self.apiclient,
            zoneid=self.zone.id,
            type='Routing',
            state='Up',
            listall=True
        )
        self.assertEqual(
            isinstance(hosts, list),
            True,
            "CS should have at least one host Up and Running"
        )
        host = hosts[0]
        self.debug("Deploying VM on host: %s" % host.name)
        try:
            vm = VirtualMachine.create(
                self.apiclient,
                self.services["virtual_machine"],
                templateid=self.template.id,
                accountid=self.account.name,
                domainid=self.account.domainid,
                serviceofferingid=self.service_offering.id,
                hostid=host.id
            )
            self.debug("Deploy VM succeeded")
        except Exception as e:
            self.fail("Deploy VM failed with exception: %s" % e)
        self.debug("Checking the state of deployed VM")
        vms = VirtualMachine.list(
            self.apiclient,
            id=vm.id,
            listall=True,
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.assertEqual(
            isinstance(vms, list),
            True,
            "List Vm should return a valid response"
        )
        vm_response = vms[0]
        self.assertEqual(
            vm_response.state,
            "Running",
            "VM should be in running state after deployment"
        )
        self.assertEqual(
            vm_response.hostid,
            host.id,
            "Host id where VM is deployed should match"
        )
        return
| 41.317884 | 120 | 0.452668 | 6,699 | 82,016 | 5.444843 | 0.067025 | 0.060644 | 0.041947 | 0.027142 | 0.828869 | 0.814558 | 0.794983 | 0.777053 | 0.770117 | 0.735243 | 0 | 0.004752 | 0.476553 | 82,016 | 1,984 | 121 | 41.33871 | 0.844867 | 0.086825 | 0 | 0.733633 | 0 | 0.001284 | 0.133166 | 0 | 0 | 0 | 0 | 0 | 0.039153 | 1 | 0.032092 | false | 0.002567 | 0.005777 | 0 | 0.068678 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
7907365d1d7dfdab7e99a00ac86d16b236ea1b0a | 16,564 | py | Python | tracpro/profiles/tests/test_views.py | rapidpro/tracpro | a68a782a7ff9bb0ccee85368132d8847c280fea3 | [
"BSD-3-Clause"
] | 5 | 2015-07-21T15:58:31.000Z | 2019-09-14T22:34:00.000Z | tracpro/profiles/tests/test_views.py | rapidpro/tracpro | a68a782a7ff9bb0ccee85368132d8847c280fea3 | [
"BSD-3-Clause"
] | 197 | 2015-03-24T15:26:04.000Z | 2017-11-28T19:24:37.000Z | tracpro/profiles/tests/test_views.py | rapidpro/tracpro | a68a782a7ff9bb0ccee85368132d8847c280fea3 | [
"BSD-3-Clause"
] | 10 | 2015-03-24T12:26:36.000Z | 2017-02-21T13:08:57.000Z | from __future__ import absolute_import, unicode_literals
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from tracpro.test.cases import TracProDataTest
class ManageUserCreateTest(TracProDataTest):
    """Tests for the superuser-only admin user-creation view."""

    def test_create_as_non_superuser(self):
        """A non-superuser is redirected to login instead of seeing the form."""
        # Non-superuser cannot use this view
        url = reverse('profiles.admin_create')
        self.login(self.admin)  # Not a superuser
        # Post something that would be an error (empty form) and would be a 200
        # status if we had access.
        response = self.url_post('unicef', url, dict())
        # We get redirected to login
        self.assertEqual(response.status_code, 302, response)
        self.assertIn('login', response['Location'])

    def test_create_with_fields_missing(self):
        """Submitting an empty form re-renders with required-field errors."""
        # An error case
        url = reverse('profiles.admin_create')
        self.login(self.superuser)
        # submit with no fields entered
        response = self.url_post('unicef', url, dict())
        self.assertEqual(response.status_code, 200, response)
        error_dict = response.context['form'].errors
        # Exactly four errors expected: three required fields plus __all__.
        self.assertEqual(4, len(error_dict), repr(error_dict))
        self.assertFormError(
            response, 'form', 'full_name',
            'This field is required.')
        self.assertFormError(
            response, 'form', 'email',
            'This field is required.')
        self.assertFormError(
            response, 'form', 'password',
            'This field is required.')
        self.assertFormError(
            response, 'form', '__all__',
            'Email address already taken.'  # FIXME: this error makes no sense in this context
        )

    def test_create_successfully(self):
        """A valid submission creates an active, non-superuser account."""
        # create non-superuser
        url = reverse('profiles.admin_create')
        self.login(self.superuser)
        data = {
            'full_name': "Mo Polls",
            'email': "mo@trac.com",
            'password': "abc123xy",
            'confirm_password': "abc123xy",
            'is_active': True,
            'is_superuser': False,
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302, response)
        user = User.objects.get(email='mo@trac.com')
        self.assertEqual(user.profile.full_name, 'Mo Polls')
        self.assertTrue(user.is_active)
        self.assertFalse(user.is_superuser)
        # The stored password must authenticate.
        self.assertEqual(user, authenticate(username=user.username, password="abc123xy"))

    def test_create_superuser(self):
        """A valid submission with is_superuser=True creates a superuser."""
        # create superuser
        url = reverse('profiles.admin_create')
        self.login(self.superuser)
        data = {
            'full_name': "Mo Polls",
            'email': "mo@trac.com",
            'password': "abc123xy",
            'confirm_password': "abc123xy",
            'is_active': True,
            'is_superuser': True,
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302, response)
        user = User.objects.get(email='mo@trac.com')
        self.assertEqual(user.profile.full_name, 'Mo Polls')
        self.assertTrue(user.is_active)
        self.assertTrue(user.is_superuser)
class ManageUserUpdateTest(TracProDataTest):
    """Tests for the superuser-only admin user-update view."""

    def test_update_as_non_superuser(self):
        """A non-superuser is redirected to login instead of updating."""
        # Non-superuser cannot use this view
        self.login(self.admin)
        url = reverse('profiles.admin_update', args=[self.user1.pk])
        response = self.url_post('unicef', url, dict())
        self.assertEqual(response.status_code, 302)
        self.assertIn('login', response['Location'])

    def test_update(self):
        """A superuser can toggle flags and (optionally) change passwords."""
        # Change non-superuser to superuser, change their password, etc etc.
        self.login(self.superuser)
        url = reverse('profiles.admin_update', args=[self.user1.pk])
        data = {
            'full_name': "Mo Polls",
            'email': "mo@trac.com",
            'new_password': "abc123xy",
            'confirm_password': "abc123xy",
            'is_active': False,
            'is_superuser': True,
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302)
        user = User.objects.get(email='mo@trac.com')
        self.assertEqual(user.profile.full_name, "Mo Polls")
        self.assertFalse(user.is_active)
        self.assertTrue(user.is_superuser)
        self.assertEqual(user, authenticate(username=user.username, password="abc123xy"))
        # and back. changing password optional.
        data = {
            'full_name': "Mo Polls",
            'email': "mo@trac.com",
            # 'password': "abc123xy",
            # 'confirm_password': "abc123xy",
            'is_active': True,
            'is_superuser': False,
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302)
        user = User.objects.get(email='mo@trac.com')
        self.assertEqual(user.profile.full_name, "Mo Polls")
        self.assertTrue(user.is_active)
        self.assertFalse(user.is_superuser)
        # Password unchanged from the first update above.
        self.assertEqual(user, authenticate(username=user.username, password="abc123xy"))
class UserCRUDLTest(TracProDataTest):
    """Tests for the org-level user CRUDL views (create/update/read/list/self)."""

    def test_create(self):
        """Org admins can create users; form validates password and email."""
        url = reverse('profiles.user_create')
        # log in as an org administrator
        self.login(self.admin)
        # submit with no fields entered
        response = self.url_post('unicef', url, dict())
        self.assertEqual(response.status_code, 200)
        self.assertFormError(
            response, 'form', 'full_name',
            'This field is required.')
        self.assertFormError(
            response, 'form', 'email',
            'This field is required.')
        self.assertFormError(
            response, 'form', 'password',
            'This field is required.')
        # submit again with all required fields but invalid password
        data = {
            'full_name': "Mo Polls",
            'email': "mo@trac.com",
            'password': "123",
            'confirm_password': "123",
        }
        response = self.url_post('unicef', url, data)
        self.assertFormError(
            response, 'form', 'password',
            "Ensure this value has at least 8 characters (it has 3).")
        # submit again with valid password but mismatched confirmation
        data = {
            'full_name': "Mo Polls",
            'email': "mo@trac.com",
            'password': "Qwerty123",
            'confirm_password': "123",
        }
        response = self.url_post('unicef', url, data)
        self.assertFormError(
            response, 'form', 'confirm_password',
            "Passwords don't match.")
        # submit again with valid password and confirmation
        data = {
            'full_name': "Mo Polls",
            'email': "mo@trac.com",
            'password': "Qwerty123",
            'confirm_password': "Qwerty123",
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302)
        # check new user and profile
        user = User.objects.get(email="mo@trac.com")
        self.assertEqual(user.profile.full_name, "Mo Polls")
        self.assertEqual(user.email, "mo@trac.com")
        # Username mirrors the email address.
        self.assertEqual(user.username, "mo@trac.com")
        # try again with same email address
        data = {
            'full_name': "Mo Polls II",
            'email': "mo@trac.com",
            'password': "Qwerty123",
            'confirm_password': "Qwerty123",
        }
        response = self.url_post('unicef', url, data)
        self.assertFormError(
            response, 'form', None,
            "Email address already taken.")

    def test_update(self):
        """Org admins can edit users, reassign regions, and deactivate them."""
        url = reverse('profiles.user_update', args=[self.user1.pk])
        # log in as an org administrator
        self.login(self.admin)
        response = self.url_get('unicef', url)
        self.assertEqual(response.status_code, 200)
        # can assign to any org region
        self.assertEqual(len(response.context['form'].fields['regions'].choices), 3)
        # submit with no fields entered
        response = self.url_post('unicef', url, dict())
        self.assertEqual(response.status_code, 200)
        self.assertFormError(
            response, 'form', 'full_name',
            'This field is required.')
        self.assertFormError(
            response, 'form', 'email',
            'This field is required.')
        # submit with all fields entered
        data = {
            'full_name': "Morris",
            'email': "mo2@chat.com",
            'regions': [self.region3.pk],
            'is_active': True,
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302)
        # check updated user and profile
        user = User.objects.get(pk=self.user1.pk)
        self.assertEqual(user.profile.full_name, "Morris")
        self.assertEqual(user.email, "mo2@chat.com")
        self.assertEqual(user.username, "mo2@chat.com")
        self.assertEqual(list(user.regions.all()), [self.region3])
        # submit again for good measure
        data = {
            'full_name': "Morris",
            'email': "mo2@chat.com",
            'regions': [self.region3.pk],
            'is_active': True,
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302)
        # try giving user someone else's email address
        data = {
            'full_name': "Morris",
            'email': "eric@nyaruka.com",
            'password': "Qwerty123",
            'confirm_password': "Qwerty123",
        }
        response = self.url_post('unicef', url, data)
        self.assertFormError(
            response, 'form', None,
            "Email address already taken.")
        # check de-activating user
        data = {
            'full_name': "Morris",
            'email': "mo2@chat.com",
            'regions': [],
            'is_active': False,
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302)
        # check user object is inactive
        user = User.objects.get(pk=self.user1.pk)
        self.assertFalse(user.is_active)

    def test_read(self):
        """Profile page shows the right edit button (or none) per viewer."""
        # log in as an org administrator
        self.login(self.admin)
        # view our own profile
        response = self.url_get(
            'unicef', reverse('profiles.user_read', args=[self.admin.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.context['edit_button_url'],
            reverse('profiles.user_self'))
        # view other user's profile
        response = self.url_get(
            'unicef', reverse('profiles.user_read', args=[self.user1.pk]))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            response.context['edit_button_url'],
            reverse('profiles.user_update', args=[self.user1.pk]))
        # try to view user from other org
        response = self.url_get(
            'unicef', reverse('profiles.user_read', args=[self.user3.pk]))
        self.assertEqual(response.status_code, 404)
        # log in as a user
        self.login(self.user1)
        # view other user's profile
        response = self.url_get(
            'unicef', reverse('profiles.user_read', args=[self.admin.pk]))
        self.assertEqual(response.status_code, 200)
        # Regular users get no edit button on someone else's profile.
        self.assertIsNone(response.context['edit_button_url'])

    def test_list(self):
        """Only org administrators may list the org's users."""
        url = reverse('profiles.user_list')
        response = self.url_get('unicef', url)
        self.assertLoginRedirect(response, 'unicef', url)
        # log in as a non-administrator
        self.login(self.user1)
        response = self.url_get('unicef', url)
        self.assertLoginRedirect(response, 'unicef', url)
        # log in as an org administrator
        self.login(self.admin)
        response = self.url_get('unicef', url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(response.context['object_list']), 2)

    def test_self(self):
        """Users edit their own profile; forced password changes are enforced."""
        url = reverse('profiles.user_self')
        # try as unauthenticated
        response = self.url_get('unicef', url)
        self.assertLoginRedirect(response, 'unicef', url)
        # try as superuser (doesn't have a chat profile)
        self.login(self.superuser)
        response = self.url_get('unicef', url)
        self.assertEqual(response.status_code, 404)
        # log in as an org administrator
        self.login(self.admin)
        response = self.url_get('unicef', url)
        self.assertEqual(response.status_code, 200)
        # log in as a user
        self.login(self.user1)
        response = self.url_get('unicef', url)
        self.assertEqual(response.status_code, 200)
        # submit with no fields entered
        response = self.url_post('unicef', url, dict())
        self.assertEqual(response.status_code, 200)
        self.assertFormError(
            response, 'form', 'full_name',
            'This field is required.')
        self.assertFormError(
            response, 'form', 'email',
            'This field is required.')
        # submit with all required fields entered
        data = dict(full_name="Morris", email="mo2@trac.com")
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302)
        # check updated user and profile
        user = User.objects.get(pk=self.user1.pk)
        self.assertEqual(user.profile.full_name, "Morris")
        self.assertEqual(user.email, "mo2@trac.com")
        self.assertEqual(user.username, "mo2@trac.com")
        # Region assignment is untouched by a self-edit.
        self.assertEqual(list(user.regions.all()), [self.region1])
        # submit with all required fields entered and password fields
        old_password_hash = user.password
        data = {
            'full_name': "Morris",
            'email': "mo2@trac.com",
            'new_password': "Qwerty123",
            'confirm_password': "Qwerty123",
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302)
        # check password has been changed
        user = User.objects.get(pk=self.user1.pk)
        self.assertNotEqual(user.password, old_password_hash)
        # check when user is being forced to change their password
        old_password_hash = user.password
        self.user1.profile.change_password = True
        self.user1.profile.save()
        # submit without password
        data = dict(full_name="Morris", email="mo2@trac.com")
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 200)
        self.assertFormError(
            response, 'form', 'password',
            'This field is required.')
        # submit again with password but no confirmation
        data = {
            'full_name': "Morris",
            'email': "mo2@trac.com",
            'password': "Qwerty123",
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 200)
        self.assertFormError(
            response, 'form', 'confirm_password',
            "Passwords don't match.")
        # submit again with password and confirmation
        data = {
            'full_name': "Morris",
            'email': "mo2@trac.com",
            'password': "Qwerty123",
            'confirm_password': "Qwerty123",
        }
        response = self.url_post('unicef', url, data)
        self.assertEqual(response.status_code, 302)
        # check password has changed and no longer has to be changed
        user = User.objects.get(pk=self.user1.pk)
        self.assertFalse(user.profile.change_password)
        self.assertNotEqual(user.password, old_password_hash)
class DashUserCRUDLTest(TracProDataTest):
    """Tests for the dash user login view."""

    def test_login(self):
        """Login redirects to the root of whichever (sub)domain was posted to."""
        url = reverse('users.user_login')
        credentials = {
            'username': 'sam@unicef.org',
            'password': 'sam@unicef.org',
        }
        # Without an org subdomain we land on the plain test server root.
        response = self.url_post(None, url, credentials)
        self.assertRedirects(
            response, 'http://testserver/',
            fetch_redirect_response=False)
        # With the org subdomain the redirect stays on that subdomain.
        response = self.url_post('unicef', url, credentials)
        self.assertRedirects(
            response, 'http://unicef.testserver/',
            fetch_redirect_response=False)
| 36.324561 | 94 | 0.594482 | 1,832 | 16,564 | 5.264738 | 0.114629 | 0.079316 | 0.057543 | 0.084189 | 0.822602 | 0.776257 | 0.752618 | 0.717781 | 0.691654 | 0.65464 | 0 | 0.017082 | 0.286102 | 16,564 | 455 | 95 | 36.404396 | 0.798562 | 0.11368 | 0 | 0.777448 | 0 | 0 | 0.185026 | 0.008615 | 0 | 0 | 0 | 0.002198 | 0.261128 | 1 | 0.035608 | false | 0.118694 | 0.014837 | 0 | 0.062315 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
7909692c43386bd754780313c2a51dcbdbbc4e97 | 1,664 | py | Python | tests/test_sql.py | vprakash-ucl/pyrate | 42f67a5f8a89740bd6ef31458383550dba5a09ca | [
"MIT"
] | 22 | 2015-03-17T14:36:39.000Z | 2022-03-14T12:31:08.000Z | tests/test_sql.py | vprakash-ucl/pyrate | 42f67a5f8a89740bd6ef31458383550dba5a09ca | [
"MIT"
] | 31 | 2015-09-02T10:52:55.000Z | 2016-08-23T09:00:36.000Z | tests/test_sql.py | UCL-ShippingGroup/pyrate | 4887316d8935f7aaeaa18144dd1acd274d7dced0 | [
"MIT"
] | 10 | 2015-12-23T13:01:07.000Z | 2022-03-15T10:52:46.000Z | """ Tests the creation of tables, and the methods of the sql class
"""
from pyrate.repositories.sql import Table
from utilities import setup_database
class TestSql:
    """ Tests the Sql class
    """

    def _assert_columns(self, db, row, expected_columns):
        """Build the column list for *row* and check its format and contents.

        Shared by both tests below; the two originals were byte-for-byte
        duplicates apart from the input row.
        """
        with db:
            actual = db.clean._get_list_of_columns(row)
            # Result must be a parenthesised, comma-separated string.
            assert isinstance(actual, str)
            assert actual.endswith(')')
            assert actual[0] == '('
            actual_contents = actual.strip('()').split(',')
            for expected_column in expected_columns:
                assert expected_column in actual_contents

    def test_get_list_of_columns(self, setup_database):
        """Column list contains each (lowercase) key of the input row."""
        db = setup_database
        rows = [{'unit': 'days',
                 'description': 'At berth/anchor',
                 'name': 's_berth_day'},
                {'unit': 'SOG / kts',
                 'description': 'Average at sea',
                 'name': 's_av_sea'}]
        self._assert_columns(db, rows[0], ['description', 'name', 'unit'])

    def test_get_list_of_columns_lowerconversion(self, setup_database):
        """Mixed-case keys are converted to lowercase column names."""
        db = setup_database
        rows = [{'uNit': 'days',
                 'Description': 'At berth/anchor',
                 'namE': 's_berth_day'},
                {'unit': 'SOG / kts',
                 'description': 'Average at sea',
                 'name': 's_av_sea'}]
        self._assert_columns(db, rows[0], ['description', 'name', 'unit'])
| 33.959184 | 71 | 0.558293 | 180 | 1,664 | 4.95 | 0.311111 | 0.072952 | 0.040404 | 0.071829 | 0.819304 | 0.819304 | 0.767677 | 0.767677 | 0.767677 | 0.767677 | 0 | 0.0035 | 0.313101 | 1,664 | 48 | 72 | 34.666667 | 0.776028 | 0.052284 | 0 | 0.702703 | 0 | 0 | 0.15739 | 0 | 0 | 0 | 0 | 0 | 0.216216 | 1 | 0.054054 | false | 0 | 0.054054 | 0 | 0.135135 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
79229fcb1f674c9b72d9c4276bad78034eabd994 | 16,335 | py | Python | pay-api/tests/unit/services/test_payment_transaction.py | mengdong19/sbc-pay | a5a1d81ae3500457178371fb004fd09194bbb6b7 | [
"Apache-2.0"
] | null | null | null | pay-api/tests/unit/services/test_payment_transaction.py | mengdong19/sbc-pay | a5a1d81ae3500457178371fb004fd09194bbb6b7 | [
"Apache-2.0"
] | null | null | null | pay-api/tests/unit/services/test_payment_transaction.py | mengdong19/sbc-pay | a5a1d81ae3500457178371fb004fd09194bbb6b7 | [
"Apache-2.0"
] | null | null | null | # Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests to assure the FeeSchedule Service.
Test-Suite to ensure that the FeeSchedule Service is working as expected.
"""
import uuid
from datetime import datetime
import pytest
from pay_api.exceptions import BusinessException
from pay_api.models import FeeSchedule
from pay_api.services.payment_transaction import PaymentTransaction as PaymentTransactionService
from pay_api.utils.enums import Status
from pay_api.utils.errors import Error
from tests import skip_in_pod
from tests.utilities.base_test import (
factory_invoice, factory_invoice_reference, factory_payment, factory_payment_account, factory_payment_line_item,
factory_payment_transaction, get_paybc_transaction_request)
def test_transaction_saved_from_new(session):
    """Assert a transaction built field-by-field is saved and can be fetched."""
    # Build a payment with one invoice and one line item.
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    # Populate a transaction manually rather than via the create() service.
    payment_transaction = PaymentTransactionService()
    payment_transaction.status_code = 'DRAFT'
    payment_transaction.transaction_end_time = datetime.now()
    payment_transaction.transaction_start_time = datetime.now()
    payment_transaction.pay_system_url = 'http://google.com'
    payment_transaction.client_system_url = 'http://google.com'
    payment_transaction.payment_id = payment.id
    payment_transaction = payment_transaction.save()
    transaction = PaymentTransactionService.find_by_id(payment.id, payment_transaction.id)
    assert transaction is not None
    assert transaction.id is not None
    assert transaction.status_code is not None
    assert transaction.payment_id is not None
    assert transaction.client_system_url is not None
    assert transaction.pay_system_url is not None
    assert transaction.transaction_start_time is not None
    assert transaction.transaction_end_time is not None
def test_transaction_create_from_new(session):
    """Assert a transaction created via the service is persisted correctly."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    transaction = PaymentTransactionService.create(payment.id, get_paybc_transaction_request())
    assert transaction is not None
    assert transaction.id is not None
    assert transaction.status_code is not None
    assert transaction.payment_id is not None
    assert transaction.client_system_url is not None
    assert transaction.pay_system_url is not None
    assert transaction.transaction_start_time is not None
    # The service object must be serializable.
    assert transaction.asdict() is not None
def test_transaction_create_from_invalid_payment(session):
    """Assert creating a transaction for a non-existent payment raises PAY005."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    # 999 is not a valid payment id.
    with pytest.raises(BusinessException) as excinfo:
        PaymentTransactionService.create(999, get_paybc_transaction_request())
    assert excinfo.value.status == Error.PAY005.status
    assert excinfo.value.message == Error.PAY005.message
    assert excinfo.value.code == Error.PAY005.name
@skip_in_pod
def test_transaction_update(session, stan_server, public_user_mock):
    """Assert updating a transaction with a receipt marks it COMPLETED."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    transaction = PaymentTransactionService.create(payment.id, get_paybc_transaction_request())
    # '123451' acts as the receipt number from the pay system.
    transaction = PaymentTransactionService.update_transaction(payment.id, transaction.id, '123451')
    assert transaction is not None
    assert transaction.id is not None
    assert transaction.status_code is not None
    assert transaction.payment_id is not None
    assert transaction.client_system_url is not None
    assert transaction.pay_system_url is not None
    assert transaction.transaction_start_time is not None
    assert transaction.transaction_end_time is not None
    assert transaction.status_code == Status.COMPLETED.value
@skip_in_pod
def test_transaction_update_with_no_receipt(session, stan_server):
    """Assert updating a transaction without a receipt marks it FAILED."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    # Blank invoice number so no receipt can be matched.
    factory_invoice_reference(invoice.id, invoice_number='').save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    transaction = PaymentTransactionService.create(payment.id, get_paybc_transaction_request())
    transaction = PaymentTransactionService.update_transaction(payment.id, transaction.id, None)
    assert transaction is not None
    assert transaction.id is not None
    assert transaction.status_code is not None
    assert transaction.payment_id is not None
    assert transaction.client_system_url is not None
    assert transaction.pay_system_url is not None
    assert transaction.transaction_start_time is not None
    assert transaction.transaction_end_time is not None
    assert transaction.status_code == Status.FAILED.value
    assert transaction.asdict() is not None
@skip_in_pod
def test_transaction_update_completed(session, stan_server, public_user_mock):
    """Assert updating an already-completed transaction raises PAY006."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    transaction = PaymentTransactionService.create(payment.id, get_paybc_transaction_request())
    transaction = PaymentTransactionService.update_transaction(payment.id, transaction.id, '123451')
    # Second update on the same (now completed) transaction must fail.
    with pytest.raises(BusinessException) as excinfo:
        PaymentTransactionService.update_transaction(payment.id, transaction.id, '123451')
    assert excinfo.value.status == Error.PAY006.status
    assert excinfo.value.message == Error.PAY006.message
    assert excinfo.value.code == Error.PAY006.name
def test_transaction_create_new_on_completed_payment(session):
    """Assert creating a transaction on a COMPLETED payment raises PAY006."""
    payment_account = factory_payment_account()
    # Payment is already completed before any transaction exists.
    payment = factory_payment(payment_status_code=Status.COMPLETED.value)
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    with pytest.raises(BusinessException) as excinfo:
        PaymentTransactionService.create(payment.id, get_paybc_transaction_request())
    assert excinfo.value.status == Error.PAY006.status
    assert excinfo.value.message == Error.PAY006.message
    assert excinfo.value.code == Error.PAY006.name
def test_multiple_transactions_for_single_payment(session):
    """Assert multiple transactions can be created for a single payment."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    # Three creates against the same payment; the last one is inspected.
    PaymentTransactionService.create(payment.id, get_paybc_transaction_request())
    PaymentTransactionService.create(payment.id, get_paybc_transaction_request())
    transaction = PaymentTransactionService.create(payment.id, get_paybc_transaction_request())
    assert transaction is not None
    assert transaction.id is not None
    assert transaction.status_code is not None
    assert transaction.payment_id is not None
    assert transaction.client_system_url is not None
    assert transaction.pay_system_url is not None
    assert transaction.transaction_start_time is not None
    assert transaction.status_code == Status.CREATED.value
def test_transaction_invalid_lookup(session):
    """Assert looking up an unknown transaction id raises PAY008."""
    with pytest.raises(BusinessException) as excinfo:
        PaymentTransactionService.find_by_id(1, uuid.uuid4())
    assert excinfo.value.status == Error.PAY008.status
    assert excinfo.value.message == Error.PAY008.message
    assert excinfo.value.code == Error.PAY008.name
def test_transaction_invalid_update(session):
    """Assert updating an unknown transaction id raises PAY008."""
    with pytest.raises(BusinessException) as excinfo:
        PaymentTransactionService.update_transaction(1, uuid.uuid4(), None)
    assert excinfo.value.status == Error.PAY008.status
    assert excinfo.value.message == Error.PAY008.message
    assert excinfo.value.code == Error.PAY008.name
def test_transaction_find_active_lookup(session):
    """Assert an active (CREATED) transaction is found for a payment."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    # CREATED counts as active for this lookup.
    transaction = factory_payment_transaction(payment.id, Status.CREATED.value)
    transaction.save()
    transaction = PaymentTransactionService.find_active_by_payment_id(payment.id)
    assert transaction is not None
    assert transaction.id is not None
    assert transaction.status_code is not None
    assert transaction.payment_id is not None
    assert transaction.client_system_url is not None
    assert transaction.pay_system_url is not None
    assert transaction.transaction_start_time is not None
    assert transaction.status_code == Status.CREATED.value
def test_transaction_find_active_none_lookup(session):
    """Assert no active transaction is found when the only one is COMPLETED."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    # COMPLETED transactions are not considered active.
    transaction = factory_payment_transaction(payment.id, Status.COMPLETED.value)
    transaction.save()
    transaction = PaymentTransactionService.find_active_by_payment_id(payment.id)
    assert transaction is None
def test_transaction_find_by_payment_id(session):
    """Assert all transactions for a payment are listed with HATEOAS links."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    transaction = factory_payment_transaction(payment.id, Status.CREATED.value)
    transaction.save()
    transaction = PaymentTransactionService.find_by_payment_id(payment.id)
    assert transaction is not None
    assert transaction.get('items') is not None
    # Each item carries its own '_links' entry.
    assert transaction.get('items')[0].get('_links') is not None
def test_no_existing_transaction(session):
    """Assert the active lookup returns None when no transaction exists."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    # No transaction was ever created for this payment.
    transaction = PaymentTransactionService.find_active_by_payment_id(payment.id)
    assert transaction is None
@skip_in_pod
def test_transaction_update_on_paybc_connection_error(session, stan_server):
    """Assert PayBC connection failures set SERVICE_UNAVAILABLE on the transaction."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment, payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    fee_schedule = FeeSchedule.find_by_filing_type_and_corp_type('CP', 'OTANN')
    line = factory_payment_line_item(invoice.id, fee_schedule_id=fee_schedule.fee_schedule_id)
    line.save()
    transaction = PaymentTransactionService.create(payment.id, get_paybc_transaction_request())
    from unittest.mock import patch
    from requests.exceptions import ConnectTimeout, ConnectionError
    # Mock here that the invoice update fails here to test the rollback scenario
    with patch('pay_api.services.oauth_service.requests.post', side_effect=ConnectionError('mocked error')):
        transaction = PaymentTransactionService.update_transaction(payment.id, transaction.id, '123451')
        assert transaction.pay_system_reason_code == 'SERVICE_UNAVAILABLE'
    # Timeouts are handled the same way as hard connection failures.
    with patch('pay_api.services.oauth_service.requests.post', side_effect=ConnectTimeout('mocked error')):
        transaction = PaymentTransactionService.update_transaction(payment.id, transaction.id, '123451')
        assert transaction.pay_system_reason_code == 'SERVICE_UNAVAILABLE'
    assert transaction is not None
    assert transaction.id is not None
    assert transaction.status_code is not None
    assert transaction.payment_id is not None
    assert transaction.client_system_url is not None
    assert transaction.pay_system_url is not None
    assert transaction.transaction_start_time is not None
    assert transaction.transaction_end_time is not None
| 42.874016 | 116 | 0.775574 | 2,096 | 16,335 | 5.766698 | 0.090172 | 0.094233 | 0.043187 | 0.067014 | 0.856457 | 0.83958 | 0.82626 | 0.805163 | 0.788864 | 0.765202 | 0 | 0.006514 | 0.144781 | 16,335 | 380 | 117 | 42.986842 | 0.858626 | 0.081726 | 0 | 0.809689 | 0 | 0 | 0.021872 | 0.005904 | 0 | 0 | 0 | 0 | 0.283737 | 1 | 0.051903 | false | 0 | 0.041522 | 0 | 0.093426 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f71c722935297413c44452739b1c5efe80dcce1c | 5,736 | py | Python | tests/test_ac.py | knovichikhin/pyemv | 4a07cb550f27618822b530c6aa954e0820020ae7 | [
"MIT"
] | 14 | 2020-11-01T11:44:41.000Z | 2022-03-24T15:53:23.000Z | tests/test_ac.py | manoutoftime/pyemv | 4a07cb550f27618822b530c6aa954e0820020ae7 | [
"MIT"
] | 3 | 2021-06-18T01:36:00.000Z | 2021-10-17T02:09:50.000Z | tests/test_ac.py | manoutoftime/pyemv | 4a07cb550f27618822b530c6aa954e0820020ae7 | [
"MIT"
] | 8 | 2020-10-09T20:23:39.000Z | 2022-03-31T00:56:47.000Z | import pytest
from pyemv import ac
def test_generate_ac_exception() -> None:
    """generate_ac validates its inputs before doing any crypto.

    Covered: a session key that is not a double-length (16-byte) DES
    key (both too short and too long), and a padding_type that is not
    a PaddingType enum member.
    """
    data = bytes.fromhex("12345678901214")
    sk_ok = bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB")

    # Session keys that are not exactly 16 bytes are rejected.
    bad_session_keys = (
        bytes.fromhex("AAAAAAAAAAAAAAAA"),  # 8 bytes: too short
        bytes.fromhex(
            "AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC"
        ),  # 24 bytes: too long
    )
    for bad_sk in bad_session_keys:
        with pytest.raises(
            ValueError,
            match="Session Key must be a double length DES key",
        ):
            ac.generate_ac(sk_ac=bad_sk, data=data)

    # Padding type must be a PaddingType enum member, not an arbitrary object.
    with pytest.raises(
        TypeError,
        match="Padding type must be PaddingType Enum, not dict",
    ):
        ac.generate_ac(
            sk_ac=sk_ok,
            data=data,
            padding_type={},  # type: ignore
        )
def test_generate_arpc_1_exception() -> None:
    """generate_arpc_1 rejects malformed key, ARQC and ARPC-RC lengths.

    The session key must be a double-length (16-byte) DES key, the ARQC
    must be 8 bytes and the ARPC-RC 2 bytes; any violation raises
    ValueError.  (The original "ARQC > 16 bytes" comment was wrong —
    the limit is 8 bytes; the oversized sample is 9 bytes.)
    """
    sk_ok = bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB")
    arqc_ok = bytes.fromhex("1234567890123456")
    rc_ok = bytes.fromhex("0000")

    # (kwargs, expected error message) — one entry per invalid input.
    cases = [
        # SK < 16 bytes
        (dict(sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAA"),
              arqc=bytes.fromhex("12345678"),
              arpc_rc=rc_ok),
         "Session Key must be a double length DES key"),
        # SK > 16 bytes
        (dict(sk_ac=bytes.fromhex(
                  "AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC"),
              arqc=bytes.fromhex("12345678"),
              arpc_rc=rc_ok),
         "Session Key must be a double length DES key"),
        # ARQC < 8 bytes (4-byte sample)
        (dict(sk_ac=sk_ok,
              arqc=bytes.fromhex("12345678"),
              arpc_rc=rc_ok),
         "ARQC must be 8 bytes long"),
        # ARQC > 8 bytes (9-byte sample)
        (dict(sk_ac=sk_ok,
              arqc=bytes.fromhex("1234567890ABCDEF12"),
              arpc_rc=rc_ok),
         "ARQC must be 8 bytes long"),
        # ARPC-RC < 2 bytes
        (dict(sk_ac=sk_ok,
              arqc=arqc_ok,
              arpc_rc=bytes.fromhex("00")),
         "ARPC-RC must be 2 bytes long"),
        # ARPC-RC > 2 bytes
        (dict(sk_ac=sk_ok,
              arqc=arqc_ok,
              arpc_rc=bytes.fromhex("001122")),
         "ARPC-RC must be 2 bytes long"),
    ]
    for kwargs, message in cases:
        with pytest.raises(ValueError, match=message):
            ac.generate_arpc_1(**kwargs)
def test_generate_arpc_2_exception() -> None:
    """generate_arpc_2 rejects malformed input lengths.

    The session key must be a double-length (16-byte) DES key, the
    ARQC 8 bytes, the CSU 4 bytes and the Proprietary Authentication
    Data at most 8 bytes; any violation raises ValueError.  (The
    original "ARQC > 16 bytes" comment was wrong — the limit is 8.)
    """
    sk_ok = bytes.fromhex("AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBB")
    csu_ok = bytes.fromhex("12345678")
    pad_ok = bytes.fromhex("1234567890123456")

    # (kwargs, expected error message) — one entry per invalid input.
    cases = [
        # SK < 16 bytes
        (dict(sk_ac=bytes.fromhex("AAAAAAAAAAAAAAAA"),
              arqc=bytes.fromhex("12345678901214"),
              csu=csu_ok,
              prop_auth_data=pad_ok),
         "Session Key must be a double length DES key"),
        # SK > 16 bytes
        (dict(sk_ac=bytes.fromhex(
                  "AAAAAAAAAAAAAAAABBBBBBBBBBBBBBBBCCCCCCCCCCCCCCCC"),
              arqc=bytes.fromhex("12345678901214"),
              csu=csu_ok,
              prop_auth_data=pad_ok),
         "Session Key must be a double length DES key"),
        # ARQC < 8 bytes (7-byte sample)
        (dict(sk_ac=sk_ok,
              arqc=bytes.fromhex("12345678901214"),
              csu=csu_ok,
              prop_auth_data=pad_ok),
         "ARQC must be 8 bytes long"),
        # ARQC > 8 bytes (9-byte sample)
        (dict(sk_ac=sk_ok,
              arqc=bytes.fromhex("1234567890ABCDEF12"),
              csu=csu_ok,
              prop_auth_data=pad_ok),
         "ARQC must be 8 bytes long"),
        # CSU < 4 bytes
        (dict(sk_ac=sk_ok,
              arqc=bytes.fromhex("1234567890121456"),
              csu=bytes.fromhex("123456"),
              prop_auth_data=pad_ok),
         "CSU must be 4 bytes long"),
        # CSU > 4 bytes
        (dict(sk_ac=sk_ok,
              arqc=bytes.fromhex("1234567890121456"),
              csu=bytes.fromhex("1234567890"),
              prop_auth_data=pad_ok),
         "CSU must be 4 bytes long"),
        # Proprietary Authentication Data > 8 bytes (9-byte sample)
        (dict(sk_ac=sk_ok,
              arqc=bytes.fromhex("1234567890121456"),
              csu=csu_ok,
              prop_auth_data=bytes.fromhex("123456789012345678")),
         "Proprietary Authentication Data must be 0-8 bytes long"),
    ]
    for kwargs, message in cases:
        with pytest.raises(ValueError, match=message):
            ac.generate_arpc_2(**kwargs)
| 30.031414 | 84 | 0.591702 | 586 | 5,736 | 5.66041 | 0.110922 | 0.188122 | 0.077178 | 0.077178 | 0.889961 | 0.854386 | 0.842327 | 0.822731 | 0.784142 | 0.749774 | 0 | 0.113273 | 0.301255 | 5,736 | 190 | 85 | 30.189474 | 0.714321 | 0.044805 | 0 | 0.805195 | 1 | 0 | 0.273393 | 0.084966 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019481 | true | 0 | 0.012987 | 0 | 0.032468 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
f77cc70d7987eeeb9caec4b05b4ad1b4c6b24a96 | 15,862 | py | Python | venv/Lib/site-packages/skimage/segmentation/tests/test_slic.py | amelliaaas/tugastkc4 | f442382c72379e911f3780543b95345a3b1c9407 | [
"Apache-2.0"
] | 4 | 2021-10-20T12:39:09.000Z | 2022-02-26T15:02:08.000Z | venv/Lib/site-packages/skimage/segmentation/tests/test_slic.py | amelliaaas/tugastkc4 | f442382c72379e911f3780543b95345a3b1c9407 | [
"Apache-2.0"
] | null | null | null | venv/Lib/site-packages/skimage/segmentation/tests/test_slic.py | amelliaaas/tugastkc4 | f442382c72379e911f3780543b95345a3b1c9407 | [
"Apache-2.0"
] | 20 | 2021-11-07T13:55:56.000Z | 2021-12-02T10:54:01.000Z | from itertools import product
import pytest
import numpy as np
from skimage.segmentation import slic
from skimage._shared import testing
from skimage._shared.testing import test_parallel, assert_equal
@test_parallel()
def test_color_2d():
    """Four pure-color quadrants of an RGB image map to four SLIC segments."""
    rnd = np.random.RandomState(0)  # fixed seed keeps labels deterministic
    img = np.zeros((20, 21, 3))
    img[:10, :10, 0] = 1
    img[10:, :10, 1] = 1
    img[10:, 10:, 2] = 1
    img += 0.01 * rnd.normal(size=img.shape)
    img[img > 1] = 1
    img[img < 0] = 0
    seg = slic(img, n_segments=4, sigma=0, enforce_connectivity=False,
               start_label=0)
    # we expect 4 segments
    assert_equal(len(np.unique(seg)), 4)
    assert_equal(seg.shape, img.shape[:-1])
    # each quadrant gets one label
    assert_equal(seg[:10, :10], 0)
    assert_equal(seg[10:, :10], 2)
    assert_equal(seg[:10, 10:], 1)
    assert_equal(seg[10:, 10:], 3)
def test_multichannel_2d():
    """SLIC handles an 8-channel image; four quadrant signatures → 4 segments."""
    rnd = np.random.RandomState(0)
    img = np.zeros((20, 20, 8))
    # each quadrant activates a distinct pair of channels
    img[:10, :10, 0:2] = 1
    img[:10, 10:, 2:4] = 1
    img[10:, :10, 4:6] = 1
    img[10:, 10:, 6:8] = 1
    img += 0.01 * rnd.normal(size=img.shape)
    img = np.clip(img, 0, 1, out=img)
    seg = slic(img, n_segments=4, enforce_connectivity=False, start_label=0)
    # we expect 4 segments
    assert_equal(len(np.unique(seg)), 4)
    assert_equal(seg.shape, img.shape[:-1])
    assert_equal(seg[:10, :10], 0)
    assert_equal(seg[10:, :10], 2)
    assert_equal(seg[:10, 10:], 1)
    assert_equal(seg[10:, 10:], 3)
def test_gray_2d():
    """Grayscale image with four intensity quadrants yields four segments."""
    rnd = np.random.RandomState(0)
    img = np.zeros((20, 21))
    img[:10, :10] = 0.33
    img[10:, :10] = 0.67
    img[10:, 10:] = 1.00
    img += 0.0033 * rnd.normal(size=img.shape)
    img[img > 1] = 1
    img[img < 0] = 0
    # convert2lab=False: treat values as plain intensities
    seg = slic(img, sigma=0, n_segments=4, compactness=1,
               multichannel=False, convert2lab=False, start_label=0)
    assert_equal(len(np.unique(seg)), 4)
    assert_equal(seg.shape, img.shape)
    assert_equal(seg[:10, :10], 0)
    assert_equal(seg[10:, :10], 2)
    assert_equal(seg[:10, 10:], 1)
    assert_equal(seg[10:, 10:], 3)
def test_color_3d():
    """Eight pure-color octants of a 3-D RGB volume map to eight segments."""
    rnd = np.random.RandomState(0)
    img = np.zeros((20, 21, 22, 3))
    # build the 8 octant slices by halving each spatial axis
    slices = []
    for dim_size in img.shape[:-1]:
        midpoint = dim_size // 2
        slices.append((slice(None, midpoint), slice(midpoint, None)))
    slices = list(product(*slices))
    colors = list(product(*(([0, 1],) * 3)))  # the 8 RGB corner colors
    for s, c in zip(slices, colors):
        img[s] = c
    img += 0.01 * rnd.normal(size=img.shape)
    img[img > 1] = 1
    img[img < 0] = 0
    seg = slic(img, sigma=0, n_segments=8, start_label=0)
    assert_equal(len(np.unique(seg)), 8)
    for s, c in zip(slices, range(8)):
        assert_equal(seg[s], c)
def test_gray_3d():
    """Eight intensity octants of a grayscale volume map to eight segments."""
    rnd = np.random.RandomState(0)
    img = np.zeros((20, 21, 22))
    slices = []
    for dim_size in img.shape:
        midpoint = dim_size // 2
        slices.append((slice(None, midpoint), slice(midpoint, None)))
    slices = list(product(*slices))
    # 8 evenly spaced shades in [0, 1]
    shades = np.arange(0, 1.000001, 1.0 / 7)
    for s, sh in zip(slices, shades):
        img[s] = sh
    img += 0.001 * rnd.normal(size=img.shape)
    img[img > 1] = 1
    img[img < 0] = 0
    seg = slic(img, sigma=0, n_segments=8, compactness=1,
               multichannel=False, convert2lab=False, start_label=0)
    assert_equal(len(np.unique(seg)), 8)
    for s, c in zip(slices, range(8)):
        assert_equal(seg[s], c)
def test_list_sigma():
    """A per-axis sigma sequence is accepted and changes the smoothing result."""
    rnd = np.random.RandomState(0)
    img = np.array([[1, 1, 1, 0, 0, 0],
                    [0, 0, 0, 1, 1, 1]], float)
    img += 0.1 * rnd.normal(size=img.shape)
    # expected labels after the anisotropic pre-smoothing
    result_sigma = np.array([[0, 0, 0, 1, 1, 1],
                             [0, 0, 0, 1, 1, 1]], int)
    seg_sigma = slic(img, n_segments=2, sigma=[1, 50, 1],
                     multichannel=False, start_label=0)
    assert_equal(seg_sigma, result_sigma)
def test_spacing():
    """Anisotropic voxel spacing changes which axis the segmentation splits on."""
    rnd = np.random.RandomState(0)
    img = np.array([[1, 1, 1, 0, 0],
                    [1, 1, 0, 0, 0]], float)
    # expected labels without / with a large spacing on the second axis
    result_non_spaced = np.array([[0, 0, 0, 1, 1],
                                  [0, 0, 1, 1, 1]], int)
    result_spaced = np.array([[0, 0, 0, 0, 0],
                              [1, 1, 1, 1, 1]], int)
    img += 0.1 * rnd.normal(size=img.shape)
    seg_non_spaced = slic(img, n_segments=2, sigma=0, multichannel=False,
                          compactness=1.0, start_label=0)
    seg_spaced = slic(img, n_segments=2, sigma=0, spacing=[1, 500, 1],
                      compactness=1.0, multichannel=False, start_label=0)
    assert_equal(seg_non_spaced, result_non_spaced)
    assert_equal(seg_spaced, result_spaced)
def test_invalid_lab_conversion():
    """Requesting Lab conversion on out-of-range data must raise ValueError."""
    base = np.array([[1, 1, 1, 0, 0],
                     [1, 1, 0, 0, 0]], float)
    img = base + 1  # shift values above 1, outside the valid RGB range
    with testing.raises(ValueError):
        slic(img, multichannel=True, convert2lab=True, start_label=0)
def test_enforce_connectivity():
    """enforce_connectivity merges the stray pixels into contiguous segments."""
    img = np.array([[0, 0, 0, 1, 1, 1],
                    [1, 0, 0, 1, 1, 0],
                    [0, 0, 0, 1, 1, 0]], float)
    segments_connected = slic(img, 2, compactness=0.0001,
                              enforce_connectivity=True,
                              convert2lab=False, start_label=0)
    segments_disconnected = slic(img, 2, compactness=0.0001,
                                 enforce_connectivity=False,
                                 convert2lab=False, start_label=0)
    # Make sure nothing fatal occurs (e.g. buffer overflow) at low values of
    # max_size_factor
    segments_connected_low_max = slic(img, 2, compactness=0.0001,
                                      enforce_connectivity=True,
                                      convert2lab=False,
                                      max_size_factor=0.8,
                                      start_label=0)
    # connected result: clean halves; disconnected keeps the stray pixels
    result_connected = np.array([[0, 0, 0, 1, 1, 1],
                                 [0, 0, 0, 1, 1, 1],
                                 [0, 0, 0, 1, 1, 1]], float)
    result_disconnected = np.array([[0, 0, 0, 1, 1, 1],
                                    [1, 0, 0, 1, 1, 0],
                                    [0, 0, 0, 1, 1, 0]], float)
    assert_equal(segments_connected, result_connected)
    assert_equal(segments_disconnected, result_disconnected)
    assert_equal(segments_connected_low_max, result_connected)
def test_slic_zero():
    """SLIC-zero (parameter-free compactness) still finds the four quadrants."""
    # Same as test_color_2d but with slic_zero=True
    rnd = np.random.RandomState(0)
    img = np.zeros((20, 21, 3))
    img[:10, :10, 0] = 1
    img[10:, :10, 1] = 1
    img[10:, 10:, 2] = 1
    img += 0.01 * rnd.normal(size=img.shape)
    img[img > 1] = 1
    img[img < 0] = 0
    seg = slic(img, n_segments=4, sigma=0, slic_zero=True, start_label=0)
    # we expect 4 segments
    assert_equal(len(np.unique(seg)), 4)
    assert_equal(seg.shape, img.shape[:-1])
    assert_equal(seg[:10, :10], 0)
    assert_equal(seg[10:, :10], 2)
    assert_equal(seg[:10, 10:], 1)
    assert_equal(seg[10:, 10:], 3)
def test_more_segments_than_pixels():
    """Requesting more segments than pixels yields one segment per pixel."""
    rnd = np.random.RandomState(0)
    img = np.zeros((20, 21))
    img[:10, :10] = 0.33
    img[10:, :10] = 0.67
    img[10:, 10:] = 1.00
    img += 0.0033 * rnd.normal(size=img.shape)
    img[img > 1] = 1
    img[img < 0] = 0
    # 500 segments requested for 420 pixels
    seg = slic(img, sigma=0, n_segments=500, compactness=1,
               multichannel=False, convert2lab=False, start_label=0)
    # labels are then simply the pixel enumeration in raster order
    assert np.all(seg.ravel() == np.arange(seg.size))
def test_color_2d_mask():
    """Masked SLIC: quadrants inside the mask get labels 1-4; outside stays 0."""
    rnd = np.random.RandomState(0)
    msk = np.zeros((20, 21))
    msk[2:-2, 2:-2] = 1  # mask excludes a 2-pixel border
    img = np.zeros((20, 21, 3))
    img[:10, :10, 0] = 1
    img[10:, :10, 1] = 1
    img[10:, 10:, 2] = 1
    img += 0.01 * rnd.normal(size=img.shape)
    np.clip(img, 0, 1, out=img)
    seg = slic(img, n_segments=4, sigma=0, enforce_connectivity=False,
               mask=msk)
    # we expect 4 segments + masked area
    assert_equal(len(np.unique(seg)), 5)
    assert_equal(seg.shape, img.shape[:-1])
    # segments
    assert_equal(seg[2:10, 2:10], 1)
    assert_equal(seg[10:-2, 2:10], 4)
    assert_equal(seg[2:10, 10:-2], 2)
    assert_equal(seg[10:-2, 10:-2], 3)
    # non masked area
    assert_equal(seg[:2, :], 0)
    assert_equal(seg[-2:, :], 0)
    assert_equal(seg[:, :2], 0)
    assert_equal(seg[:, -2:], 0)
def test_multichannel_2d_mask():
    """Masked SLIC on an 8-channel image: labels 1-4 inside, 0 outside."""
    rnd = np.random.RandomState(0)
    msk = np.zeros((20, 20))
    msk[2:-2, 2:-2] = 1  # mask excludes a 2-pixel border
    img = np.zeros((20, 20, 8))
    img[:10, :10, 0:2] = 1
    img[:10, 10:, 2:4] = 1
    img[10:, :10, 4:6] = 1
    img[10:, 10:, 6:8] = 1
    img += 0.01 * rnd.normal(size=img.shape)
    np.clip(img, 0, 1, out=img)
    seg = slic(img, n_segments=4, enforce_connectivity=False,
               mask=msk)
    # we expect 4 segments + masked area
    assert_equal(len(np.unique(seg)), 5)
    assert_equal(seg.shape, img.shape[:-1])
    # segments
    assert_equal(seg[2:10, 2:10], 2)
    assert_equal(seg[2:10, 10:-2], 1)
    assert_equal(seg[10:-2, 2:10], 4)
    assert_equal(seg[10:-2, 10:-2], 3)
    # non masked area
    assert_equal(seg[:2, :], 0)
    assert_equal(seg[-2:, :], 0)
    assert_equal(seg[:, :2], 0)
    assert_equal(seg[:, -2:], 0)
def test_gray_2d_mask():
    """Masked SLIC on a grayscale image: labels 1-4 inside, 0 outside."""
    rnd = np.random.RandomState(0)
    msk = np.zeros((20, 21))
    msk[2:-2, 2:-2] = 1  # mask excludes a 2-pixel border
    img = np.zeros((20, 21))
    img[:10, :10] = 0.33
    img[10:, :10] = 0.67
    img[10:, 10:] = 1.00
    img += 0.0033 * rnd.normal(size=img.shape)
    np.clip(img, 0, 1, out=img)
    seg = slic(img, sigma=0, n_segments=4, compactness=1,
               multichannel=False, convert2lab=False, mask=msk)
    assert_equal(len(np.unique(seg)), 5)
    assert_equal(seg.shape, img.shape)
    # segments
    assert_equal(seg[2:10, 2:10], 1)
    assert_equal(seg[2:10, 10:-2], 2)
    assert_equal(seg[10:-2, 2:10], 3)
    assert_equal(seg[10:-2, 10:-2], 4)
    # non masked area
    assert_equal(seg[:2, :], 0)
    assert_equal(seg[-2:, :], 0)
    assert_equal(seg[:, :2], 0)
    assert_equal(seg[:, -2:], 0)
def test_list_sigma_mask():
    """Per-axis sigma combined with a mask; masked columns keep label 0."""
    rnd = np.random.RandomState(0)
    msk = np.zeros((2, 6))
    msk[:, 1:-1] = 1  # exclude the first and last column
    img = np.array([[1, 1, 1, 0, 0, 0],
                    [0, 0, 0, 1, 1, 1]], float)
    img += 0.1 * rnd.normal(size=img.shape)
    result_sigma = np.array([[0, 1, 1, 2, 2, 0],
                             [0, 1, 1, 2, 2, 0]], int)
    seg_sigma = slic(img, n_segments=2, sigma=[1, 50, 1],
                     multichannel=False, mask=msk)
    assert_equal(seg_sigma, result_sigma)
def test_spacing_mask():
    """Anisotropic spacing with a mask; masked columns keep label 0."""
    rnd = np.random.RandomState(0)
    msk = np.zeros((2, 5))
    msk[:, 1:-1] = 1  # exclude the first and last column
    img = np.array([[1, 1, 1, 0, 0],
                    [1, 1, 0, 0, 0]], float)
    # expected labels without / with a large spacing on the second axis
    result_non_spaced = np.array([[0, 1, 1, 2, 0],
                                  [0, 1, 2, 2, 0]], int)
    result_spaced = np.array([[0, 1, 1, 1, 0],
                              [0, 2, 2, 2, 0]], int)
    img += 0.1 * rnd.normal(size=img.shape)
    seg_non_spaced = slic(img, n_segments=2, sigma=0, multichannel=False,
                          compactness=1.0, mask=msk)
    seg_spaced = slic(img, n_segments=2, sigma=0, spacing=[1, 50, 1],
                      compactness=1.0, multichannel=False, mask=msk)
    assert_equal(seg_non_spaced, result_non_spaced)
    assert_equal(seg_spaced, result_spaced)
def test_enforce_connectivity_mask():
    """enforce_connectivity behaves the same under a mask; masked area is 0."""
    msk = np.zeros((3, 6))
    msk[:, 1:-1] = 1  # exclude the first and last column
    img = np.array([[0, 0, 0, 1, 1, 1],
                    [1, 0, 0, 1, 1, 0],
                    [0, 0, 0, 1, 1, 0]], float)
    segments_connected = slic(img, 2, compactness=0.0001,
                              enforce_connectivity=True,
                              convert2lab=False, mask=msk)
    segments_disconnected = slic(img, 2, compactness=0.0001,
                                 enforce_connectivity=False,
                                 convert2lab=False, mask=msk)
    # Make sure nothing fatal occurs (e.g. buffer overflow) at low values of
    # max_size_factor
    segments_connected_low_max = slic(img, 2, compactness=0.0001,
                                      enforce_connectivity=True,
                                      convert2lab=False,
                                      max_size_factor=0.8, mask=msk)
    result_connected = np.array([[0, 1, 1, 2, 2, 0],
                                 [0, 1, 1, 2, 2, 0],
                                 [0, 1, 1, 2, 2, 0]], float)
    result_disconnected = np.array([[0, 1, 1, 2, 2, 0],
                                    [0, 1, 1, 2, 2, 0],
                                    [0, 1, 1, 2, 2, 0]], float)
    assert_equal(segments_connected, result_connected)
    assert_equal(segments_disconnected, result_disconnected)
    assert_equal(segments_connected_low_max, result_connected)
def test_slic_zero_mask():
    """SLIC-zero with a mask: labels 1-4 inside the mask, 0 outside."""
    rnd = np.random.RandomState(0)
    msk = np.zeros((20, 21))
    msk[2:-2, 2:-2] = 1  # mask excludes a 2-pixel border
    img = np.zeros((20, 21, 3))
    img[:10, :10, 0] = 1
    img[10:, :10, 1] = 1
    img[10:, 10:, 2] = 1
    img += 0.01 * rnd.normal(size=img.shape)
    np.clip(img, 0, 1, out=img)
    seg = slic(img, n_segments=4, sigma=0, slic_zero=True,
               mask=msk)
    # we expect 4 segments + masked area
    assert_equal(len(np.unique(seg)), 5)
    assert_equal(seg.shape, img.shape[:-1])
    # segments
    assert_equal(seg[2:10, 2:10], 1)
    assert_equal(seg[2:10, 10:-2], 2)
    assert_equal(seg[10:-2, 2:10], 3)
    assert_equal(seg[10:-2, 10:-2], 4)
    # non masked area
    assert_equal(seg[:2, :], 0)
    assert_equal(seg[-2:, :], 0)
    assert_equal(seg[:, :2], 0)
    assert_equal(seg[:, -2:], 0)
def test_more_segments_than_pixels_mask():
    """Oversegmentation under a mask enumerates only the unmasked pixels."""
    rnd = np.random.RandomState(0)
    msk = np.zeros((20, 21))
    msk[2:-2, 2:-2] = 1  # mask excludes a 2-pixel border
    img = np.zeros((20, 21))
    img[:10, :10] = 0.33
    img[10:, :10] = 0.67
    img[10:, 10:] = 1.00
    img += 0.0033 * rnd.normal(size=img.shape)
    np.clip(img, 0, 1, out=img)
    seg = slic(img, sigma=0, n_segments=500, compactness=1,
               multichannel=False, convert2lab=False, mask=msk)
    # unmasked pixels are labelled 1..k in raster order (0 = masked)
    expected = np.arange(seg[2:-2, 2:-2].size) + 1
    assert np.all(seg[2:-2, 2:-2].ravel() == expected)
def test_color_3d_mask():
    """Masked SLIC on a 3-D RGB volume: octants get labels 1-8, 0 outside."""
    msk = np.zeros((20, 21, 22))
    msk[2:-2, 2:-2, 2:-2] = 1  # mask excludes a 2-voxel border
    rnd = np.random.RandomState(0)
    img = np.zeros((20, 21, 22, 3))
    slices = []
    for dim_size in msk.shape:
        midpoint = dim_size // 2
        slices.append((slice(None, midpoint), slice(midpoint, None)))
    slices = list(product(*slices))
    colors = list(product(*(([0, 1],) * 3)))  # the 8 RGB corner colors
    for s, c in zip(slices, colors):
        img[s] = c
    img += 0.01 * rnd.normal(size=img.shape)
    np.clip(img, 0, 1, out=img)
    seg = slic(img, sigma=0, n_segments=8, mask=msk)
    # we expect 8 segments + masked area
    assert_equal(len(np.unique(seg)), 9)
    for s, c in zip(slices, range(1, 9)):
        assert_equal(seg[s][2:-2, 2:-2, 2:-2], c)
def test_gray_3d_mask():
    """Masked SLIC on a grayscale volume: octants get labels 1-8, 0 outside."""
    msk = np.zeros((20, 21, 22))
    msk[2:-2, 2:-2, 2:-2] = 1  # mask excludes a 2-voxel border
    rnd = np.random.RandomState(0)
    img = np.zeros((20, 21, 22))
    slices = []
    for dim_size in img.shape:
        midpoint = dim_size // 2
        slices.append((slice(None, midpoint), slice(midpoint, None)))
    slices = list(product(*slices))
    shades = np.linspace(0, 1, 8)  # 8 evenly spaced shades
    for s, sh in zip(slices, shades):
        img[s] = sh
    img += 0.001 * rnd.normal(size=img.shape)
    np.clip(img, 0, 1, out=img)
    seg = slic(img, sigma=0, n_segments=8, multichannel=False,
               convert2lab=False, mask=msk)
    # we expect 8 segments + masked area
    assert_equal(len(np.unique(seg)), 9)
    for s, c in zip(slices, range(1, 9)):
        assert_equal(seg[s][2:-2, 2:-2, 2:-2], c)
@pytest.mark.parametrize("dtype", ['float32', 'float64', 'uint8', 'int'])
def test_dtype_support(dtype):
    """slic should accept images of all common dtypes without raising."""
    random_img = np.random.rand(28, 28)
    img = random_img.astype(dtype)
    # Simply run the function to assert that it runs without error
    slic(img, start_label=1)
| 33.393684 | 76 | 0.548985 | 2,470 | 15,862 | 3.414575 | 0.062348 | 0.110861 | 0.109557 | 0.012805 | 0.905383 | 0.89566 | 0.870761 | 0.86258 | 0.854399 | 0.843965 | 0 | 0.103288 | 0.282814 | 15,862 | 474 | 77 | 33.464135 | 0.638098 | 0.038961 | 0 | 0.776596 | 0 | 0 | 0.001774 | 0 | 0 | 0 | 0 | 0 | 0.231383 | 1 | 0.058511 | false | 0 | 0.015957 | 0 | 0.074468 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f79d31291891ba9f188570dfc62974963bb3e96a | 14,269 | py | Python | statslib.py | songlinhou/pytools | b990672fd9287da4fe57350bed28958ac261df81 | [
"Apache-2.0"
] | null | null | null | statslib.py | songlinhou/pytools | b990672fd9287da4fe57350bed28958ac261df81 | [
"Apache-2.0"
] | null | null | null | statslib.py | songlinhou/pytools | b990672fd9287da4fe57350bed28958ac261df81 | [
"Apache-2.0"
] | null | null | null | import scipy.stats as st
import numpy as np
import pandas as pd
class CI:
    """Confidence-interval calculators for proportions and means.

    Each method prints the critical value, standard error and the
    resulting interval (preserving the original behaviour) and now
    also returns the interval as a ``(lower, upper)`` tuple so the
    result can be used programmatically.  Commented-out debug code
    from the original has been removed.
    """

    def one_proportion(self, n, phat, conf):
        """Two-sided CI for one proportion via the normal approximation."""
        z_val = st.norm.ppf(1 - (1 - conf) / 2)  # two-sided critical value
        se = np.sqrt(phat * (1 - phat) / n)
        print('z-value=', z_val, "se=", se)
        ci = (phat - z_val * se, phat + z_val * se)
        print(ci)
        return ci

    def one_proportion_conserv(self, n, phat, conf):
        """Conservative CI for one proportion (SE maximized at p = 0.5)."""
        z_val = st.norm.ppf(1 - (1 - conf) / 2)
        # 1/(2*sqrt(n)) is the upper bound of sqrt(p(1-p)/n)
        se = 1 / (2 * np.sqrt(n))
        print('z-value=', z_val, "se=", se)
        ci = (phat - z_val * se, phat + z_val * se)
        print(ci)
        return ci

    def two_proportions(self, pos_num1, pos_num2, total_num1, total_num2, conf):
        """Two-sided CI for the difference of two independent proportions."""
        m1, m2 = pos_num1, pos_num2  # positive counts
        n1, n2 = total_num1, total_num2  # sample sizes
        p1, p2 = m1 / n1, m2 / n2
        phat = p1 - p2
        z_val = st.norm.ppf(1 - (1 - conf) / 2)
        se = np.sqrt(p1 * (1 - p1) / n1 + p2 * (1 - p2) / n2)
        print('z-value=', z_val, "se=", se)
        print(f'{phat} +/- {z_val * se}')
        ci = (phat - z_val * se, phat + z_val * se)
        print(ci)
        return ci

    def one_mean(self, mu, n, sd, conf):
        """Two-sided CI for one mean using the t distribution."""
        dof = n - 1
        se = sd / np.sqrt(n)
        t = st.t.ppf(1 - (1 - conf) / 2, df=dof)
        print('t-value=', t, "se=", se)
        print(f'{mu} +/- {t * se}')
        ci = (mu - t * se, mu + t * se)
        print(ci)
        return ci

    def two_mean_paired(self, mu_diff, sd, n, conf):
        """CI for the mean of paired differences; reports whether 0 is inside."""
        dof = n - 1
        mu = mu_diff
        se = sd / np.sqrt(n)
        t = st.t.ppf(1 - (1 - conf) / 2, df=dof)
        print('t-value=', t, "se=", se)
        print(f'{mu} +/- {t * se}')
        ci = (mu - t * se, mu + t * se)
        print(ci)
        if ci[0] <= 0 <= ci[1]:
            print('0 is included. maybe no difference')
        else:
            print('0 is NOT included. some difference')
        return ci

    def two_means_independent_unpooled(self, mu1, mu2, sd1, sd2, n1, n2, conf):
        """CI for mu1-mu2, unpooled SE; conservative dof = min(n1, n2) - 1."""
        dof = np.min([n1 - 1, n2 - 1])
        mu_hat = mu1 - mu2
        se = np.sqrt(sd1**2 / n1 + sd2**2 / n2)
        t = st.t.ppf(1 - (1 - conf) / 2, df=dof)
        print('DOF=', dof)
        print('t-value=', t, "se=", se)
        print(f'{mu_hat} +/- {t * se}')
        ci = (mu_hat - t * se, mu_hat + t * se)
        print(ci)
        return ci

    def two_means_independent_pooled(self, mu1, mu2, sd1, sd2, n1, n2, conf):
        """CI for mu1-mu2 assuming equal variances (pooled SD)."""
        dof = n1 + n2 - 2
        mu_hat = mu1 - mu2
        # pooled SD times the combined scaling factor
        se = np.sqrt(((n1 - 1) * sd1**2 + (n2 - 1) * sd2**2)
                     / (n1 + n2 - 2)) * np.sqrt(1 / n1 + 1 / n2)
        t = st.t.ppf(1 - (1 - conf) / 2, df=dof)
        print('DOF=', dof)
        print('t-value=', t, "se=", se)
        print(f'{mu_hat} +/- {t * se}')
        ci = (mu_hat - t * se, mu_hat + t * se)
        print(ci)
        return ci
class HT:
def one_proportion_two_sides(self, p0, phat, n, alpha):
# check for assumption
if (n * p0 >= 10 and n * (1 - p0) >= 10):
print('sample size is large enough')
else:
print('sample size is NOT large enough')
se = np.sqrt(p0 * (1 - p0) / n)
z = abs(phat - p0) / se
print(f'z-value is {z}')
print(f'which means our observed sample proportion is {z} null SE above our hypothesized population proportion ABS value')
p_val = (1 - st.norm.cdf(z)) * 2
print(f'p-value is {p_val}')
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
def one_proportion_phat_larger(self, p0, phat, n, alpha):
# check for assumption
if (n * p0 >= 10 and n * (1 - p0) >= 10):
print('sample size is large enough')
else:
print('sample size is NOT large enough')
se = np.sqrt(p0 * (1 - p0) / n)
z = (phat - p0) / se
print(f'z-value is {z}')
print(f'which means our observed sample proportion is {z} null SE above our hypothesized population proportion')
p_val = 1 - st.norm.cdf(z)
print(f'p-value is {p_val}')
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
def one_proportion_phat_smaller(self, p0, phat, n, alpha):
# check for assumption
if (n * p0 >= 10 and n * (1 - p0) >= 10):
print('sample size is large enough')
else:
print('sample size is NOT large enough')
se = np.sqrt(p0 * (1 - p0) / n)
z = (p0 - phat) / se
print(f'z-value is {z}')
print(f'which means our observed sample proportion is {z} null SE above our hypothesized population proportion')
p_val = 1 - st.norm.cdf(z)
print(f'p-value is {p_val}')
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
def two_proportion_two_sides(self, pos1_num, pos2_num, n1, n2, alpha):
m1, m2 = pos1_num, pos2_num # positive numbers
# check for assumption
phat = (m1 + m2) / (n1 + n2)
print('phat=', phat)
if (n1 * phat >= 10 and n1 * (1 - phat) >= 10 and n2 * phat >= 10 and n2 * (1 - phat) >= 10):
print('sample size is large enough')
else:
print('sample size is NOT large enough, should not use this method')
p1, p2 = m1 / n1, m2 / n2
se = np.sqrt(phat * (1 - phat) * (1 / n1 + 1 / n2))
z = (p1 - p2 - 0) / se
print(f'z-stat is {z}')
p_val = (1 - st.norm.cdf(abs(z))) * 2
print(f'p-value is {p_val}')
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
def two_proportion_pos1_larger(self, pos1_num, pos2_num, n1, n2, alpha):
m1, m2 = pos1_num, pos2_num # positive numbers
# check for assumption
phat = (m1 + m2) / (n1 + n2)
print('phat=', phat)
if (n1 * phat >= 10 and n1 * (1 - phat) >= 10 and n2 * phat >= 10 and n2 * (1 - phat) >= 10):
print('sample size is large enough')
else:
print('sample size is NOT large enough, should not use this method')
# p1, p2 = m1 / n1, m2 / n2
p1, p2 = 0.52, 0.35
# se = np.sqrt(phat * (1 - phat) * (1 / n1 + 1 / n2))
se = 0.0338
z = (p1 - p2 - 0) / se
print(f'p1={p1} and p2={p2}')
print(f'z-stat is {z}')
# assert z > 0, "p1 > p2"
p_val = (1 - st.norm.cdf(abs(z)))
print(f'p-value is {p_val}')
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
def two_proportion_pos1_smaller(self, pos1_num, pos2_num, n1, n2, alpha):
m1, m2 = pos1_num, pos2_num # positive numbers
# check for assumption
phat = (m1 + m2) / (n1 + n2)
print('phat=', phat)
if (n1 * phat >= 10 and n1 * (1 - phat) >= 10 and n2 * phat >= 10 and n2 * (1 - phat) >= 10):
print('sample size is large enough')
else:
print('sample size is NOT large enough, should not use this method')
p1, p2 = m1 / n1, m2 / n2
se = np.sqrt(phat * (1 - phat) * (1 / n1 + 1 / n2))
z = (p2 - p1 - 0) / se
print(f'p1={p1} and p2={p2}')
print(f'z-stat is {z}')
# assert z > 0, "p1 > p2"
p_val = (1 - st.norm.cdf(abs(z)))
print(f'p-value is {p_val}')
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
def one_mean_two_sides(self, mu0, mu_hat, n, sd, alpha):
dof = n - 1
se = sd / np.sqrt(n)
t = (mu_hat - mu0) / se
print(f't-stat is {t}')
p_val = (1 - st.t.cdf(abs(t), df = dof)) * 2
print(f'p-value is {p_val}')
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
# confidence
conf = 1 - alpha
t_conf = st.t.ppf(1 - (1 - conf) / 2, df=dof)
ci = (mu_hat - t_conf * se, mu_hat + t_conf * se)
print('0 exist in the CI?')
print(f'CI with {conf} confidence level = ', ci)
def one_mean_mu_hat_larger(self, mu0, mu_hat, n, sd, alpha):
dof = n - 1
se = sd / np.sqrt(n)
t = (mu_hat - mu0) / se
print(f't-stat is {t}')
p_val = (1 - st.t.cdf(abs(t), df = dof)) * 1
print(f'p-value is {p_val}')
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
# confidence
conf = 1 - alpha
t_conf = st.t.ppf(1 - (1 - conf), df=dof)
ci = (mu_hat - t_conf * se, mu_hat + t_conf * se)
print('0 exist in the CI?')
print(f'CI with {conf} confidence level = ', ci)
def one_mean_mu_hat_smaller(self, mu0, mu_hat, n, sd, alpha):
dof = n - 1
se = sd / np.sqrt(n)
t = (mu0 - mu_hat) / se
print(f't-stat is {t}')
p_val = (1 - st.t.cdf(abs(t), df = dof)) * 1
print(f'p-value is {p_val}')
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
# confidence
conf = 1 - alpha
t_conf = st.t.ppf(1 - (1 - conf), df=dof)
ci = (mu_hat - t_conf * se, mu_hat + t_conf * se)
print('0 exist in the CI?')
print(f'CI with {conf} confidence level = ', ci)
def two_means_paired_two_sides(self, mu, sd, n, alpha):
dof = n - 1
se = sd / np.sqrt(n)
t = (mu - 0) / se
p_val = (1 - st.t.cdf(t, df= dof)) * 2
print(f't-val = ', t)
print(f'Our observed mean difference is {t} (estimated) SE above our null value of 0')
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
# confidence
conf = 1 - alpha
t_conf = st.t.ppf(1 - (1 - conf)/2, df=dof)
ci = (mu - t_conf * se, mu + t_conf * se)
print('0 exist in the CI?')
print(f'CI with {conf} confidence level = ', ci)
def two_means_independent_unpooled(self, mu1, mu2, sd1, sd2, n1, n2, alpha):
dof = np.min([n1-1, n2-1])
se = np.sqrt(sd1**2 / n1 + sd2**2 / n2)
# se = 11.8831
t = abs((mu1 - mu2) / se) # pay attention here
print('dof=', dof)
print('t-value=', t, 'se=', se)
p_val = (1 - st.t.cdf(t, df = dof)) * 2 # if two sides
# p_val = (1 - st.t.cdf(t, df = dof)) # if one sides
print(f'reject if p_value {p_val} < alpha {alpha}')
if p_val < alpha:
print('Reject!')
else:
print('Cannot reject.')
# using CI (two side)
conf = 1 - alpha
t_conf = st.t.ppf(1 - (1 - conf)/2, df=dof)
ci = (mu1 - mu2 - t_conf * se, mu1 - mu2 + t_conf * se)
print('0 exist in the CI?')
print(f'CI with {conf} confidence level = ', ci)
def two_means_independent_pooled(self, mu1, mu2, sd1, sd2, n1, n2, alpha):
    """Two-sided pooled-variance t-test for two independent means.

    Assumes equal population variances; pools the sample variances and
    uses n1 + n2 - 2 degrees of freedom.  Prints the test statistic,
    two-sided p-value, decision at level alpha, and a two-sided
    confidence interval for mu1 - mu2.  Returns None.
    """
    dof = n1 + n2 - 2
    # pooled standard deviation
    sp = np.sqrt(((n1 - 1) * sd1**2 + (n2 - 1) * sd2**2) / (n1 + n2 - 2))
    se = sp * np.sqrt(1/n1 + 1/n2)
    diff = mu1 - mu2
    t_stat = abs(diff / se)
    print('dof=', dof)
    print('t-value=', t_stat, 'se=', se)
    p_val = (1 - st.t.cdf(t_stat, df=dof)) * 2  # two-sided
    print(f'reject if p_value {p_val} < alpha {alpha}')
    print('Reject!' if p_val < alpha else 'Cannot reject.')
    # two-sided confidence interval for the difference in means
    conf = 1 - alpha
    t_conf = st.t.ppf(1 - (1 - conf)/2, df=dof)
    margin = t_conf * se
    ci = (diff - margin, diff + margin)
    print('0 exist in the CI?')
    print(f'CI with {conf} confidence level = ', ci)
def chi_squared_homogeneity(self, df, alpha):
    """Chi-squared test of homogeneity / goodness of fit on a count table.

    df is a pandas DataFrame of observed counts.  For a one-dimensional
    table (one row or one column) a uniform expected count is used with
    cells-1 degrees of freedom; otherwise the usual row*col/total
    expected counts with (rows-1)*(cols-1) degrees of freedom.

    Prints the assumption check (all expected counts >= 5), the dof,
    chi2 statistic, p-value and decision at level alpha.  Returns a dict
    with 'vis' (table annotated with marginal totals) and 'expected'
    (table of expected counts).
    """
    col_totals = df.sum(axis=0)
    row_totals = df.sum(axis=1)
    grand_total = df.values.sum()
    # display table with marginal totals appended
    vis = df.copy()
    vis['total'] = row_totals
    vis.loc['total'] = col_totals
    vis.iloc[-1, -1] = grand_total
    # expected counts under the null hypothesis
    expected = df.copy()
    one_dimensional = 1 in df.shape
    n_rows, n_cols = df.shape
    for i in range(n_rows):
        for j in range(n_cols):
            if one_dimensional:
                expected.iloc[i, j] = grand_total / np.multiply(*df.shape)
            else:
                expected.iloc[i, j] = (row_totals[i] / grand_total) * col_totals[j]
    # the chi-squared approximation needs every expected cell >= 5
    if np.all(expected.values.flatten() >= 5):
        print('assumption is ok: every expected value is at least 5')
    else:
        print('assumption is NOT ok')
    if one_dimensional:
        ddof = len(df.values.flatten()) - 1
    else:
        ddof = (n_rows - 1) * (n_cols - 1)
    observed_flat = df.values.flatten()
    expected_flat = expected.values.flatten()
    print(f'DOF = {ddof}')
    chi2 = np.sum((observed_flat - expected_flat) ** 2 / (expected_flat))
    print(f'chi2 = {chi2}')
    p_val = 1 - st.chi2.cdf(chi2, df=ddof)
    print(f'p-value = {p_val}')
    print(f'reject if p_value {p_val} < alpha {alpha}')
    if p_val < alpha:
        print('Reject!')
    else:
        print('Cannot reject.')
    return {'vis': vis, 'expected': expected}
e3ac995ac5cdaa028ed7d480ff98807bcde67366 | 115 | py | Python | fcos_core/modeling/discriminator/__init__.py | manusheoran/EveryPixelMatters | 8635f7dd01c8d0cd388188f8a05a1247375e4b04 | [
"BSD-2-Clause"
] | 115 | 2020-08-03T06:27:02.000Z | 2022-03-30T06:58:35.000Z | fcos_core/modeling/discriminator/__init__.py | manusheoran/EveryPixelMatters | 8635f7dd01c8d0cd388188f8a05a1247375e4b04 | [
"BSD-2-Clause"
] | 15 | 2020-08-27T09:14:47.000Z | 2022-03-10T22:30:29.000Z | fcos_core/modeling/discriminator/__init__.py | manusheoran/EveryPixelMatters | 8635f7dd01c8d0cd388188f8a05a1247375e4b04 | [
"BSD-2-Clause"
] | 17 | 2020-11-17T12:03:38.000Z | 2022-03-30T10:00:24.000Z | from .fcos_head_discriminator import FCOSDiscriminator
from .fcos_head_discriminator_CA import FCOSDiscriminator_CA | 57.5 | 60 | 0.921739 | 14 | 115 | 7.142857 | 0.5 | 0.16 | 0.24 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.06087 | 115 | 2 | 60 | 57.5 | 0.925926 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e3b17ef01ad6e9e245591cdd07616045f3c37d95 | 78 | py | Python | main_script/__init__.py | boozebrewer/commons | d82794ecc3d218ca25d836c068d680d031383103 | [
"MIT"
] | null | null | null | main_script/__init__.py | boozebrewer/commons | d82794ecc3d218ca25d836c068d680d031383103 | [
"MIT"
] | null | null | null | main_script/__init__.py | boozebrewer/commons | d82794ecc3d218ca25d836c068d680d031383103 | [
"MIT"
def main():
    """Print a short hint showing how to use the commons package."""
    greeting = 'hello from commons, to use "import commons" in python'
    print(greeting)
582211bcb0deea85fb12d06930caae70f4820111 | 19,903 | py | Python | src/cosmosdb-preview/azext_cosmosdb_preview/_help.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | [
"MIT"
] | 2 | 2021-03-24T21:06:20.000Z | 2021-03-24T21:07:58.000Z | src/cosmosdb-preview/azext_cosmosdb_preview/_help.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | [
"MIT"
] | 3 | 2020-05-27T20:16:26.000Z | 2020-07-23T19:46:49.000Z | src/cosmosdb-preview/azext_cosmosdb_preview/_help.py | Mannan2812/azure-cli-extensions | e2b34efe23795f6db9c59100534a40f0813c3d95 | [
"MIT"
] | 5 | 2020-05-09T17:47:09.000Z | 2020-10-01T19:52:06.000Z | # coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.help_files import helps # pylint: disable=unused-import
helps['cosmosdb create'] = """
type: command
short-summary: Create a new Azure Cosmos DB database account.
parameters:
- name: --locations
short-summary: Add a location to the Cosmos DB database account
long-summary: |
Usage: --locations KEY=VALUE [KEY=VALUE ...]
Required Keys: regionName, failoverPriority
Optional Key: isZoneRedundant
Default: single region account in the location of the specified resource group.
Failover priority values are 0 for write regions and greater than 0 for read regions. A failover priority value must be unique and less than the total number of regions.
Multiple locations can be specified by using more than one `--locations` argument.
- name: --databases-to-restore
short-summary: Add a database and its collection names to restore
long-summary: |
Usage: --databases-to-restore name=DatabaseName collections=collection1 [collection2 ...]
examples:
- name: Create a new Azure Cosmos DB database account.
text: az cosmosdb create --name MyCosmosDBDatabaseAccount --resource-group MyResourceGroup --subscription MySubscription
- name: Create a new Azure Cosmos DB database account with two regions. UK South is zone redundant.
text: az cosmosdb create -n myaccount -g mygroup --locations regionName=eastus failoverPriority=0 isZoneRedundant=False --locations regionName=uksouth failoverPriority=1 isZoneRedundant=True --enable-multiple-write-locations
- name: Create a new Azure Cosmos DB database account by restoring from an existing account in the given location
text: az cosmosdb create -n restoredaccount -g mygroup --is-restore-request true --restore-source /subscriptions/2296c272-5d55-40d9-bc05-4d56dc2d7588/providers/Microsoft.DocumentDB/locations/westus/restorableDatabaseAccounts/d056a4f8-044a-436f-80c8-cd3edbc94c68 --restore-timestamp 2020-07-13T16:03:41+0000 --locations regionName=westus failoverPriority=0 isZoneRedundant=False
"""
helps['cosmosdb restore'] = """
type: command
short-summary: Create a new Azure Cosmos DB database account by restoring from an existing database account.
parameters:
- name: --databases-to-restore
short-summary: Add a database and its collection names to restore
long-summary: |
Usage: --databases-to-restore name=DatabaseName collections=collection1 [collection2 ...]
Multiple databases can be specified by using more than one `--databases-to-restore` argument.
examples:
- name: Create a new Azure Cosmos DB database account by restoring from an existing database account.
text: az cosmosdb restore --target-database-account-name MyRestoredCosmosDBDatabaseAccount --account-name MySourceAccount --restore-timestamp 2020-07-13T16:03:41+0000 -g MyResourceGroup --location westus
- name: Create a new Azure Cosmos DB database account by restoring only the selected databases and collections from an existing database account.
text: az cosmosdb restore -g MyResourceGroup --target-database-account-name MyRestoredCosmosDBDatabaseAccount --account-name MySourceAccount --restore-timestamp 2020-07-13T16:03:41+0000 --location westus --databases-to-restore name=MyDB1 collections=collection1 collection2 --databases-to-restore name=MyDB2 collections=collection3 collection4
"""
helps['cosmosdb update'] = """
type: command
short-summary: Update an Azure Cosmos DB database account.
parameters:
- name: --locations
short-summary: Add a location to the Cosmos DB database account
long-summary: |
Usage: --locations KEY=VALUE [KEY=VALUE ...]
Required Keys: regionName, failoverPriority
Optional Key: isZoneRedundant
Default: single region account in the location of the specified resource group.
Failover priority values are 0 for write regions and greater than 0 for read regions. A failover priority value must be unique and less than the total number of regions.
Multiple locations can be specified by using more than one `--locations` argument.
examples:
- name: Update an Azure Cosmos DB database account.
text: az cosmosdb update --capabilities EnableGremlin --name MyCosmosDBDatabaseAccount --resource-group MyResourceGroup
- name: Update an new Azure Cosmos DB database account with two regions. UK South is zone redundant.
text: az cosmosdb update -n myaccount -g mygroup --locations regionName=eastus failoverPriority=0 isZoneRedundant=False --locations regionName=uksouth failoverPriority=1 isZoneRedundant=True --enable-multiple-write-locations
- name: Update the backup policy parameters of a database account with Periodic backup type.
text: az cosmosdb update -n myaccount -g mygroup --backup-interval 240 --backup-retention 24
"""
# ---------------------------------------------------------------------------
# Help for the restorable-resource browsing commands (SQL and MongoDB).
# Typos fixed in user-facing text: "timesamp" -> "timestamp", "a Azure" ->
# "an Azure", "its containers/collections" -> "their".
# ---------------------------------------------------------------------------
helps['cosmosdb restorable-database-account'] = """
type: group
short-summary: Manage restorable Azure Cosmos DB accounts.
"""

helps['cosmosdb restorable-database-account list'] = """
type: command
short-summary: List all the database accounts that can be restored.
"""

helps['cosmosdb restorable-database-account show'] = """
type: command
short-summary: Show the details of a database account that can be restored.
"""

helps['cosmosdb sql restorable-database'] = """
type: group
short-summary: Manage different versions of sql databases that are restorable in an Azure Cosmos DB account.
"""

helps['cosmosdb sql restorable-database list'] = """
type: command
short-summary: List all the versions of all the sql databases that were created / modified / deleted in the given restorable account.
"""

helps['cosmosdb sql restorable-container'] = """
type: group
short-summary: Manage different versions of sql containers that are restorable in a database of an Azure Cosmos DB account.
"""

helps['cosmosdb sql restorable-container list'] = """
type: command
short-summary: List all the versions of all the sql containers that were created / modified / deleted in the given database and restorable account.
"""

helps['cosmosdb sql restorable-resource'] = """
type: group
short-summary: Manage the databases and their containers that can be restored in the given account at the given timestamp and region.
"""

helps['cosmosdb sql restorable-resource list'] = """
type: command
short-summary: List all the databases and their containers that can be restored in the given account at the given timestamp and region.
"""

helps['cosmosdb mongodb restorable-database'] = """
type: group
short-summary: Manage different versions of mongodb databases that are restorable in an Azure Cosmos DB account.
"""

helps['cosmosdb mongodb restorable-database list'] = """
type: command
short-summary: List all the versions of all the mongodb databases that were created / modified / deleted in the given restorable account.
"""

helps['cosmosdb mongodb restorable-collection'] = """
type: group
short-summary: Manage different versions of mongodb collections that are restorable in a database of an Azure Cosmos DB account.
"""

helps['cosmosdb mongodb restorable-collection list'] = """
type: command
short-summary: List all the versions of all the mongodb collections that were created / modified / deleted in the given database and restorable account.
"""

helps['cosmosdb mongodb restorable-resource'] = """
type: group
short-summary: Manage the databases and their collections that can be restored in the given account at the given timestamp and region.
"""

helps['cosmosdb mongodb restorable-resource list'] = """
type: command
short-summary: List all the databases and their collections that can be restored in the given account at the given timestamp and region.
"""
# ---------------------------------------------------------------------------
# Help for the SQL role definition / role assignment RBAC commands.
# Fixed: the `sql role definition delete` example was titled "Create a SQL
# role definition..." (copy-paste error); it now says "Delete".
# ---------------------------------------------------------------------------
helps['cosmosdb sql role'] = """
type: group
short-summary: Manage Azure Cosmos DB SQL role resources.
"""

helps['cosmosdb sql role definition'] = """
type: group
short-summary: Manage Azure Cosmos DB SQL role definitions.
"""

helps['cosmosdb sql role definition create'] = """
type: command
short-summary: Create a SQL role definition under an Azure Cosmos DB account.
examples:
  - name: Create a SQL role definition under an Azure Cosmos DB account using a JSON string.
    text: |
        az cosmosdb sql role definition create --account-name MyAccount --resource-group MyResourceGroup --body '{
            "Id": "be79875a-2cc4-40d5-8958-566017875b39",
            "RoleName": "My Read Only Role",
            "Type": "CustomRole",
            "AssignableScopes": ["/dbs/mydb/colls/mycontainer"],
            "Permissions": [{
                "DataActions": [
                    "Microsoft.DocumentDB/databaseAccounts/readMetadata",
                    "Microsoft.DocumentDB/databaseAccounts/sqlDatabases/containers/items/read",
                    "Microsoft.DocumentDB/databaseAccounts/sqlDatabases/containers/executeQuery",
                    "Microsoft.DocumentDB/databaseAccounts/sqlDatabases/containers/readChangeFeed"
                ]
            }]
        }'
  - name: Create a SQL role definition under an Azure Cosmos DB account using a JSON file.
    text: az cosmosdb sql role definition create --account-name MyAccount --resource-group MyResourceGroup --body @role-definition.json
"""

helps['cosmosdb sql role definition delete'] = """
type: command
short-summary: Delete a SQL role definition under an Azure Cosmos DB account.
examples:
  - name: Delete a SQL role definition under an Azure Cosmos DB account.
    text: az cosmosdb sql role definition delete --account-name MyAccount --resource-group MyResourceGroup --id be79875a-2cc4-40d5-8958-566017875b39
"""

helps['cosmosdb sql role definition exists'] = """
type: command
short-summary: Check if an Azure Cosmos DB role definition exists.
examples:
  - name: Check if an Azure Cosmos DB role definition exists.
    text: az cosmosdb sql role definition exists --account-name MyAccount --resource-group MyResourceGroup --id be79875a-2cc4-40d5-8958-566017875b39
"""

helps['cosmosdb sql role definition list'] = """
type: command
short-summary: List all SQL role definitions under an Azure Cosmos DB account.
examples:
  - name: List all SQL role definitions under an Azure Cosmos DB account.
    text: az cosmosdb sql role definition list --account-name MyAccount --resource-group MyResourceGroup
"""

helps['cosmosdb sql role definition show'] = """
type: command
short-summary: Show the properties of a SQL role definition under an Azure Cosmos DB account.
examples:
  - name: Show the properties of a SQL role definition under an Azure Cosmos DB account.
    text: az cosmosdb sql role definition show --account-name MyAccount --resource-group MyResourceGroup --id be79875a-2cc4-40d5-8958-566017875b39
"""

helps['cosmosdb sql role definition update'] = """
type: command
short-summary: Update a SQL role definition under an Azure Cosmos DB account.
examples:
  - name: Update a SQL role definition under an Azure Cosmos DB account.
    text: az cosmosdb sql role definition update --account-name MyAccount --resource-group MyResourceGroup --body @role-definition.json
"""

helps['cosmosdb sql role assignment'] = """
type: group
short-summary: Manage Azure Cosmos DB SQL role assignments.
"""

helps['cosmosdb sql role assignment create'] = """
type: command
short-summary: Create a SQL role assignment under an Azure Cosmos DB account.
examples:
  - name: Create a SQL role assignment under an Azure Cosmos DB account using Role Definition Name.
    text: |
        az cosmosdb sql role assignment create --account-name MyAccount --resource-group MyResourceGroup \\
            --role-assignment-id cb8ed2d7-2371-4e3c-bd31-6cc1560e84f8 \\
            --role-definition-name "My Read Only Role" \\
            --scope "/dbs/mydb/colls/mycontainer" \\
            --principal-id 6328f5f7-dbf7-4244-bba8-fbb9d8066506
  - name: Create a SQL role assignment under an Azure Cosmos DB account using Role Definition ID.
    text: |
        az cosmosdb sql role assignment create --account-name MyAccount --resource-group MyResourceGroup \\
            --role-assignment-id cb8ed2d7-2371-4e3c-bd31-6cc1560e84f8 \\
            --role-definition-id be79875a-2cc4-40d5-8958-566017875b39 \\
            --scope "/dbs/mydb/colls/mycontainer" \\
            --principal-id 6328f5f7-dbf7-4244-bba8-fbb9d8066506
"""

helps['cosmosdb sql role assignment delete'] = """
type: command
short-summary: Delete a SQL role assignment under an Azure Cosmos DB account.
examples:
  - name: Delete a SQL role assignment under an Azure Cosmos DB account.
    text: az cosmosdb sql role assignment delete --account-name MyAccount --resource-group MyResourceGroup --role-assignment-id cb8ed2d7-2371-4e3c-bd31-6cc1560e84f8
"""

helps['cosmosdb sql role assignment exists'] = """
type: command
short-summary: Check if an Azure Cosmos DB role assignment exists.
examples:
  - name: Check if an Azure Cosmos DB role assignment exists.
    text: az cosmosdb sql role assignment exists --account-name MyAccount --resource-group MyResourceGroup --role-assignment-id cb8ed2d7-2371-4e3c-bd31-6cc1560e84f8
"""

helps['cosmosdb sql role assignment list'] = """
type: command
short-summary: List all SQL role assignments under an Azure Cosmos DB account.
examples:
  - name: List all SQL role assignments under an Azure Cosmos DB account.
    text: az cosmosdb sql role assignment list --account-name MyAccount --resource-group MyResourceGroup
"""

helps['cosmosdb sql role assignment show'] = """
type: command
short-summary: Show the properties of a SQL role assignment under an Azure Cosmos DB account.
examples:
  - name: Show the properties of a SQL role assignment under an Azure Cosmos DB account.
    text: az cosmosdb sql role assignment show --account-name MyAccount --resource-group MyResourceGroup --role-assignment-id cb8ed2d7-2371-4e3c-bd31-6cc1560e84f8
"""

helps['cosmosdb sql role assignment update'] = """
type: command
short-summary: Update a SQL role assignment under an Azure Cosmos DB account.
examples:
  - name: Update a SQL role assignment under an Azure Cosmos DB account.
    text: |
        az cosmosdb sql role assignment update --account-name MyAccount --resource-group MyResourceGroup \\
            --role-assignment-id cb8ed2d7-2371-4e3c-bd31-6cc1560e84f8 \\
            --role-definition-id updated-role-definition-id
"""
# ---------------------------------------------------------------------------
# Help for the Managed Cassandra cluster / datacenter commands.
# User-facing fixes: "doesnot" -> "does not", "atleast" -> "at least",
# "inturn" -> "in turn", "seed needs" -> "seed nodes", and removal of stray
# trailing "\\" line-continuations on single-line example commands.
# ---------------------------------------------------------------------------
helps['managed-cassandra cluster'] = """
type: group
short-summary: Azure Managed Cassandra Cluster.
"""

helps['managed-cassandra cluster create'] = """
type: command
short-summary: Create a Managed Cassandra Cluster.
examples:
  - name: Create a Managed Cassandra Cluster in a given Subscription and ResourceGroup. Either a Cassandra admin password or external seed nodes are required.
    text: |
        az managed-cassandra cluster create --cluster-name MyCluster --resource-group MyResourceGroup \\
            --location westus2 \\
            --delegated-management-subnet-id /subscriptions/94d9b402-77b4-4049-b4c1-947bc6b7729b/resourceGroups/My-vnet/providers/Microsoft.Network/virtualNetworks/test-vnet/subnets/test-subnet \\
            --initial-cassandra-admin-password password
"""

helps['managed-cassandra cluster delete'] = """
type: command
short-summary: Deletes a Managed Cassandra Cluster.
examples:
  - name: Deletes a Managed Cassandra Cluster in the given Subscription and ResourceGroup.
    text: |
        az managed-cassandra cluster delete --cluster-name MyCluster --resource-group MyResourceGroup
"""

helps['managed-cassandra cluster show'] = """
type: command
short-summary: Get a Managed Cassandra Cluster Resource.
examples:
  - name: Gets a Managed Cassandra Cluster Resource. ProvisioningState tells the state of this cluster. If the cluster does not exist a NotFound response is returned.
    text: |
        az managed-cassandra cluster show --cluster-name MyCluster --resource-group MyResourceGroup
"""

helps['managed-cassandra cluster list'] = """
type: command
short-summary: Used to list the Managed Cassandra Clusters in a ResourceGroup and Subscription. If the ResourceGroup is not specified all the clusters in this Subscription are returned.
examples:
  - name: List all Managed Cassandra Clusters in a given Subscription and ResourceGroup.
    text: |
        az managed-cassandra cluster list --resource-group MyResourceGroup
  - name: List all Managed Cassandra Clusters in a given Subscription.
    text: |
        az managed-cassandra cluster list
"""

helps['managed-cassandra cluster node-status'] = """
type: command
short-summary: Gets Cassandra Cluster Node Status.
examples:
  - name: Cassandra Cluster contains datacenters which in turn contain nodes. This command gets the status of these nodes.
    text: |
        az managed-cassandra cluster node-status --cluster-name MyCluster --resource-group MyResourceGroup
"""

helps['managed-cassandra datacenter'] = """
type: group
short-summary: Azure Managed Cassandra DataCenter.
"""

helps['managed-cassandra datacenter create'] = """
type: command
short-summary: Create a Datacenter in an Azure Managed Cassandra Cluster.
examples:
  - name: Create a Managed Cassandra Datacenter in a Cassandra Cluster. Each datacenter should have at least 3 nodes.
    text: |
        az managed-cassandra datacenter create --cluster-name MyCluster --data-center-name MyDataCenter \\
            --resource-group MyResourceGroup \\
            --data-center-location westus2 \\
            --delegated-subnet-id /subscriptions/94d9b402-77b4-4049-b4c1-947bc6b7729b/resourceGroups/My-vnet/providers/Microsoft.Network/virtualNetworks/test-vnet/subnets/test-subnet \\
            --node-count 3
"""

helps['managed-cassandra datacenter update'] = """
type: command
short-summary: Update a Datacenter in an Azure Managed Cassandra Cluster.
examples:
  - name: Scale the number of nodes in a datacenter. This is a scale up operation assuming that the create datacenter was done with 3 nodes. Each datacenter should have at least 3 nodes.
    text: |
        az managed-cassandra datacenter update --cluster-name MyCluster --data-center-name MyDataCenter \\
            --resource-group MyResourceGroup \\
            --node-count 6
  - name: Scale the number of nodes in a datacenter. This is a scale down operation assuming that the create datacenter was done with 3 nodes, followed by a scale up to 6 nodes. Each datacenter should have at least 3 nodes.
    text: |
        az managed-cassandra datacenter update --cluster-name MyCluster --data-center-name MyDataCenter \\
            --resource-group MyResourceGroup \\
            --node-count 4
"""

helps['managed-cassandra datacenter delete'] = """
type: command
short-summary: Deletes a Managed Cassandra Datacenter.
examples:
  - name: Deletes a Managed Cassandra Datacenter in the given Cluster.
    text: |
        az managed-cassandra datacenter delete --data-center-name MyDataCenter --cluster-name MyCluster --resource-group MyResourceGroup
"""

helps['managed-cassandra datacenter show'] = """
type: command
short-summary: Get a Managed Cassandra DataCenter Resource.
examples:
  - name: Gets a Managed Cassandra Datacenter Resource. ProvisioningState tells the state of this datacenter. If the datacenter does not exist a NotFound response is returned.
    text: |
        az managed-cassandra datacenter show --data-center-name MyDataCenter --cluster-name MyCluster --resource-group MyResourceGroup
"""

helps['managed-cassandra datacenter list'] = """
type: command
short-summary: Used to list the Managed Cassandra Datacenters in a given Cluster.
examples:
  - name: List all Managed Cassandra DataCenters in a given Cluster.
    text: |
        az managed-cassandra datacenter list --cluster-name MyCluster --resource-group MyResourceGroup
"""
| 48.90172 | 381 | 0.745114 | 2,546 | 19,903 | 5.82443 | 0.122938 | 0.025491 | 0.038573 | 0.051183 | 0.854002 | 0.798638 | 0.7602 | 0.719469 | 0.705712 | 0.64448 | 0 | 0.027344 | 0.160277 | 19,903 | 406 | 382 | 49.022167 | 0.859929 | 0.019042 | 0 | 0.491525 | 0 | 0.118644 | 0.951632 | 0.115284 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.00565 | 0.002825 | 0 | 0.002825 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
583f5d3eb1fbce54884441957d80e33b580c3686 | 2,025 | py | Python | python/leetcode/easy/test/test_ex0206.py | vilisimo/ads | cba2b04db6fd2755e32d0e3f2e4480fd808155f5 | [
"MIT"
] | null | null | null | python/leetcode/easy/test/test_ex0206.py | vilisimo/ads | cba2b04db6fd2755e32d0e3f2e4480fd808155f5 | [
"MIT"
] | null | null | null | python/leetcode/easy/test/test_ex0206.py | vilisimo/ads | cba2b04db6fd2755e32d0e3f2e4480fd808155f5 | [
"MIT"
] | null | null | null | from leetcode.easy.ex0206 import (
InitialIterativeSolution,
RecursiveSolution,
ListNode,
)
class TestInitialIterativeSolution:
def test_recognizes_null_node(self):
assert not InitialIterativeSolution().reverseList(None)
def test_reverses_one_node_list(self):
root = ListNode(val=1)
result = InitialIterativeSolution().reverseList(root)
assert result == root
assert not result.next
def test_reverses_two_node_list(self):
n1 = ListNode(val=1)
root = ListNode(val=1, next=n1)
result = InitialIterativeSolution().reverseList(root)
assert result == n1
assert result.next == root
assert not result.next.next
def test_reverses_three_node_list(self):
n2 = ListNode(val=1)
n1 = ListNode(val=1, next=n2)
root = ListNode(val=1, next=n1)
result = InitialIterativeSolution().reverseList(root)
assert result == n2
assert result.next == n1
assert result.next.next == root
assert not result.next.next.next
class TestRecursiveSolution:
def test_recognizes_null_node(self):
assert not RecursiveSolution().reverseList(None)
def test_reverses_one_node_list(self):
root = ListNode(val=1)
result = RecursiveSolution().reverseList(root)
assert result == root
assert not result.next
def test_reverses_two_node_list(self):
n1 = ListNode(val=1)
root = ListNode(val=1, next=n1)
result = RecursiveSolution().reverseList(root)
assert result == n1
assert result.next == root
assert not result.next.next
def test_reverses_three_node_list(self):
n2 = ListNode(val=1)
n1 = ListNode(val=1, next=n2)
root = ListNode(val=1, next=n1)
result = RecursiveSolution().reverseList(root)
assert result == n2
assert result.next == n1
assert result.next.next == root
assert not result.next.next.next
| 26.644737 | 63 | 0.647901 | 234 | 2,025 | 5.478632 | 0.141026 | 0.102964 | 0.112324 | 0.074883 | 0.861934 | 0.861934 | 0.829953 | 0.829953 | 0.770671 | 0.770671 | 0 | 0.022742 | 0.261728 | 2,025 | 75 | 64 | 27 | 0.834783 | 0 | 0 | 0.830189 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.377358 | 1 | 0.150943 | false | 0 | 0.018868 | 0 | 0.207547 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
58586e7495925d3b1330544b74ce389f2d16d0ad | 77 | py | Python | src/references/__init__.py | Matimed/Barbarism | 4892092f24f314bc6cfacc1c780436dc59fc90ac | [
"MIT"
] | 2 | 2021-09-09T14:03:40.000Z | 2021-11-03T03:35:55.000Z | src/references/__init__.py | Matimed/Barbarism | 4892092f24f314bc6cfacc1c780436dc59fc90ac | [
"MIT"
] | null | null | null | src/references/__init__.py | Matimed/Barbarism | 4892092f24f314bc6cfacc1c780436dc59fc90ac | [
"MIT"
] | null | null | null | from src.references.biome import Biome
from src.references.layer import Layer | 38.5 | 38 | 0.857143 | 12 | 77 | 5.5 | 0.5 | 0.212121 | 0.515152 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090909 | 77 | 2 | 39 | 38.5 | 0.942857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
5877571b8cebcf080c1bb6f8e429d7aa078aca5e | 17,389 | py | Python | pyflux/gas/tests/gas_llt_tests.py | ThomasHoppe/pyflux | 297f2afc2095acd97c12e827dd500e8ea5da0c0f | [
"BSD-3-Clause"
] | 2,091 | 2016-04-01T02:52:10.000Z | 2022-03-29T11:38:15.000Z | pyflux/gas/tests/gas_llt_tests.py | EricSchles/pyflux | 297f2afc2095acd97c12e827dd500e8ea5da0c0f | [
"BSD-3-Clause"
] | 160 | 2016-04-26T14:52:18.000Z | 2022-03-15T02:09:07.000Z | pyflux/gas/tests/gas_llt_tests.py | EricSchles/pyflux | 297f2afc2095acd97c12e827dd500e8ea5da0c0f | [
"BSD-3-Clause"
] | 264 | 2016-05-02T14:03:31.000Z | 2022-03-29T07:48:20.000Z | import numpy as np
import pyflux as pf
noise = np.random.normal(0,1,200)
data = np.zeros(200)
for i in range(1,len(data)):
data[i] = 1.0*data[i-1] + noise[i]
countdata = np.random.poisson(3,200)
def test_couple_terms():
    """
    Fit a Gaussian GASLLT model with the default (MLE) method and check
    that the latent variable list has the expected length and that no
    estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASNormal())
    model.fit()
    estimates = np.array([lv.value for lv in model.latent_variables.z_list])
    assert estimates.shape[0] == 3
    assert not np.isnan(estimates).any()
def test_couple_terms_integ():
    """
    Fit a Gaussian GASLLT model on once-differenced data and check that
    the latent variable list has the expected length and that no estimate
    is NaN.
    """
    model = pf.GASLLT(data=data, integ=1, family=pf.GASNormal())
    model.fit()
    estimates = np.array([lv.value for lv in model.latent_variables.z_list])
    assert estimates.shape[0] == 3
    assert not np.isnan(estimates).any()
def test_bbvi():
    """
    Fit a Gaussian GASLLT model with BBVI and check that the latent
    variable list has the expected length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASNormal())
    model.fit('BBVI', iterations=100)
    estimates = np.array([lv.value for lv in model.latent_variables.z_list])
    assert estimates.shape[0] == 3
    assert not np.isnan(estimates).any()
def test_mh():
    """
    Fit a Gaussian GASLLT model with Metropolis-Hastings and check that
    the latent variable list has the expected length and that no estimate
    is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASNormal())
    model.fit('M-H', nsims=300)
    estimates = np.array([lv.value for lv in model.latent_variables.z_list])
    assert estimates.shape[0] == 3
    assert not np.isnan(estimates).any()
def test_laplace():
    """
    Fit a Gaussian GASLLT model with the Laplace approximation and check
    that the latent variable list has the expected length and that no
    estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASNormal())
    model.fit('Laplace')
    estimates = np.array([lv.value for lv in model.latent_variables.z_list])
    assert estimates.shape[0] == 3
    assert not np.isnan(estimates).any()
def test_pml():
    """
    Tests a GASLLT model estimated with PML (penalized maximum likelihood)
    and that the length of the latent variable list is correct, and that
    the estimated latent variables are not nan.

    (Docstring fixed: it previously described the Laplace approximation,
    a copy-paste from test_laplace.)
    """
    model = pf.GASLLT(data=data, family=pf.GASNormal())
    model.fit('PML')
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert len(model.latent_variables.z_list) == 3
    assert len(lvs[np.isnan(lvs)]) == 0
def test_predict_length():
    """The out-of-sample prediction dataframe has exactly h rows."""
    model = pf.GASLLT(data=data, family=pf.GASNormal())
    results = model.fit()
    results.summary()
    predictions = model.predict(h=5)
    assert predictions.shape[0] == 5
def test_predict_is_length():
    """
    Verify that the in-sample prediction dataframe has exactly h rows.
    """
    model = pf.GASLLT(data=data, family=pf.GASNormal())
    model.fit()
    assert model.predict_is(h=5).shape[0] == 5
def test_predict_nans():
    """
    Verify that out-of-sample predictions contain no NaN values.
    """
    model = pf.GASLLT(data=data, family=pf.GASNormal())
    result = model.fit()
    result.summary()
    # Predict once and reuse the result: the original evaluated
    # model.predict(h=5) twice, doing the forecasting work twice and
    # applying one call's NaN mask to the other call's values.
    predictions = model.predict(h=5)
    assert not np.isnan(predictions.values).any()
def test_predict_is_nans():
    """
    Verify that in-sample (rolling) predictions contain no NaN values.
    """
    model = pf.GASLLT(data=data, family=pf.GASNormal())
    result = model.fit()
    result.summary()
    # predict_is refits the model for each step, so calling it twice (as
    # the original did) doubles a costly operation; compute it once.
    predictions = model.predict_is(h=5)
    assert not np.isnan(predictions.values).any()
def test_t_couple_terms():
    """
    Fit a Student-t GASLLT model with MLE and verify the latent variable
    list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASt())
    model.fit()
    z_list = model.latent_variables.z_list
    assert len(z_list) == 4
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_t_couple_terms_integ():
    """
    Fit a Student-t GASLLT model on once-integrated data and verify the
    latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, integ=1, family=pf.GASt())
    model.fit()
    z_list = model.latent_variables.z_list
    assert len(z_list) == 4
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_t_bbvi():
    """
    Fit a Student-t GASLLT model with BBVI and verify the latent variable
    list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASt())
    model.fit('BBVI', iterations=100)
    z_list = model.latent_variables.z_list
    assert len(z_list) == 4
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_t_mh():
    """
    Fit a Student-t GASLLT model with Metropolis-Hastings and verify the
    latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASt())
    model.fit('M-H', nsims=300)
    z_list = model.latent_variables.z_list
    assert len(z_list) == 4
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_t_laplace():
    """
    Fit a Student-t GASLLT model with the Laplace approximation and verify
    the latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASt())
    model.fit('Laplace')
    z_list = model.latent_variables.z_list
    assert len(z_list) == 4
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_t_pml():
    """
    Fit a Student-t GASLLT model with PML and verify the latent variable
    list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASt())
    model.fit('PML')
    z_list = model.latent_variables.z_list
    assert len(z_list) == 4
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_t_predict_length():
    """
    Verify that the Student-t model's out-of-sample prediction dataframe
    has exactly h rows.
    """
    model = pf.GASLLT(data=data, family=pf.GASt())
    result = model.fit()
    result.summary()
    assert model.predict(h=5).shape[0] == 5
def test_t_predict_is_length():
    """
    Verify that the Student-t model's in-sample prediction dataframe has
    exactly h rows.
    """
    model = pf.GASLLT(data=data, family=pf.GASt())
    model.fit()
    assert model.predict_is(h=5).shape[0] == 5
def test_t_predict_nans():
    """
    Verify that Student-t out-of-sample predictions contain no NaN values.
    """
    model = pf.GASLLT(data=data, family=pf.GASt())
    result = model.fit()
    result.summary()
    # Predict once and reuse the result: the original called predict(h=5)
    # twice, duplicating the forecast computation.
    predictions = model.predict(h=5)
    assert not np.isnan(predictions.values).any()
def test_t_predict_is_nans():
    """
    Verify that Student-t in-sample predictions contain no NaN values.
    """
    model = pf.GASLLT(data=data, family=pf.GASt())
    result = model.fit()
    result.summary()
    # predict_is refits per step; compute it once instead of twice.
    predictions = model.predict_is(h=5)
    assert not np.isnan(predictions.values).any()
def test_skewt_couple_terms():
    """
    Fit a skew-t GASLLT model with MLE and verify the latent variable
    list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASSkewt())
    model.fit()
    z_list = model.latent_variables.z_list
    assert len(z_list) == 5
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_skewt_couple_terms_integ():
    """
    Fit a skew-t GASLLT model on once-integrated data and verify the
    latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, integ=1, family=pf.GASSkewt())
    model.fit()
    z_list = model.latent_variables.z_list
    assert len(z_list) == 5
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_skewt_bbvi():
    """
    Fit a skew-t GASLLT model with BBVI and verify the latent variable
    list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASSkewt())
    model.fit('BBVI', iterations=100)
    z_list = model.latent_variables.z_list
    assert len(z_list) == 5
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_skewt_mh():
    """
    Fit a skew-t GASLLT model with Metropolis-Hastings and verify the
    latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASSkewt())
    model.fit('M-H', nsims=300)
    z_list = model.latent_variables.z_list
    assert len(z_list) == 5
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
""" Uncomment in future if Skewt becomes more robust
def test_skewt_laplace():
Tests an GAS model estimated with Laplace approximation and that the length of the
latent variable list is correct, and that the estimated latent variables are not nan
model = pf.GASLLT(data=data, family=pf.GASSkewt())
x = model.fit('Laplace')
assert(len(model.latent_variables.z_list) == 4)
lvs = np.array([i.value for i in model.latent_variables.z_list])
assert(len(lvs[np.isnan(lvs)]) == 0)
"""
def test_skewt_pml():
    """
    Fit a skew-t GASLLT model with PML and verify the latent variable
    list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASSkewt())
    model.fit('PML')
    z_list = model.latent_variables.z_list
    assert len(z_list) == 5
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_skewt_predict_length():
    """
    Verify that the skew-t model's out-of-sample prediction dataframe has
    exactly h rows.
    """
    model = pf.GASLLT(data=data, family=pf.GASSkewt())
    result = model.fit()
    result.summary()
    assert model.predict(h=5).shape[0] == 5
def test_skewt_predict_is_length():
    """
    Verify that the skew-t model's in-sample prediction dataframe has
    exactly h rows.
    """
    model = pf.GASLLT(data=data, family=pf.GASSkewt())
    model.fit()
    assert model.predict_is(h=5).shape[0] == 5
def test_skewt_predict_nans():
    """
    Verify that skew-t out-of-sample predictions contain no NaN values.
    """
    # NOTE: the original docstring contained a stray copy of the model
    # construction line; removed here.
    model = pf.GASLLT(data=data, family=pf.GASSkewt())
    result = model.fit()
    result.summary()
    # Predict once and reuse the result instead of calling predict twice.
    predictions = model.predict(h=5)
    assert not np.isnan(predictions.values).any()
"""
def test_skewt_predict_is_nans():
Tests that the in-sample predictions are not nans
model = pf.GASLLT(data=data, family=pf.GASSkewt())
x = model.fit()
x.summary()
assert(len(model.predict_is(h=5).values[np.isnan(model.predict_is(h=5).values)]) == 0)
"""
def test_laplace_couple_terms():
    """
    Fit a Laplace-family GASLLT model with MLE and verify the latent
    variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASLaplace())
    model.fit()
    z_list = model.latent_variables.z_list
    assert len(z_list) == 3
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_laplace_couple_terms_integ():
    """
    Fit a Laplace-family GASLLT model on once-integrated data and verify
    the latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, integ=1, family=pf.GASLaplace())
    model.fit()
    z_list = model.latent_variables.z_list
    assert len(z_list) == 3
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_laplace_bbvi():
    """
    Fit a Laplace-family GASLLT model with BBVI and verify the latent
    variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASLaplace())
    model.fit('BBVI', iterations=100)
    z_list = model.latent_variables.z_list
    assert len(z_list) == 3
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_laplace_mh():
    """
    Fit a Laplace-family GASLLT model with Metropolis-Hastings and verify
    the latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASLaplace())
    model.fit('M-H', nsims=300)
    z_list = model.latent_variables.z_list
    assert len(z_list) == 3
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_laplace_laplace():
    """
    Fit a Laplace-family GASLLT model with the Laplace approximation and
    verify the latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASLaplace())
    model.fit('Laplace')
    z_list = model.latent_variables.z_list
    assert len(z_list) == 3
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_laplace_pml():
    """
    Fit a Laplace-family GASLLT model with PML and verify the latent
    variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=data, family=pf.GASLaplace())
    model.fit('PML')
    z_list = model.latent_variables.z_list
    assert len(z_list) == 3
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_laplace_predict_length():
    """
    Verify that the Laplace-family model's out-of-sample prediction
    dataframe has exactly h rows.
    """
    model = pf.GASLLT(data=data, family=pf.GASLaplace())
    result = model.fit()
    result.summary()
    assert model.predict(h=5).shape[0] == 5
def test_laplace_predict_is_length():
    """
    Verify that the Laplace-family model's in-sample prediction dataframe
    has exactly h rows.
    """
    model = pf.GASLLT(data=data, family=pf.GASLaplace())
    model.fit()
    assert model.predict_is(h=5).shape[0] == 5
def test_laplace_predict_nans():
    """
    Verify that Laplace-family out-of-sample predictions contain no NaNs.
    """
    model = pf.GASLLT(data=data, family=pf.GASLaplace())
    result = model.fit()
    result.summary()
    # Predict once and reuse the result instead of calling predict twice.
    predictions = model.predict(h=5)
    assert not np.isnan(predictions.values).any()
def test_laplace_predict_is_nans():
    """
    Verify that Laplace-family in-sample predictions contain no NaNs.
    """
    model = pf.GASLLT(data=data, family=pf.GASLaplace())
    result = model.fit()
    result.summary()
    # predict_is refits per step; compute it once instead of twice.
    predictions = model.predict_is(h=5)
    assert not np.isnan(predictions.values).any()
def test_poisson_couple_terms():
    """
    Fit a Poisson GASLLT model on count data with MLE and verify the
    latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=countdata, family=pf.GASPoisson())
    model.fit()
    z_list = model.latent_variables.z_list
    assert len(z_list) == 2
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_poisson_bbvi():
    """
    Fit a Poisson GASLLT model with BBVI and verify the latent variable
    list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=countdata, family=pf.GASPoisson())
    model.fit('BBVI', iterations=100)
    z_list = model.latent_variables.z_list
    assert len(z_list) == 2
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_poisson_mh():
    """
    Fit a Poisson GASLLT model with Metropolis-Hastings and verify the
    latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=countdata, family=pf.GASPoisson())
    model.fit('M-H', nsims=300)
    z_list = model.latent_variables.z_list
    assert len(z_list) == 2
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_poisson_laplace():
    """
    Fit a Poisson GASLLT model with the Laplace approximation and verify
    the latent variable list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=countdata, family=pf.GASPoisson())
    model.fit('Laplace')
    z_list = model.latent_variables.z_list
    assert len(z_list) == 2
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_poisson_pml():
    """
    Fit a Poisson GASLLT model with PML and verify the latent variable
    list length and that no estimate is NaN.
    """
    model = pf.GASLLT(data=countdata, family=pf.GASPoisson())
    model.fit('PML')
    z_list = model.latent_variables.z_list
    assert len(z_list) == 2
    estimates = np.array([z.value for z in z_list])
    assert not np.isnan(estimates).any()
def test_poisson_predict_length():
    """
    Verify that the Poisson model's out-of-sample prediction dataframe
    has exactly h rows.
    """
    model = pf.GASLLT(data=countdata, family=pf.GASPoisson())
    result = model.fit()
    result.summary()
    assert model.predict(h=5).shape[0] == 5
def test_poisson_predict_is_length():
    """
    Verify that the Poisson model's in-sample prediction dataframe has
    exactly h rows.
    """
    model = pf.GASLLT(data=countdata, family=pf.GASPoisson())
    model.fit()
    assert model.predict_is(h=5).shape[0] == 5
def test_poisson_predict_nans():
    """
    Verify that Poisson out-of-sample predictions contain no NaN values.
    """
    model = pf.GASLLT(data=countdata, family=pf.GASPoisson())
    result = model.fit()
    result.summary()
    # Predict once and reuse the result instead of calling predict twice.
    predictions = model.predict(h=5)
    assert not np.isnan(predictions.values).any()
def test_poisson_predict_is_nans():
    """
    Verify that Poisson in-sample predictions contain no NaN values.
    """
    model = pf.GASLLT(data=countdata, family=pf.GASPoisson())
    result = model.fit()
    result.summary()
    # predict_is refits per step; compute it once instead of twice.
    predictions = model.predict_is(h=5)
    assert not np.isnan(predictions.values).any()
543307c610c6d64d4ea7aae91a68bb506f603f4a | 25,733 | py | Python | cFSPlib/python_api_client/swagger_client/api/mantle_architecture_api.py | cloudFPGA/cFSP | f6d93ff8eddb774711064e59e4dc4f931d803d5f | [
"Apache-2.0"
] | 2 | 2022-01-31T10:19:13.000Z | 2022-02-15T06:07:04.000Z | cFSPlib/python_api_client/swagger_client/api/mantle_architecture_api.py | cloudFPGA/cFSP | f6d93ff8eddb774711064e59e4dc4f931d803d5f | [
"Apache-2.0"
] | 1 | 2022-01-24T16:16:52.000Z | 2022-01-25T19:21:52.000Z | cFSPlib/python_api_client/swagger_client/api/mantle_architecture_api.py | cloudFPGA/cFSP | f6d93ff8eddb774711064e59e4dc4f931d803d5f | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
cloudFPGA Resource Manager API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 0.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class MantleArchitectureApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def cf_manager_rest_api_get_composable_logic_all_part(self, username, password, part, **kwargs):  # noqa: E501
    """Returns all composable logics of the given part that are `IN_USE`  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request that returns the request thread.
    >>> thread = api.cf_manager_rest_api_get_composable_logic_all_part(username, password, part, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: OpenStack username (required)
    :param str password: OpenStack password (required)
    :param str part: The part of the composable logics (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths return the helper's result directly,
    # so a single call covers both cases.
    kwargs['_return_http_data_only'] = True
    return self.cf_manager_rest_api_get_composable_logic_all_part_with_http_info(username, password, part, **kwargs)  # noqa: E501
def cf_manager_rest_api_get_composable_logic_all_part_with_http_info(self, username, password, part, **kwargs):  # noqa: E501
    """Returns all composable logics of the given part that are `IN_USE`  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request that returns the request thread.
    >>> thread = api.cf_manager_rest_api_get_composable_logic_all_part_with_http_info(username, password, part, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: OpenStack username (required)
    :param str password: OpenStack password (required)
    :param str part: The part of the composable logics (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    required = ('username', 'password', 'part')
    recognised = set(required) | {
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }

    params = locals()
    # Reject unknown keyword arguments, folding known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cf_manager_rest_api_get_composable_logic_all_part" % key
            )
        params[key] = val
    del params['kwargs']

    # Every required parameter must be supplied and non-None.
    for name in required:
        if params.get(name) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`cf_manager_rest_api_get_composable_logic_all_part`" % name
            )

    collection_formats = {}
    path_params = {'part': params['part']}  # noqa: E501
    query_params = [
        ('username', params['username']),  # noqa: E501
        ('password', params['password']),  # noqa: E501
    ]
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # This endpoint uses no authentication scheme.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/composablelogic/by_part/{part}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def cf_manager_rest_api_get_composable_logic_all_prp(self, username, password, prp, **kwargs):  # noqa: E501
    """Returns all composable logics of the given prp-type that are `IN_USE`  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request that returns the request thread.
    >>> thread = api.cf_manager_rest_api_get_composable_logic_all_prp(username, password, prp, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: OpenStack username (required)
    :param str password: OpenStack password (required)
    :param int prp: The prp-level of the composable logics (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths return the helper's result directly.
    kwargs['_return_http_data_only'] = True
    return self.cf_manager_rest_api_get_composable_logic_all_prp_with_http_info(username, password, prp, **kwargs)  # noqa: E501
def cf_manager_rest_api_get_composable_logic_all_prp_with_http_info(self, username, password, prp, **kwargs):  # noqa: E501
    """Returns all composable logics of the given prp-type that are `IN_USE`  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request that returns the request thread.
    >>> thread = api.cf_manager_rest_api_get_composable_logic_all_prp_with_http_info(username, password, prp, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: OpenStack username (required)
    :param str password: OpenStack password (required)
    :param int prp: The prp-level of the composable logics (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    required = ('username', 'password', 'prp')
    recognised = set(required) | {
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }

    params = locals()
    # Reject unknown keyword arguments, folding known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cf_manager_rest_api_get_composable_logic_all_prp" % key
            )
        params[key] = val
    del params['kwargs']

    # Every required parameter must be supplied and non-None.
    for name in required:
        if params.get(name) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`cf_manager_rest_api_get_composable_logic_all_prp`" % name
            )

    collection_formats = {}
    path_params = {'prp': params['prp']}  # noqa: E501
    query_params = [
        ('username', params['username']),  # noqa: E501
        ('password', params['password']),  # noqa: E501
    ]
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # This endpoint uses no authentication scheme.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/composablelogic/by_prp/{prp}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def cf_manager_rest_api_get_composable_logic_all_shell_type(self, username, password, shell_type, **kwargs):  # noqa: E501
    """Returns all composable logics of the given shell-type that are `IN_USE`  # noqa: E501

    If the resulting list is empty, the shell_type is invalid (or no such composalbe logics exist).  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request that returns the request thread.
    >>> thread = api.cf_manager_rest_api_get_composable_logic_all_shell_type(username, password, shell_type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: OpenStack username (required)
    :param str password: OpenStack password (required)
    :param str shell_type: Name of cloudFPGA Shell (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths return the helper's result directly.
    kwargs['_return_http_data_only'] = True
    return self.cf_manager_rest_api_get_composable_logic_all_shell_type_with_http_info(username, password, shell_type, **kwargs)  # noqa: E501
def cf_manager_rest_api_get_composable_logic_all_shell_type_with_http_info(self, username, password, shell_type, **kwargs):  # noqa: E501
    """Returns all composable logics of the given shell-type that are `IN_USE`  # noqa: E501

    If the resulting list is empty, the shell_type is invalid (or no such composalbe logics exist).  # noqa: E501
    Synchronous by default; pass async_req=True for an asynchronous
    request that returns the request thread.
    >>> thread = api.cf_manager_rest_api_get_composable_logic_all_shell_type_with_http_info(username, password, shell_type, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: OpenStack username (required)
    :param str password: OpenStack password (required)
    :param str shell_type: Name of cloudFPGA Shell (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    required = ('username', 'password', 'shell_type')
    recognised = set(required) | {
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }

    params = locals()
    # Reject unknown keyword arguments, folding known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cf_manager_rest_api_get_composable_logic_all_shell_type" % key
            )
        params[key] = val
    del params['kwargs']

    # Every required parameter must be supplied and non-None.
    for name in required:
        if params.get(name) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`cf_manager_rest_api_get_composable_logic_all_shell_type`" % name
            )

    collection_formats = {}
    path_params = {'shell_type': params['shell_type']}  # noqa: E501
    query_params = [
        ('username', params['username']),  # noqa: E501
        ('password', params['password']),  # noqa: E501
    ]
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # This endpoint uses no authentication scheme.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/composablelogic/by_shell/{shell_type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def cf_manager_rest_api_get_composable_logic_dcp(self, username, password, cl_id, **kwargs):  # noqa: E501
    """Get the dcp file of a composable logic  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request that returns the request thread.
    >>> thread = api.cf_manager_rest_api_get_composable_logic_dcp(username, password, cl_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: OpenStack username (required)
    :param str password: OpenStack password (required)
    :param int cl_id: ID of a composable logic (Static Shell or Mantles) (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths return the helper's result directly.
    kwargs['_return_http_data_only'] = True
    return self.cf_manager_rest_api_get_composable_logic_dcp_with_http_info(username, password, cl_id, **kwargs)  # noqa: E501
def cf_manager_rest_api_get_composable_logic_dcp_with_http_info(self, username, password, cl_id, **kwargs):  # noqa: E501
    """Get the dcp file of a composable logic  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous
    request that returns the request thread.
    >>> thread = api.cf_manager_rest_api_get_composable_logic_dcp_with_http_info(username, password, cl_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: OpenStack username (required)
    :param str password: OpenStack password (required)
    :param int cl_id: ID of a composable logic (Static Shell or Mantles) (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    required = ('username', 'password', 'cl_id')
    recognised = set(required) | {
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    }

    params = locals()
    # Reject unknown keyword arguments, folding known ones into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in recognised:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cf_manager_rest_api_get_composable_logic_dcp" % key
            )
        params[key] = val
    del params['kwargs']

    # Every required parameter must be supplied and non-None.
    for name in required:
        if params.get(name) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling "
                "`cf_manager_rest_api_get_composable_logic_dcp`" % name
            )

    collection_formats = {}
    path_params = {'cl_id': params['cl_id']}  # noqa: E501
    query_params = [
        ('username', params['username']),  # noqa: E501
        ('password', params['password']),  # noqa: E501
    ]
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # This endpoint uses no authentication scheme.
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/composablelogic/{cl_id}/dcp', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def cf_manager_rest_api_get_composable_logic_meta(self, username, password, cl_id, **kwargs): # noqa: E501
"""Get the meta data of a composable logic # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.cf_manager_rest_api_get_composable_logic_meta(username, password, cl_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: OpenStack username (required)
:param str password: OpenStack password (required)
:param int cl_id: ID of a composable logic (Static Shell or Mantles) (required)
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.cf_manager_rest_api_get_composable_logic_meta_with_http_info(username, password, cl_id, **kwargs) # noqa: E501
else:
(data) = self.cf_manager_rest_api_get_composable_logic_meta_with_http_info(username, password, cl_id, **kwargs) # noqa: E501
return data
def cf_manager_rest_api_get_composable_logic_meta_with_http_info(self, username, password, cl_id, **kwargs): # noqa: E501
"""Get the meta data of a composable logic # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.cf_manager_rest_api_get_composable_logic_meta_with_http_info(username, password, cl_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: OpenStack username (required)
:param str password: OpenStack password (required)
:param int cl_id: ID of a composable logic (Static Shell or Mantles) (required)
:return: Image
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['username', 'password', 'cl_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method cf_manager_rest_api_get_composable_logic_meta" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'username' is set
if ('username' not in params or
params['username'] is None):
raise ValueError("Missing the required parameter `username` when calling `cf_manager_rest_api_get_composable_logic_meta`") # noqa: E501
# verify the required parameter 'password' is set
if ('password' not in params or
params['password'] is None):
raise ValueError("Missing the required parameter `password` when calling `cf_manager_rest_api_get_composable_logic_meta`") # noqa: E501
# verify the required parameter 'cl_id' is set
if ('cl_id' not in params or
params['cl_id'] is None):
raise ValueError("Missing the required parameter `cl_id` when calling `cf_manager_rest_api_get_composable_logic_meta`") # noqa: E501
collection_formats = {}
path_params = {}
if 'cl_id' in params:
path_params['cl_id'] = params['cl_id'] # noqa: E501
query_params = []
if 'username' in params:
query_params.append(('username', params['username'])) # noqa: E501
if 'password' in params:
query_params.append(('password', params['password'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/composablelogic/{cl_id}/meta', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Image', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 45.545133 | 160 | 0.641161 | 3,137 | 25,733 | 4.976729 | 0.056742 | 0.040482 | 0.041635 | 0.051243 | 0.949398 | 0.944594 | 0.944594 | 0.931719 | 0.922047 | 0.92115 | 0 | 0.013114 | 0.274006 | 25,733 | 564 | 161 | 45.625887 | 0.822556 | 0.333307 | 0 | 0.752412 | 1 | 0 | 0.233969 | 0.092524 | 0 | 0 | 0 | 0 | 0 | 1 | 0.03537 | false | 0.160772 | 0.012862 | 0 | 0.099678 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 9 |
54809a6304b43637d6a0ac2b23bd747706327531 | 163 | py | Python | pommerman/loss.py | amy-cao/playground | 68a09ed0687dfd334b472090c3053600d4997c67 | [
"Apache-2.0"
] | 2 | 2018-11-10T08:31:13.000Z | 2018-11-13T08:16:45.000Z | pommerman/loss.py | amy-cao/playground | 68a09ed0687dfd334b472090c3053600d4997c67 | [
"Apache-2.0"
] | null | null | null | pommerman/loss.py | amy-cao/playground | 68a09ed0687dfd334b472090c3053600d4997c67 | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
def softmax_cross_entropy_with_logits(y_true, y_pred):
return tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_true, logits=y_pred) | 40.75 | 83 | 0.846626 | 29 | 163 | 4.310345 | 0.586207 | 0.192 | 0.304 | 0.368 | 0.464 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006711 | 0.08589 | 163 | 4 | 83 | 40.75 | 0.832215 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 8 |
b70d171a761532614916f8d2291c8cde2622fbd2 | 114 | py | Python | openhands/models/ssl/__init__.py | AI4Bharat/OpenHands | a3c2c416395d70c7eb63294d955a84d1c8ea4410 | [
"Apache-2.0"
] | 13 | 2021-10-09T14:42:40.000Z | 2022-03-21T10:40:50.000Z | openhands/models/ssl/__init__.py | AI4Bharat/OpenHands | a3c2c416395d70c7eb63294d955a84d1c8ea4410 | [
"Apache-2.0"
] | 15 | 2021-10-10T03:20:44.000Z | 2022-03-16T03:19:14.000Z | openhands/models/ssl/__init__.py | AI4Bharat/OpenHands | a3c2c416395d70c7eb63294d955a84d1c8ea4410 | [
"Apache-2.0"
] | 2 | 2022-03-05T14:25:08.000Z | 2022-03-17T07:31:44.000Z | from .dpc_rnn import DPC_RNN_Pretrainer, DPC_RNN_Finetuner
__all__ = ["DPC_RNN_Pretrainer", "DPC_RNN_Finetuner"]
| 28.5 | 58 | 0.824561 | 17 | 114 | 4.764706 | 0.411765 | 0.37037 | 0.395062 | 0.469136 | 0.765432 | 0.765432 | 0 | 0 | 0 | 0 | 0 | 0 | 0.087719 | 114 | 3 | 59 | 38 | 0.778846 | 0 | 0 | 0 | 0 | 0 | 0.307018 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
3f8a62886a0ee3f2fcdbe189f8dcacf3c253943a | 1,838 | py | Python | tests/datatypes/test_slice.py | cherub96/voc | 2692d56059e4d4a52768270feaf5179b23609b04 | [
"BSD-3-Clause"
] | 1 | 2021-01-03T00:59:50.000Z | 2021-01-03T00:59:50.000Z | tests/datatypes/test_slice.py | cherub96/voc | 2692d56059e4d4a52768270feaf5179b23609b04 | [
"BSD-3-Clause"
] | null | null | null | tests/datatypes/test_slice.py | cherub96/voc | 2692d56059e4d4a52768270feaf5179b23609b04 | [
"BSD-3-Clause"
] | null | null | null | from .. utils import TranspileTestCase
class SliceTests(TranspileTestCase):
def test_slice_list(self):
self.assertCodeExecution("""
x = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
print("x[:] = ", x[:])
print("x[5:] = ", x[5:])
print("x[:5] = ", x[:5])
print("x[2:8] = ", x[2:8])
print("x[::2] = ", x[::2])
print("x[5::2] = ", x[5::2])
print("x[:5:2] = ", x[:5:2])
print("x[2:8:2] = ", x[2:8:2])
""")
def test_slice_range(self):
self.assertCodeExecution("""
x = range(0, 10)
print("x[:] = ", x[:])
print("x[5:] = ", x[5:])
print("x[:5] = ", x[:5])
print("x[2:8] = ", x[2:8])
print("x[::2] = ", x[::2])
print("x[5::2] = ", x[5::2])
print("x[:5:2] = ", x[:5:2])
print("x[2:8:2] = ", x[2:8:2])
""")
def test_slice_string(self):
self.assertCodeExecution("""
x = "0123456789a"
print("x[:] = ", x[:])
print("x[5:] = ", x[5:])
print("x[:5] = ", x[:5])
print("x[2:8] = ", x[2:8])
print("x[::2] = ", x[::2])
print("x[5::2] = ", x[5::2])
print("x[:5:2] = ", x[:5:2])
print("x[2:8:2] = ", x[2:8:2])
""")
def test_slice_tuple(self):
self.assertCodeExecution("""
x = (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
print("x[:] = ", x[:])
print("x[5:] = ", x[5:])
print("x[:5] = ", x[:5])
print("x[2:8] = ", x[2:8])
print("x[::2] = ", x[::2])
print("x[5::2] = ", x[5::2])
print("x[:5:2] = ", x[:5:2])
print("x[2:8:2] = ", x[2:8:2])
""")
| 30.633333 | 50 | 0.334603 | 257 | 1,838 | 2.361868 | 0.11284 | 0.31631 | 0.184514 | 0.105437 | 0.721582 | 0.721582 | 0.721582 | 0.721582 | 0.721582 | 0.721582 | 0 | 0.119069 | 0.392274 | 1,838 | 59 | 51 | 31.152542 | 0.424351 | 0 | 0 | 0.8 | 0 | 0.04 | 0.797062 | 0 | 0 | 0 | 0 | 0 | 0.08 | 1 | 0.08 | false | 0 | 0.02 | 0 | 0.12 | 0.64 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 11 |
4d018cea2984a89520b7ae41f589db5cec59db53 | 1,305 | py | Python | grammer_practice/class_extend_why_use_super.py | NiroDu/python-tricks | 27d504655b1fd7417bd0e6058293209814efcc21 | [
"MIT"
] | null | null | null | grammer_practice/class_extend_why_use_super.py | NiroDu/python-tricks | 27d504655b1fd7417bd0e6058293209814efcc21 | [
"MIT"
] | null | null | null | grammer_practice/class_extend_why_use_super.py | NiroDu/python-tricks | 27d504655b1fd7417bd0e6058293209814efcc21 | [
"MIT"
] | null | null | null | # 经典的菱形继承案例,BC 继承 A,然后 D 继承 BC,创造一个 D 的对象
# ---> B ---
# A --| |--> D
# ---> C ---
# 使用 super() 可以很好地避免构造函数被调用两次。
# 先是不使用super()看看效果:
class A:
def __init__(self):
print('enter A')
print('leave A')
class B(A):
def __init__(self):
print('enter B')
A.__init__(self)
print('leave B')
class C(A):
def __init__(self):
print('enter C')
A.__init__(self)
print('leave C')
class D(B, C):
def __init__(self):
print('enter D')
B.__init__(self)
C.__init__(self)
print('leave D')
d = D()
# enter D
# enter B
# enter A
# leave A
# leave B
# enter C
# enter A
# leave A
# leave C
# leave D
# 再使用super()看看输出
# class A():
# def __init__(self):
# print('enter A')
# print('leave A')
#
#
# class B(A):
# def __init__(self):
# print('enter B')
# super().__init__()
# print('leave B')
#
#
# class C(A):
# def __init__(self):
# print('enter C')
# super().__init__()
# print('leave C')
#
#
# class D(B, C):
# def __init__(self):
# print('enter D')
# super().__init__()
# print('leave D')
#
#
# d = D()
# enter D
# enter B
# enter C
# enter A
# leave A
# leave C
# leave B
# leave D
| 15 | 41 | 0.488123 | 176 | 1,305 | 3.278409 | 0.136364 | 0.166378 | 0.247834 | 0.221837 | 0.766031 | 0.714038 | 0.714038 | 0.714038 | 0.714038 | 0.714038 | 0 | 0 | 0.335632 | 1,305 | 86 | 42 | 15.174419 | 0.665513 | 0.562452 | 0 | 0.285714 | 0 | 0 | 0.108317 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.190476 | false | 0 | 0 | 0 | 0.380952 | 0.380952 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4d0e28b916512f55bf03bc5ece47e65bd25b9701 | 10,455 | py | Python | pybench/Strings.py | haypo/pymicrobench | 7c6b92deaf5cf0c3fc965fcfcbc6a78f7d0d10f4 | [
"MIT"
] | 3 | 2018-01-17T18:45:23.000Z | 2020-10-02T06:26:03.000Z | pybench/Strings.py | vstinner/pymicrobench | 7c6b92deaf5cf0c3fc965fcfcbc6a78f7d0d10f4 | [
"MIT"
] | null | null | null | pybench/Strings.py | vstinner/pymicrobench | 7c6b92deaf5cf0c3fc965fcfcbc6a78f7d0d10f4 | [
"MIT"
] | 4 | 2018-01-17T18:45:23.000Z | 2020-10-08T15:24:51.000Z | import pyperf
from six.moves import intern, xrange
from pybench import Test
class ConcatStrings(Test):
version = 2.0
operations = 10 * 5
inner_loops = 10 * 5
def test(self, loops):
# Make sure the strings are *not* interned
s = ''.join(map(str, range(100)))
t = ''.join(map(str, range(1, 101)))
range_it = xrange(loops)
t0 = pyperf.perf_counter()
for _ in range_it:
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
t + s
return pyperf.perf_counter() - t0
class CompareStrings(Test):
version = 2.0
operations = 10 * 5
inner_loops = 10 * 5
def test(self, loops):
# Make sure the strings are *not* interned
s = ''.join(map(str, range(10)))
t = ''.join(map(str, range(10))) + "abc"
range_it = xrange(loops)
t0 = pyperf.perf_counter()
for _ in range_it:
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
t < s
t > s
t == s
t > s
t < s
return pyperf.perf_counter() - t0
class CompareInternedStrings(Test):
version = 2.0
operations = 10 * 5
inner_loops = 10 * 5
def test(self, loops):
# Make sure the strings *are* interned
s = intern(''.join(map(str, range(10))))
t = s
range_it = xrange(loops)
t0 = pyperf.perf_counter()
for _ in range_it:
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
t == s
t == s
t >= s
t > s
t < s
return pyperf.perf_counter() - t0
class CreateStringsWithConcat(Test):
version = 2.0
operations = 10 * 5
def test(self, loops):
range_it = xrange(loops)
t0 = pyperf.perf_counter()
for _ in range_it:
s = 'om'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
s = s + 'xax'
s = s + 'xbx'
s = s + 'xcx'
s = s + 'xdx'
s = s + 'xex'
return pyperf.perf_counter() - t0
class StringSlicing(Test):
version = 2.0
operations = 5 * 7
inner_loops = 5
def test(self, loops):
s = ''.join(map(str, range(100)))
range_it = xrange(loops)
t0 = pyperf.perf_counter()
for _ in range_it:
s[50:]
s[:25]
s[50:55]
s[-1:]
s[:1]
s[2:]
s[11:-11]
s[50:]
s[:25]
s[50:55]
s[-1:]
s[:1]
s[2:]
s[11:-11]
s[50:]
s[:25]
s[50:55]
s[-1:]
s[:1]
s[2:]
s[11:-11]
s[50:]
s[:25]
s[50:55]
s[-1:]
s[:1]
s[2:]
s[11:-11]
s[50:]
s[:25]
s[50:55]
s[-1:]
s[:1]
s[2:]
s[11:-11]
return pyperf.perf_counter() - t0
# String methods
if hasattr('', 'lower'):
class StringMappings(Test):
version = 2.0
operations = 3 * (5 + 4 + 2 + 1)
def test(self, loops):
s = ''.join(map(chr, range(20)))
t = ''.join(map(chr, range(50)))
u = ''.join(map(chr, range(100)))
v = ''.join(map(chr, range(256)))
range_it = xrange(loops)
t0 = pyperf.perf_counter()
for _ in range_it:
s.lower()
s.lower()
s.lower()
s.lower()
s.lower()
s.upper()
s.upper()
s.upper()
s.upper()
s.upper()
s.title()
s.title()
s.title()
s.title()
s.title()
t.lower()
t.lower()
t.lower()
t.lower()
t.upper()
t.upper()
t.upper()
t.upper()
t.title()
t.title()
t.title()
t.title()
u.lower()
u.lower()
u.upper()
u.upper()
u.title()
u.title()
v.lower()
v.upper()
v.title()
return pyperf.perf_counter() - t0
class StringPredicates(Test):
version = 2.0
operations = 10 * 7
inner_loops = 10
def test(self, loops):
data = ('abc', '123', ' ', '\xe4\xf6\xfc', '\xdf' * 10)
len_data = len(data)
range_it = xrange(loops)
t0 = pyperf.perf_counter()
for i in range_it:
s = data[i % len_data]
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
s.isalnum()
s.isalpha()
s.isdigit()
s.islower()
s.isspace()
s.istitle()
s.isupper()
return pyperf.perf_counter() - t0
| 19.183486 | 69 | 0.28044 | 1,092 | 10,455 | 2.647436 | 0.083333 | 0.104462 | 0.152542 | 0.20339 | 0.838464 | 0.793497 | 0.738499 | 0.695953 | 0.685922 | 0.651332 | 0 | 0.041524 | 0.608417 | 10,455 | 544 | 70 | 19.21875 | 0.664631 | 0.012721 | 0 | 0.921429 | 0 | 0 | 0.017641 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016667 | false | 0 | 0.007143 | 0 | 0.102381 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4d42aa101ca7b61672807fa9592e3a1a45e0643d | 147 | py | Python | check-cuda.py | bagheria/nchardoc2vec | 40e994a8a60a9d7bafccc675105357656d1b0897 | [
"MIT"
] | null | null | null | check-cuda.py | bagheria/nchardoc2vec | 40e994a8a60a9d7bafccc675105357656d1b0897 | [
"MIT"
] | null | null | null | check-cuda.py | bagheria/nchardoc2vec | 40e994a8a60a9d7bafccc675105357656d1b0897 | [
"MIT"
] | null | null | null | import torch
print(torch.cuda.current_device())
print(torch.cuda.device(0))
print(torch.cuda.device_count())
print(torch.cuda.get_device_name(0))
| 21 | 36 | 0.789116 | 24 | 147 | 4.666667 | 0.416667 | 0.357143 | 0.5 | 0.357143 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014286 | 0.047619 | 147 | 6 | 37 | 24.5 | 0.785714 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.2 | 0 | 0.2 | 0.8 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
4d9f9032913a586d6221f740f82dcc924130b11b | 8,705 | py | Python | example/cta_multi_signal/nr.py | vincent87lee/alphahunter | 5f45dbd5f09354dd161606f7e740f8c8d8ae2772 | [
"MIT"
] | 149 | 2019-12-05T05:26:15.000Z | 2022-03-15T03:44:46.000Z | example/cta_multi_signal/nr.py | webclinic017/alphahunter | e3ccc10bb8b641a6a516ec7cd908e5b006343264 | [
"MIT"
] | 4 | 2020-09-12T20:46:06.000Z | 2021-09-01T16:39:14.000Z | example/cta_multi_signal/nr.py | webclinic017/alphahunter | e3ccc10bb8b641a6a516ec7cd908e5b006343264 | [
"MIT"
] | 73 | 2019-11-29T03:13:11.000Z | 2022-03-24T06:06:31.000Z | # -*- coding:utf-8 -*-
"""
固定数量模式CTA: Normalized Return Model
Project: alphahunter
Author: HJQuant
Description: Asynchronous driven quantitative trading framework
"""
import numpy as np
import pandas as pd
from quant import const
from quant.market import Kline
from quant.interface.model_api import ModelAPI
from quant.interface.ah_math import AHMath
from quant.interface.ta_lib import TaLib
from quant.interface.kline_generator import KlineGenerator
class RsiModel(object):
def __init__(self):
#这个model订阅‘BTC/USDT’
self.symbols = ['BTC/USDT']
self.mode_params = {
'fixed_volume': 0.04, #每次买卖0.04个btc
'rsiWindow': 14,
'rsiLong': 50+20,
'rsiShort': 50-20,
}
self.running_status = 'running'
self.factor = np.nan
self.signal = np.nan #model返回的信号值,这个值是介于-1.0到1.0之间的一个浮点数
self.target_position = {'BTC': 0}
self.last_kline_end_dt = None
self.latency = 2*60*1000 #两分钟
self.talib = TaLib()
async def on_time(self):
''' 每5秒定时被驱动,检查k线是否断连'''
if self.running_status == 'stopping': #如果是停止状态就不工作了
return
now = ModelAPI.current_milli_timestamp()
if self.last_kline_end_dt == None:
self.last_kline_end_dt = now
if now - self.last_kline_end_dt > self.latency: #超过2分钟
self.factor = np.nan
self.signal = np.nan
self.target_position['BTC'] = 0.0
self.running_status = 'stopping'
async def on_kline_update_callback(self, kline: Kline):
''' 最新1分钟k线来了,我们需要更新此model的signal'''
if self.running_status == 'stopping': #如果是停止状态就不工作了
return
if kline.symbol not in self.symbols:
return
self.last_kline_end_dt = kline.end_dt
self.talib.kline_update(kline)
if not self.talib.inited:
return
self.generate_factor() #产生因子
self.generate_signal() #通过因子生成信号
self.generate_target_position() #通过信号生成仓位
def generate_factor(self):
self.factor = self.talib.rsi(self.mode_params['rsiWindow'])
def generate_signal(self):
if self.factor >= self.mode_params['rsiLong']:
self.signal = 1.0
elif self.factor <= self.mode_params['rsiShort']:
self.signal = -1.0
else:
self.signal = np.nan
def generate_target_position(self):
if self.target_position['BTC'] == 0 and self.signal == 1:
self.target_position['BTC'] = self.signal * self.mode_params['fixed_volume']
elif self.target_position['BTC'] == 0 and self.signal == -1:
self.target_position['BTC'] = self.signal * self.mode_params['fixed_volume']
elif self.target_position['BTC'] > 0 and self.signal == -1:
self.target_position['BTC'] = 0
elif self.target_position['BTC'] < 0 and self.signal == 1:
self.target_position['BTC'] = 0
elif self.target_position['BTC'] != 0 and np.isnan(self.signal):
self.target_position['BTC'] = 0
class CciModel(object):
def __init__(self):
#这个model订阅‘BTC/USDT’
self.symbols = ['BTC/USDT']
self.mode_params = {
'fixed_volume': 0.04, #每次买卖0.04个btc
'cciWindow': 30,
'cciLong': 10,
'cciShort': -10,
}
self.running_status = 'running'
self.factor = np.nan
self.signal = np.nan #model返回的信号值,这个值是介于-1.0到1.0之间的一个浮点数
self.target_position = {'BTC': 0}
self.last_kline_end_dt = None
self.latency = 2*60*1000 #两分钟
self.talib = TaLib()
async def on_time(self):
''' 每5秒定时被驱动,检查k线是否断连'''
if self.running_status == 'stopping': #如果是停止状态就不工作了
return
now = ModelAPI.current_milli_timestamp()
if self.last_kline_end_dt == None:
self.last_kline_end_dt = now
if now - self.last_kline_end_dt > self.latency: #超过2分钟
self.factor = np.nan
self.signal = np.nan
self.target_position['BTC'] = 0.0
self.running_status = 'stopping'
async def on_kline_update_callback(self, kline: Kline):
''' 最新1分钟k线来了,我们需要更新此model的signal'''
if self.running_status == 'stopping': #如果是停止状态就不工作了
return
if kline.symbol not in self.symbols:
return
self.last_kline_end_dt = kline.end_dt
self.talib.kline_update(kline)
if not self.talib.inited:
return
self.generate_factor() #产生因子
self.generate_signal() #通过因子生成信号
self.generate_target_position() #通过信号生成仓位
def generate_factor(self):
self.factor = self.talib.cci(self.mode_params['cciWindow'])
def generate_signal(self):
if self.factor >= self.mode_params['cciLong']:
self.signal = 1.0
elif self.factor <= self.mode_params['cciShort']:
self.signal = -1.0
else:
self.signal = np.nan
def generate_target_position(self):
if self.target_position['BTC'] == 0 and self.signal == 1:
self.target_position['BTC'] = self.signal * self.mode_params['fixed_volume']
elif self.target_position['BTC'] == 0 and self.signal == -1:
self.target_position['BTC'] = self.signal * self.mode_params['fixed_volume']
elif self.target_position['BTC'] > 0 and self.signal == -1:
self.target_position['BTC'] = 0
elif self.target_position['BTC'] < 0 and self.signal == 1:
self.target_position['BTC'] = 0
elif self.target_position['BTC'] != 0 and np.isnan(self.signal):
self.target_position['BTC'] = 0
class MaModel(object):
def __init__(self):
#这个model订阅‘BTC/USDT’
self.symbols = ['BTC/USDT']
self.mode_params = {
'fixed_volume': 0.04, #每次买卖0.04个btc
'fastWindow': 5,
'slowWindow': 20,
}
self.running_status = 'running'
self.factor_fastma = np.nan
self.factor_slowma = np.nan
self.signal = np.nan #model返回的信号值,这个值是介于-1.0到1.0之间的一个浮点数
self.target_position = {'BTC': 0}
self.last_kline_end_dt = None
self.latency = 2*60*1000 #两分钟
self.talib = TaLib(24) #5*24=120分钟
self.kg = KlineGenerator(None, const.MARKET_TYPE_KLINE_5M, self.on_5m_kline_update_callback)
async def on_time(self):
''' 每5秒定时被驱动,检查k线是否断连'''
if self.running_status == 'stopping': #如果是停止状态就不工作了
return
now = ModelAPI.current_milli_timestamp()
if self.last_kline_end_dt == None:
self.last_kline_end_dt = now
if now - self.last_kline_end_dt > self.latency: #超过2分钟
self.factor_fastma = np.nan
self.factor_slowma = np.nan
self.signal = np.nan
self.target_position['BTC'] = 0.0
self.running_status = 'stopping'
async def on_kline_update_callback(self, kline: Kline):
if self.running_status == 'stopping': #如果是停止状态就不工作了
return
if kline.symbol not in self.symbols:
return
self.last_kline_end_dt = kline.end_dt
await self.kg.update_bar(kline)
async def on_5m_kline_update_callback(self, kline: Kline):
''' 最新5分钟k线来了,我们需要更新此model的signal'''
self.talib.kline_update(kline)
if not self.talib.inited:
return
self.generate_factor() #产生因子
self.generate_signal() #通过因子生成信号
self.generate_target_position() #通过信号生成仓位
def generate_factor(self):
self.factor_fastma = self.talib.sma(self.mode_params['fastWindow'])
self.factor_slowma = self.talib.sma(self.mode_params['slowWindow'])
def generate_signal(self):
if self.factor_fastma > self.factor_slowma:
self.signal = 1.0
elif self.factor_fastma < self.factor_slowma:
self.signal = -1.0
else:
self.signal = np.nan
def generate_target_position(self):
if self.target_position['BTC'] == 0 and self.signal == 1:
self.target_position['BTC'] = self.signal * self.mode_params['fixed_volume']
elif self.target_position['BTC'] == 0 and self.signal == -1:
self.target_position['BTC'] = self.signal * self.mode_params['fixed_volume']
elif self.target_position['BTC'] > 0 and self.signal == -1:
self.target_position['BTC'] = 0
elif self.target_position['BTC'] < 0 and self.signal == 1:
self.target_position['BTC'] = 0
elif self.target_position['BTC'] != 0 and np.isnan(self.signal):
self.target_position['BTC'] = 0 | 35.67623 | 100 | 0.609765 | 1,080 | 8,705 | 4.725 | 0.125926 | 0.115226 | 0.126984 | 0.148148 | 0.858319 | 0.853028 | 0.836371 | 0.825593 | 0.825593 | 0.825593 | 0 | 0.023455 | 0.275129 | 8,705 | 244 | 101 | 35.67623 | 0.785261 | 0.059621 | 0 | 0.810526 | 0 | 0 | 0.05886 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.063158 | false | 0 | 0.042105 | 0 | 0.184211 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4dddf01bc1b1174ad7eb769c5afe644de968efbb | 98,377 | py | Python | data/python/857cb7db10f2bd3f2a5000f5956e8b53_torr2xbmc.py | maxim5/code-inspector | 14812dfbc7bac1d76c4d9e5be2cdf83fc1c391a1 | [
"Apache-2.0"
] | 5 | 2018-01-03T06:43:07.000Z | 2020-07-30T13:15:29.000Z | data/python/857cb7db10f2bd3f2a5000f5956e8b53_torr2xbmc.py | maxim5/code-inspector | 14812dfbc7bac1d76c4d9e5be2cdf83fc1c391a1 | [
"Apache-2.0"
] | null | null | null | data/python/857cb7db10f2bd3f2a5000f5956e8b53_torr2xbmc.py | maxim5/code-inspector | 14812dfbc7bac1d76c4d9e5be2cdf83fc1c391a1 | [
"Apache-2.0"
] | 2 | 2019-11-04T02:54:49.000Z | 2020-04-24T17:50:46.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
import httplib
import urllib
import urllib2
import re
import sys
import os
import socket
import xbmcplugin
import xbmcgui
import xbmcaddon
import xbmc
import xbmcaddon
import datetime
from BeautifulSoup import BeautifulSoup, BeautifulStoneSoup
from TSCore import TSengine as tsengine
import base64
import time
from database import DataBase
# ---------------------------------------------------------------------------
# Module-level configuration: plugin handle, add-on metadata, user settings,
# on-disk paths, the login POST payload, and AceStream engine port discovery.
# Runs once at plugin invocation (XBMC calls the script per directory request).
# ---------------------------------------------------------------------------
hos = int(sys.argv[1])  # XBMC plugin handle (sys.argv[1]) for xbmcplugin calls
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)  # NOTE(review): created but never used or closed in this chunk
__addon__ = xbmcaddon.Addon( id = 'plugin.video.torrent.tv' )
__language__ = __addon__.getLocalizedString
# Static add-on metadata pulled from addon.xml.
addon_icon = __addon__.getAddonInfo('icon')
addon_fanart = __addon__.getAddonInfo('fanart')
addon_path = __addon__.getAddonInfo('path')
addon_type = __addon__.getAddonInfo('type')
addon_id = __addon__.getAddonInfo('id')
addon_author = __addon__.getAddonInfo('author')
addon_name = __addon__.getAddonInfo('name')
addon_version = __addon__.getAddonInfo('version')
# User-configurable settings (all returned as strings by getSetting,
# booleans arrive as the strings "true"/"false").
prt_file=__addon__.getSetting('port_path')  # path of acestream.port file, if previously discovered
adult = __addon__.getSetting('adult')
login = __addon__.getSetting("login")
passw = __addon__.getSetting("password")
autostart = __addon__.getSetting('autostart')
ch_color = __addon__.getSetting('ch_color')
prog_color = __addon__.getSetting("prog_color")
ch_b = __addon__.getSetting("ch_b")
prog_b = __addon__.getSetting('prog_b')
prog_str = __addon__.getSetting('prog_str')
ch_i = __addon__.getSetting("ch_i")
prog_i = __addon__.getSetting('prog_i')
archive = __addon__.getSetting('archive')
aceport=62062  # default AceStream engine HTTP API port; overridden below if a port file is found
cookie = ""    # cached session cookie; populated lazily by GET()/UpdCookie()
PLUGIN_DATA_PATH = xbmc.translatePath( os.path.join( "special://profile/addon_data", 'plugin.video.torrent.tv') )
if (sys.platform == 'win32') or (sys.platform == 'win64'):
    # On Windows translatePath returns a byte string; decode for unicode paths (Python 2).
    PLUGIN_DATA_PATH = PLUGIN_DATA_PATH.decode('utf-8')
# Separator used between programme lines: space when prog_str enabled, newline otherwise.
if prog_str == "true": pr_str = " "
else: pr_str = chr(10)
# Weekly EPG archive file, keyed by ISO week number ("%W").
PROGRAM_SOURCE_PATH = os.path.join( PLUGIN_DATA_PATH , "%s_inter-tv.zip" % datetime.date.today().strftime("%W") )
db_name = os.path.join(PLUGIN_DATA_PATH, 'tvbase.db')     # local channel database
cookiefile = os.path.join(PLUGIN_DATA_PATH, 'cookie.txt') # persisted session cookie
xbmcplugin.setContent(int(sys.argv[1]), 'episodes')
# Credentials payload for the torrent-tv.ru auth POST (see GetCookie/UpdCookie).
data = urllib.urlencode({
    'email' : login,
    'password' : passw,
    'remember' : 1,
    'enter' : 'enter'
})
############################
# Content type toggles the skin's fanart layout.
if __addon__.getSetting('fanart') == 'false':xbmcplugin.setContent(int(sys.argv[1]), 'movies')
if __addon__.getSetting('fanart') == 'true':xbmcplugin.setContent(int(sys.argv[1]), 'episodes')
############################
# Try to read the AceStream engine port from the configured port file.
try:
    if prt_file:
        gf = open(prt_file, 'r')
        aceport=int(gf.read())
        gf.close()
except: prt_file=None  # NOTE(review): bare except also swallows KeyboardInterrupt
if not prt_file:
    # Fall back to the default Windows location of acestream.port and
    # remember it in settings for next time.
    try:
        fpath= os.path.expanduser("~")
        pfile= os.path.join(fpath,'AppData\Roaming\TorrentStream\engine' ,'acestream.port')
        gf = open(pfile, 'r')
        aceport=int(gf.read())
        gf.close()
        __addon__.setSetting('port_path',pfile)
        print aceport
    except: aceport=62062  # engine not found: assume the stock default port
def construct_request(params):
    # Build a plugin callback URL: this script's own path (sys.argv[0])
    # followed by *params* serialized as a URL query string.
    query = urllib.urlencode(params)
    return sys.argv[0] + '?' + query
def GET(target, post=None):
    # Fetch *target* (POST body *post* when given) using the cached session
    # cookie, re-authenticating transparently when the session has expired.
    # Returns the response body string, or None after a hard HTTP failure
    # (the error is logged and shown to the user).
    try:
        req = urllib2.Request(url = target, data = post)
        # Masquerade as desktop IE so the site serves its normal pages.
        req.add_header('User-Agent', 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1) ; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET4.0C)')
        global cookie
        if cookie != "":
            req.add_header('Cookie', cookie)
        if cookie == "":
            if os.path.exists(cookiefile):
                # Cold start: load the cookie UpdCookie() persisted earlier.
                fgetcook = open(cookiefile, 'r')
                cookie = fgetcook.read()
                del fgetcook  # NOTE(review): drops the name but never close()s the file handle
                try:
                    req.add_header('Cookie', cookie)
                    resp = urllib2.urlopen(req)
                    http = resp.read()
                    # The '????' marker is a mojibake'd Cyrillic string in this copy
                    # (presumably a login-page marker); find() <= 1 is treated as
                    # "marker absent" i.e. we are logged in.
                    if not http.find('????') > 1:
                        return http
                    else:
                        # Stale cookie: re-login and retry the same request once.
                        cookie = UpdCookie()
                        # NOTE(review): if UpdCookie() returned None this sets 'Cookie: None'
                        req.add_header('Cookie', cookie)
                        resp = urllib2.urlopen(req)
                        http = resp.read()
                        if not http.find('????') > 1:
                            return http
                        else:
                            # Still on the login page: report the auth error but
                            # return the body anyway so the caller can proceed.
                            showMessage('Torrent TV', '?????? ???????????', 3000)
                            return http
                    resp.close()  # NOTE(review): unreachable — every branch above returns
                except:
                    # Cookie file was unusable; get a fresh cookie and fall
                    # through to the final request below.
                    cookie = UpdCookie()
                    req.add_header('Cookie', cookie)
            else:
                # No persisted cookie at all: authenticate from scratch.
                cookie = UpdCookie()
                req.add_header('Cookie', cookie)
        resp = urllib2.urlopen(req)
        http = resp.read()
        resp.close()
        return http
    except Exception, e:
        xbmc.log( '[%s]: GET EXCEPT [%s]' % (addon_id, e), 4 )
        showMessage('HTTP ERROR', e, 5000)
def showMessage(message = '', heading='TorrentTV', times = 3000, pics = addon_icon):
try: xbmc.executebuiltin('XBMC.Notification("%s", "%s", %s, "%s")' % (heading.encode('utf-8'), message.encode('utf-8'), times, pics.encode('utf-8')))
except Exception, e:
#xbmc.log( '[%s]: showMessage: Transcoding UTF-8 failed [%s]' % (addon_id, e), 2 )
try: xbmc.executebuiltin('XBMC.Notification("%s", "%s", %s, "%s")' % (heading, message, times, pics))
except Exception, e:
xbmc.log( '[%s]: showMessage: exec failed [%s]' % (addon_id, e), 3 )
def GetCookie(target, post=None):
    """POST credentials to *target* and return the session cookie string.

    On success returns the first "name=value" pair of the Set-Cookie
    header; on an auth-failure page or an exception it only shows a
    notification and returns None implicitly.
    """
    try:
        req = urllib2.Request(url = target, data = post)
        # Same spoofed desktop IE user-agent as GET() uses.
        req.add_header('User-Agent', 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1) ; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET4.0C)')
        resp = urllib2.urlopen(req)
        # Keep only the "name=value" part, dropping cookie attributes.
        cookie = resp.headers['Set-Cookie'].split(";")[0]
        http=resp.read()
        # NOTE(review): '????' is mojibake of a Russian auth-failure marker;
        # absence of it means the login succeeded - confirm against a
        # non-garbled copy of this source.
        if not http.find('????') > 1:
            showMessage('Torrent TV', '???????? ???????????', 3000)
            return cookie
        else: showMessage('Torrent TV', '?????? ???????????', 3000)
    except Exception, e:
        xbmc.log( '[%s]: GET COOKIE EXCEPT [%s]' % (addon_id, e), 4 )
        showMessage('HTTP ERROR: '+str(target), e, 5000)
def UpdCookie():
    """Drop any saved cookie, re-authenticate and persist the new one.

    Tries torrent-tv.ru first, then the 1ttv.org mirror. Returns the new
    cookie string, or None when both hosts refuse authentication or the
    cookie file cannot be written.
    """
    if not os.path.exists(PLUGIN_DATA_PATH):
        os.makedirs(PLUGIN_DATA_PATH)
    if os.path.exists(cookiefile):
        os.remove(cookiefile)
    # Authenticate once per host. (The previous version called GetCookie
    # twice per host: once to compare with None and once for the value,
    # doubling the network round-trips.)
    cookie = GetCookie('http://torrent-tv.ru/auth.php', data)
    if cookie == None:
        cookie = GetCookie('http://1ttv.org/auth.php', data)
    if cookie == None:
        return None
    try:
        # Open the file only after a cookie was obtained, so a failed run
        # no longer leaves an empty, still-open cookie file behind.
        out = open(cookiefile, 'w')
        out.write(cookie)
        out.close()
        return cookie
    except:
        showMessage('Torrent TV', '?????? ???????????')
        return None
def GetScript(params):
    """Open the XBMC text-viewer window (id 10147) and fill it with the
    day's programme for the channel named in params['title'].

    The channel name is mapped to a programme-guide id via the dx dict
    and passed to the YaTv helper; any failure falls back to a
    placeholder string.
    """
    import time
    xbmc.executebuiltin( "ActivateWindow(%d)" % ( 10147 ) )
    window = xbmcgui.Window( 10147 )
    ########################
    try:
        import YaTv
        ncl=dx[params['title']]
        #print ncl
        txtProgram=YaTv.GetPrDay(ncl)
        print txtProgram
    except:
        # Channel unknown or YaTv unavailable: show a placeholder
        # (mojibake of a Russian "no programme" message).
        txtProgram='??? ?????????'
    #########################
    # Brief pause so the window controls exist before we touch them.
    xbmc.sleep(13)
    # NOTE(review): 'ch' is a module-level global, not part of *params* -
    # confirm the caller sets it before invoking GetScript.
    window.getControl(1).setLabel(ch['name'])
    window.getControl(5).setText(txtProgram)
#####################################
dx={
"1+1": "620",
"112 ???????": "921vsetv",
"112": "921vsetv",
"100 ??": "382vsetv",
"2+2": "583",
"24 ???": "16",
"24 ?????": "710",
"24 ???????": "298vsetv",
"2x2": "323",
"365 ????": "250",
"5 ????? (???????)": "586",
"5 ????? ???????": "586",
"8 ?????": "217",
"9 ?? ??????": "782vsetv",
"ab moteurs": "127vsetv",
"Amedia 1": "895vsetv",
"Amedia Premium": "896vsetv",
"Amazing Life": "658",
"Amedia 2": "918",
"Animal Planet": "365",
"Animal Planet HD": "990",
"ATR": "763vsetv",
"A-One": "680",
"A-ONE UA": "772vsetv",
"AXN Sci-Fi": "516",
"SONY Sci-Fi": "516",
"Sony Sci-Fi": "516",
"BBC World News": "828",
"Bridge TV": "151",
"Business": "386vsetv",
"Cartoon Network": "601",
"CCTV 4": "904vsetv",
"CCTV ???????": "598vsetv",
"CBS Drama": "911",
"CBS Reality": "912",
"CNN International": "47vsetv",
"CNL": "392vsetv",
"Comedy TV": "51",
"C Music": "319",
"Da Vinci Learning": "410",
"DIVA Universal Russia": "713",
"Dobro TV": "937",
"Discovery Channel": "325",
"Discovery Science": "409",
"Discovery World": "437",
"Investigation Discovery Europe": "19",
"Investigation Discovery": "19",
"Daring TV": "696vsetv",
"Discovery HD Showcase": "111",
"Discowery HD Showcase": "111",
"Discovery Showcase HD": "111",
"Disney Channel": "150",
"English Club TV": "757",
"Enter Film": "281",
"EuroNews": "23",
"EuroNews UA": "23",
"Europa Plus TV": "681",
"Eurosport": "737",
"Eurosport Int. (Eng)": "737",
"Eurosport 2": "850",
"Eurosport 2 Int. (Eng)": "850",
"Eurosport 2 HD": "850",
"Eurosport HD": "560",
"Extreme Sports": "288",
"Fashion TV": "661",
"Fashion TV HD": "121",
"Fashion HD": "121",
"Fashion One HD": "919",
"Fashion One": "919",
"Fox": "659",
"FOX HD": "659",
"Fox HD": "659",
"Fox Life": "615",
"FOX life HD": "464",
"FOX Life HD": "464",
"Fox Life HD": "464",
"France 24": "187",
"France24": "187",
"Galaxy TV": "924",
"GALAXY": "924",
"Gulli": "810",
"GLAS": "457vsetv",
"HD Life": "415",
"HD ?????": "429",
"HD ?????": "429",
"History Channel": "902vsetv",
"Hustler TV": "666vsetv",
"ICTV": "709",
"iConcerts TV HD": "797vsetv",
"JimJam": "494",
"Jewish News One": "796vsetv",
"JN1": "796vsetv",
"Kids co": "598",
"KidsCo": "598",
"Lale": "911vsetv",
"Look TV": "726vsetv",
"Luxe TV HD": "536vsetv",
"Maxxi-TV": "228",
"MCM Top": "533",
"MGM": "608",
"MGM HD": "934",
"Mezzo": "575",
"Motor TV": "531",
"Motors TV": "531",
"Motors Tv": "531",
"MTV Russia": "1021",
"MTV ??????": "1021",
"MTV Ukraina": "353vsetv",
"MTV Dance": "332",
"MTV Hits UK": "849",
"MTV Rocks": "388",
"MTV Music": "430",
"MTV live HD": "382",
"MTV Live HD": "382",
"Music Box UA": "417vsetv",
"Music Box": "642",
"Russian Music Box": "25",
"myZen.tv HD": "141",
"MyZen TV HD": "141",
"NBA TV": "790vsetv",
"Nat Geo Wild": "807",
"Nat Geo Wild HD": "807",
"National Geographic": "102",
"National Geographic HD": "389",
"News One": "247",
"Nick Jr.": "917",
"Nickelodeon": "567",
"Nickelodeon HD": "423",
"Ocean-TV": "55",
"O-TV": "167",
"Outdoor HD": "322",
"Outdoor Channel": "322",
"Paramount Comedy": "920",
"Playboy TV": "663vsetv",
"Private Spice": "143vsetv",
"Brazzers TV Europe": "143vsetv",
"QTV": "280",
"Real Estate-TV": "481vsetv",
"RTVi": "76",
"RU TV": "258",
"Ru Music": "388vsetv",
"Rusong TV": "591",
"Russian Travel Guide": "648",
"Russia Today Documentary": "788vsetv",
"SHOPping-TV (Ukraine)": "810vsetv",
"SET": "311",
"SET HD": "311",
"S.E.T": "311",
"Sony Turbo": "935",
"Smile of Child": "789",
"Star TV Ukraine": "513vsetv",
"STV": "165",
"Style TV": "119",
"Style tv": "119",
"Teen TV": "448vsetv",
"TiJi": "555",
"TLC": "425",
"TLC Europe": "777vsetv",
"Tonis": "627",
"Tonis HD": "627",
"TVCI": "435",
"TV Rus": "799vsetv",
"TV 1000": "127",
"TV1000": "127",
"TV 1000 Action East": "125",
"TV 1000 Action": "125",
"TV 1000 ??????? ????": "267",
"TV1000 Action East": "125",
"TV1000 Action": "125",
"TV1000 ??????? ????": "267",
"TV1000 Megahit HD": "816vsetv",
"TV1000 Premium HD": "814vsetv",
"TV1000 Comedy HD": "818vsetv",
"TV 1000 Megahit HD": "816vsetv",
"TV 1000 Premium HD": "814vsetv",
"TV 1000 Comedy HD": "818vsetv",
"Travel Channel": "88vsetv",
"Travel Channel HD": "690vsetv",
"Travel+ adventure": "832vsetv",
"TV XXI (TV21)": "309",
#"Ukrainian Fashion": "939",
"Universal Channel": "213",
"Ukrainian Fashion": "773vsetv",
"VH1": "491",
"VH1 Classic": "156",
"Viasat Explorer": "521",
"Viasat Explorer CEE": "521",
"Viasat History": "277",
"Viasat Nature East": "765",
"Viasat Sport": "455",
"Viasat Sport HD": "455",
"VIASAT Sport Baltic": "504vsetv",
"Viasat Sport Baltic": "504vsetv",
"Sport Baltic": "504vsetv",
"Viasat Nature-History HD": "716vsetv",
"Viasat Nature/History HD": "716vsetv",
"World Fashion": "346",
"XSPORT": "748vsetv",
"Xsport": "748vsetv",
"XXL": "664vsetv",
"Zee TV": "626",
"Zoom": "1009",
"???? ????": "153",
"???? ??": "11",
"??????": "918",
"????? ??": "249",
"?????": "249",
"???????? 24": "851",
"??????": "481vsetv",
"????": "454",
"?????????? ????": "986",
"???": "877vsetv",
"???": "272vsetv",
"??????? ? ??????": "333",
"?????": "669",
"???": "139vsetv",
"?????": "479vsetv",
"????": "294vsetv",
"????? ??": "505vsetv",
"???????": "66",
"??????? ???": "747",
"?????": "384",
"????? HD": "384",
"??? ????": "834",
"???????? ????????": "520",
"???????? ????????": "520",
"????????": "304",
"???????? ???????": "695vsetv",
"????? ??": "505",
"????????": "352",
"???": "931",
"???": "931",
"??? ??": "931",
"??? HD": "930",
"????": "113",
"??????": "405",
"????? ??": "178",
"????? ??": "178",
"?????-??": "178",
"????? ??": "178",
"??????????": "705",
"?????????? ?????": "21",
"??????": "201",
"???????? ??": "595",
"??? ??": "273",
"???????": "367",
"????????+": "123",
"?????????": "754vsetv",
"?????": "798",
"?????": "677",
"?????+": "808",
"?????????? ??": "24",
"??????? ????": "532vsetv",
"???????": "879vsetv",
"?1": "453",
"?2": "20vsetv",
"????????": "740",
"?????????": "22",
"??????? ??": "821",
"????????????? ??????": "852",
"??? ???? ???": "769",
"??? ???? ???": "769",
"???": "149vsetv",
"???????? ????": "149vsetv",
"????? ??": "614",
"???????? ???????": "285vsetv",
"??? ??": "481",
"??? HD": "481",
"??-?????": "257",
"????? ??": "920vsetv",
"?1": "632",
"?2": "445vsetv",
"????": "788",
"???? ??": "348vsetv",
"???": "726",
"??? ???????": "145",
"??? ???????": "145",
"????? ??": "799",
"??? ???????": "675",
"?????? 24": "334",
"?????? ???????": "655",
"?????? ???????": "655",
"?????? ???????": "715",
"???????": "82",
"??????? ??": "606vsetv",
"??? ??????": "761vsetv",
"???? ? ????": "618",
"???? ? ????": "618",
"???????????": "31",
"??? ??": "808vsetv",
"?????": "871vsetv",
"???i?": "871vsetv",
"???? ??": "35",
"????? 2.0": "723",
"???? ??????? ????": "477",
"??? ??": "843vsetv",
"????? ?????": "128",
"??????????": "783",
"?????? ????": "455vsetv",
"???": "518",
"???": "162",
"??? ???": "422",
"???+ ???? ????": "644",
"???+ ????????": "462",
"???+ ????????": "8",
"???+ ????????": "71",
"???+ ???? C???": "71",
"???+ ???????": "542",
"???+ ???? ????": "12",
"???+ ???? ????? ????": "485",
"???+ ????????": "566",
"???+ ?????????": "697",
"???+ ??? ??????": "499",
"???+ ??? ?????? HD": "889vsetv",
"???+ ?????": "134",
"???+?????": "134",
"???+ ????? ??????": "183",
"???+ ????? ??????": "183",
"???+ ????? ????": "306",
"???+ ????? ????": "377",
"???+ ????????": "910vsetv",
"???+ ????? ???": "910vsetv",
"???+ ????????": "910vsetv",
"???+ ??????": "358",
"???+ ??????": "664",
"???+ ?????? 2": "563",
"???+ ?????? HD": "664",
"???+ ?????? 2 HD": "563",
"??? (???????)": "140",
"??? ???????????": "884vsetv",
"?2??": "777",
"?2 ??": "777",
"??????": "376",
"???": "926",
"???": "880vsetv",
"??? ???????": "111vsetv",
"???????? ???": "692vsetv",
"????? ? ???????": "617",
"??????? ? ???????": "132",
"??????? ? ??????? HD": "842vsetv",
"???? ???????????": "37",
"?????? ????????????? (???)": "507",
"?????? ???????": "85",
"?????? ?????": "146",
"?????? ????? (??????)": "391",
"?????? ????? (???????)": "339vsetv",
"?????? ????? (???)": "391",
"?????? ????? HD": "983",
"?????? HD": "983",
"?????? ????????????? (??????)": "670vsetv",
"?????? ???????????? (???????)": "773",
"?????? ???????????????": "774",
"?????": "511",
"??????? ??": "940",
"????????": "24vsetv",
"?????? ??": "759vsetv",
"???????????": "161",
"??? ???": "458",
"Pro ???": "458",
"??? ???": "458",
"????? ??": "861vsetv",
"???????????": "685",
"?????????? 21": "434",
"????? ?????": "427",
"???????": "1003",
"???? ???????": "823vsetv",
"??? ??": "363",
"??? ??": "363",
"???": "743",
"??? ??": "689",
"??? ?? (+7)": "572vsetv",
"???": "509",
"????? ??": "6",
"?????? 1": "711",
"?????? 2": "515",
"?????? 24": "291",
"?????? ?": "187",
"?????? HD": "984",
"?????? HD": "984",
"???-???????": "143",
"??? ???????": "143",
"??????? ??????????": "994",
"??????? ????????": "53",
"??????? ?????": "401",
"??????? ???????": "406",
"??????? ????": "296vsetv",
"??????? ??": "663",
"???????": "663",
"?????": "874vsetv",
"????": "447",
"???? ??": "447",
"????? 1": "181",
"????? 1 HD": "554",
"????? 1 (???????)": "270vsetv",
"????? 2 (???????)": "309vsetv",
"?????????? ????????": "275",
"????": "349",
"???": "670",
"???": "79",
"??????": "284",
"???": "576",
"???": "576",
#"???": "694vsetv",
"???-?????????": "145vsetv",
"???-?????????": "145vsetv",
"??-?????-?????????????": "435",
"?? ????? ?????????????": "435",
"???": "776",
"?? 3": "698",
"?? 3 (+3)": "845vsetv",
"???": "650",
"TBi": "650",
"??i": "650",
"???": "649",
"?? ?????": "649",
"????????? 100": "887vsetv",
"???": "353",
"??? Bravo ???????": "737vsetv",
"???+4": "557vsetv",
"????? ??": "637",
"?????-??": "637",
"????????": "173",
"???????????????": "794",
"??????????????? HD": "331",
"???": "479",
"??? ???????": "326",
"??? ????": "75vsetv",
"Ukraine": "326",
"??? ?????": "730",
"???": "730",
"?????": "547",
"???????": "779",
"??i??": "740vsetv",
"???": "689vsetv",
"??????+ ????": "686",
"??????": "328",
"?????? (???????)": "666",
"??????+ (???????)": "753",
"?????? 1 ???????": "666",
"?????? 2 ???????": "753",
"?????": "702",
"??-????": "315",
"?????? ??": "662",
"?": "898",
"???? ??": "412",
"???? ??": "412",
"???? BOX": "412",
"???-??": "685vsetv",
"?????? ??": "431",
"1+1 (??????)": "620",
"112 ??????? (??????)": "921vsetv",
"112 (??????)": "921vsetv",
"100 ?? (??????)": "382vsetv",
"2+2 (??????)": "583",
"24 ??? (??????)": "16",
"24 ????? (??????)": "710",
"24 ??????? (??????)": "298vsetv",
"2x2 (??????)": "323",
"365 ???? (??????)": "250",
"5 ????? (???????) (??????)": "586",
"5 ????? ??????? (??????)": "586",
"8 ????? (??????)": "217",
"9 ?? ?????? (??????)": "782vsetv",
"ab moteurs (??????)": "127vsetv",
"Amedia 1 (??????)": "895vsetv",
"Amedia Premium (??????)": "896vsetv",
"Amazing Life (??????)": "658",
"Amedia 2 (??????)": "918",
"Animal Planet (??????)": "365",
"Animal Planet HD (??????)": "990",
"ATR (??????)": "763vsetv",
"A-One (??????)": "680",
"A-ONE UA (??????)": "772vsetv",
"AXN Sci-Fi (??????)": "516",
"SONY Sci-Fi (??????)": "516",
"Sony Sci-Fi (??????)": "516",
"BBC World News (??????)": "828",
"Bridge TV (??????)": "151",
"Business (??????)": "386vsetv",
"Cartoon Network (??????)": "601",
"CCTV 4 (??????)": "904vsetv",
"CCTV ??????? (??????)": "598vsetv",
"CBS Drama (??????)": "911",
"CBS Reality (??????)": "912",
"CNN International (??????)": "47vsetv",
"CNL (??????)": "392vsetv",
"Comedy TV (??????)": "51",
"C Music (??????)": "319",
"Da Vinci Learning (??????)": "410",
"DIVA Universal Russia (??????)": "713",
"Dobro TV (??????)": "937",
"Discovery Channel (??????)": "325",
"Discovery Science (??????)": "409",
"Discovery World (??????)": "437",
"Investigation Discovery Europe (??????)": "19",
"Investigation Discovery (??????)": "19",
"Daring TV (??????)": "696vsetv",
"Discovery HD Showcase (??????)": "111",
"Discowery HD Showcase (??????)": "111",
"Discovery Showcase HD (??????)": "111",
"Disney Channel (??????)": "150",
"English Club TV (??????)": "757",
"Enter Film (??????)": "281",
"EuroNews (??????)": "23",
"EuroNews UA (??????)": "23",
"Europa Plus TV (??????)": "681",
"Eurosport (??????)": "737",
"Eurosport Int. (Eng) (??????)": "737",
"Eurosport 2 (??????)": "850",
"Eurosport 2 Int. (Eng) (??????)": "850",
"Eurosport 2 HD (??????)": "850",
"Eurosport HD (??????)": "560",
"Extreme Sports (??????)": "288",
"Fashion TV (??????)": "661",
"Fashion TV HD (??????)": "121",
"Fashion HD (??????)": "121",
"Fashion One HD (??????)": "919",
"Fashion One (??????)": "919",
"Fox (??????)": "659",
"FOX HD (??????)": "659",
"Fox HD (??????)": "659",
"Fox Life (??????)": "615",
"FOX life HD (??????)": "464",
"FOX Life HD (??????)": "464",
"Fox Life HD (??????)": "464",
"France 24 (??????)": "187",
"France24 (??????)": "187",
"Galaxy TV (??????)": "924",
"GALAXY (??????)": "924",
"Gulli (??????)": "810",
"GLAS (??????)": "457vsetv",
"HD Life (??????)": "415",
"HD ????? (??????)": "429",
"HD ????? (??????)": "429",
"History Channel (??????)": "902vsetv",
"Hustler TV (??????)": "666vsetv",
"ICTV (??????)": "709",
"iConcerts TV HD (??????)": "797vsetv",
"JimJam (??????)": "494",
"Jewish News One (??????)": "796vsetv",
"JN1 (??????)": "796vsetv",
"Kids co (??????)": "598",
"KidsCo (??????)": "598",
"Lale (??????)": "911vsetv",
"Look TV (??????)": "726vsetv",
"Luxe TV HD (??????)": "536vsetv",
"Maxxi-TV (??????)": "228",
"MCM Top (??????)": "533",
"MGM (??????)": "608",
"MGM HD (??????)": "934",
"Mezzo (??????)": "575",
"Motor TV (??????)": "531",
"Motors TV (??????)": "531",
"Motors Tv (??????)": "531",
"MTV Russia (??????)": "1021",
"MTV ?????? (??????)": "1021",
"MTV Ukraina (??????)": "353vsetv",
"MTV Dance (??????)": "332",
"MTV Hits UK (??????)": "849",
"MTV Rocks (??????)": "388",
"MTV Music (??????)": "430",
"MTV live HD (??????)": "382",
"MTV Live HD (??????)": "382",
"Music Box UA (??????)": "417vsetv",
"Music Box (??????)": "642",
"Russian Music Box (??????)": "25",
"myZen.tv HD (??????)": "141",
"MyZen TV HD (??????)": "141",
"NBA TV (??????)": "790vsetv",
"Nat Geo Wild (??????)": "807",
"Nat Geo Wild HD (??????)": "807",
"National Geographic (??????)": "102",
"National Geographic HD (??????)": "389",
"News One (??????)": "247",
"Nick Jr. (??????)": "917",
"Nickelodeon (??????)": "567",
"Nickelodeon HD (??????)": "423",
"Ocean-TV (??????)": "55",
"O-TV (??????)": "167",
"Outdoor HD (??????)": "322",
"Outdoor Channel (??????)": "322",
"Paramount Comedy (??????)": "920",
"Playboy TV (??????)": "663vsetv",
"Private Spice (??????)": "143vsetv",
"Brazzers TV Europe (??????)": "143vsetv",
"QTV (??????)": "280",
"Real Estate-TV (??????)": "481vsetv",
"RTVi (??????)": "76",
"RU TV (??????)": "258",
"Ru Music (??????)": "388vsetv",
"Rusong TV (??????)": "591",
"Russian Travel Guide (??????)": "648",
"Russia Today Documentary (??????)": "788vsetv",
"SHOPping-TV (Ukraine) (??????)": "810vsetv",
"SET (??????)": "311",
"SET HD (??????)": "311",
"S.E.T (??????)": "311",
"Sony Turbo (??????)": "935",
"Smile of Child (??????)": "789",
"Star TV Ukraine (??????)": "513vsetv",
"STV (??????)": "165",
"Style TV (??????)": "119",
"Style tv (??????)": "119",
"Teen TV (??????)": "448vsetv",
"TiJi (??????)": "555",
"TLC (??????)": "425",
"TLC Europe (??????)": "777vsetv",
"Tonis (??????)": "627",
"Tonis HD (??????)": "627",
"TVCI (??????)": "435",
"TV Rus (??????)": "799vsetv",
"TV 1000 (??????)": "127",
"TV1000 (??????)": "127",
"TV 1000 Action East (??????)": "125",
"TV 1000 Action (??????)": "125",
"TV 1000 ??????? ???? (??????)": "267",
"TV1000 Action East (??????)": "125",
"TV1000 Action (??????)": "125",
"TV1000 ??????? ???? (??????)": "267",
"TV1000 Megahit HD (??????)": "816vsetv",
"TV1000 Premium HD (??????)": "814vsetv",
"TV1000 Comedy HD (??????)": "818vsetv",
"TV 1000 Megahit HD (??????)": "816vsetv",
"TV 1000 Premium HD (??????)": "814vsetv",
"TV 1000 Comedy HD (??????)": "818vsetv",
"Travel Channel (??????)": "88vsetv",
"Travel Channel HD (??????)": "690vsetv",
"Travel+ adventure (??????)": "832vsetv",
"TV XXI (TV21) (??????)": "309",
#"Ukrainian Fashion (??????)": "939",
"Universal Channel (??????)": "213",
"Ukrainian Fashion (??????)": "773vsetv",
"VH1 (??????)": "491",
"VH1 Classic (??????)": "156",
"Viasat Explorer (??????)": "521",
"Viasat Explorer CEE (??????)": "521",
"Viasat History (??????)": "277",
"Viasat Nature East (??????)": "765",
"Viasat Sport (??????)": "455",
"Viasat Sport HD (??????)": "455",
"VIASAT Sport Baltic (??????)": "504vsetv",
"Viasat Sport Baltic (??????)": "504vsetv",
"Sport Baltic (??????)": "504vsetv",
"Viasat Nature-History HD (??????)": "716vsetv",
"Viasat Nature/History HD (??????)": "716vsetv",
"World Fashion (??????)": "346",
"XSPORT (??????)": "748vsetv",
"Xsport (??????)": "748vsetv",
"XXL (??????)": "664vsetv",
"Zee TV (??????)": "626",
"Zoom (??????)": "1009",
"???? ???? (??????)": "153",
"???? ?? (??????)": "11",
"?????? (??????)": "918",
"????? ?? (??????)": "249",
"????? (??????)": "249",
"???????? 24 (??????)": "851",
"?????? (??????)": "481vsetv",
"???? (??????)": "454",
"?????????? ???? (??????)": "986",
"??? (??????)": "877vsetv",
"??? (??????)": "272vsetv",
"??????? ? ?????? (??????)": "333",
"????? (??????)": "669",
"??? (??????)": "139vsetv",
"????? (??????)": "479vsetv",
"???? (??????)": "294vsetv",
"????? ?? (??????)": "505vsetv",
"??????? (??????)": "66",
"??????? ??? (??????)": "747",
"????? (??????)": "384",
"????? HD (??????)": "384",
"??? ???? (??????)": "834",
"???????? ???????? (??????)": "520",
"???????? ???????? (??????)": "520",
"???????? (??????)": "304",
"???????? ??????? (??????)": "695vsetv",
"????? ?? (??????)": "505",
"???????? (??????)": "352",
"??? (??????)": "931",
"??? (??????)": "931",
"??? ?? (??????)": "931",
"??? HD (??????)": "930",
"???? (??????)": "113",
"?????? (??????)": "405",
"????? ?? (??????)": "178",
"????? ?? (??????)": "178",
"?????-?? (??????)": "178",
"????? ?? (??????)": "178",
"?????????? (??????)": "705",
"?????????? ????? (??????)": "21",
"?????? (??????)": "201",
"???????? ?? (??????)": "595",
"??? ?? (??????)": "273",
"??????? (??????)": "367",
"????????+ (??????)": "123",
"????????? (??????)": "754vsetv",
"????? (??????)": "798",
"????? (??????)": "677",
"?????+ (??????)": "808",
"?????????? ?? (??????)": "24",
"??????? ???? (??????)": "532vsetv",
"??????? (??????)": "879vsetv",
"?1 (??????)": "453",
"?2 (??????)": "20vsetv",
"???????? (??????)": "740",
"????????? (??????)": "22",
"??????? ?? (??????)": "821",
"????????????? ?????? (??????)": "852",
"??? ???? ??? (??????)": "769",
"??? ???? ??? (??????)": "769",
"??? (??????)": "149vsetv",
"???????? ???? (??????)": "149vsetv",
"????? ?? (??????)": "614",
"???????? ??????? (??????)": "285vsetv",
"??? ?? (??????)": "481",
"??? HD (??????)": "481",
"??-????? (??????)": "257",
"????? ?? (??????)": "920vsetv",
"?1 (??????)": "632",
"?2 (??????)": "445vsetv",
"???? (??????)": "788",
"???? ?? (??????)": "348vsetv",
"??? (??????)": "726",
"??? ??????? (??????)": "145",
"??? ??????? (??????)": "145",
"????? ?? (??????)": "799",
"??? ??????? (??????)": "675",
"?????? 24 (??????)": "334",
"?????? ??????? (??????)": "655",
"?????? ??????? (??????)": "655",
"?????? ??????? (??????)": "715",
"??????? (??????)": "82",
"??????? ?? (??????)": "606vsetv",
"??? ?????? (??????)": "761vsetv",
"???? ? ???? (??????)": "618",
"???? ? ???? (??????)": "618",
"??????????? (??????)": "31",
"??? ?? (??????)": "808vsetv",
"????? (??????)": "871vsetv",
"???i? (??????)": "871vsetv",
"???? ?? (??????)": "35",
"????? 2.0 (??????)": "723",
"???? ??????? ???? (??????)": "477",
"??? ?? (??????)": "843vsetv",
"????? ????? (??????)": "128",
"?????????? (??????)": "783",
"?????? ???? (??????)": "455vsetv",
"??? (??????)": "518",
"??? (??????)": "162",
"??? ??? (??????)": "422",
"???+ ???? ???? (??????)": "644",
"???+ ???????? (??????)": "462",
"???+ ???????? (??????)": "8",
"???+ ???????? (??????)": "71",
"???+ ???? C??? (??????)": "71",
"???+ ??????? (??????)": "542",
"???+ ???? ???? (??????)": "12",
"???+ ???? ????? ???? (??????)": "485",
"???+ ???????? (??????)": "566",
"???+ ????????? (??????)": "697",
"???+ ??? ?????? (??????)": "499",
"???+ ??? ?????? HD (??????)": "889vsetv",
"???+ ????? (??????)": "134",
"???+????? (??????)": "134",
"???+ ????? ?????? (??????)": "183",
"???+ ????? ?????? (??????)": "183",
"???+ ????? ???? (??????)": "306",
"???+ ????? ???? (??????)": "377",
"???+ ???????? (??????)": "910vsetv",
"???+ ????? ??? (??????)": "910vsetv",
"???+ ???????? (??????)": "910vsetv",
"???+ ?????? (??????)": "358",
"???+ ?????? (??????)": "664",
"???+ ?????? 2 (??????)": "563",
"???+ ?????? HD (??????)": "664",
"???+ ?????? 2 HD (??????)": "563",
"??? (???????) (??????)": "140",
"??? ??????????? (??????)": "884vsetv",
"?2?? (??????)": "777",
"?2 ?? (??????)": "777",
"?????? (??????)": "376",
"??? (??????)": "926",
"??? (??????)": "880vsetv",
"??? ??????? (??????)": "111vsetv",
"???????? ??? (??????)": "692vsetv",
"????? ? ??????? (??????)": "617",
"??????? ? ??????? (??????)": "132",
"??????? ? ??????? HD (??????)": "842vsetv",
"???? ??????????? (??????)": "37",
"?????? ????????????? (???) (??????)": "507",
"?????? ??????? (??????)": "85",
"?????? ????? (??????)": "146",
"?????? ????? (??????) (??????)": "391",
"?????? ????? (???????) (??????)": "339vsetv",
"?????? ????? (???) (??????)": "391",
"?????? ????? HD (??????)": "983",
"?????? HD (??????)": "983",
"?????? ????????????? (??????) (??????)": "670vsetv",
"?????? ???????????? (???????) (??????)": "773",
"?????? ??????????????? (??????)": "774",
"????? (??????)": "511",
"??????? ?? (??????)": "940",
"???????? (??????)": "24vsetv",
"?????? ?? (??????)": "759vsetv",
"??????????? (??????)": "161",
"??? ??? (??????)": "458",
"Pro ??? (??????)": "458",
"??? ??? (??????)": "458",
"????? ?? (??????)": "861vsetv",
"??????????? (??????)": "685",
"?????????? 21 (??????)": "434",
"????? ????? (??????)": "427",
"??????? (??????)": "1003",
"???? ??????? (??????)": "823vsetv",
"??? ?? (??????)": "363",
"??? ?? (??????)": "363",
"??? (??????)": "743",
"??? ?? (??????)": "689",
"??? ?? (+7) (??????)": "572vsetv",
"??? (??????)": "509",
"????? ?? (??????)": "6",
"?????? 1 (??????)": "711",
"?????? 2 (??????)": "515",
"?????? 24 (??????)": "291",
"?????? ? (??????)": "187",
"?????? HD (??????)": "984",
"?????? HD (??????)": "984",
"???-??????? (??????)": "143",
"??? ??????? (??????)": "143",
"??????? ?????????? (??????)": "994",
"??????? ???????? (??????)": "53",
"??????? ????? (??????)": "401",
"??????? ??????? (??????)": "406",
"??????? ???? (??????)": "296vsetv",
"??????? ?? (??????)": "663",
"??????? (??????)": "663",
"????? (??????)": "874vsetv",
"???? (??????)": "447",
"???? ?? (??????)": "447",
"????? 1 (??????)": "181",
"????? 1 HD (??????)": "554",
"????? 1 (???????) (??????)": "270vsetv",
"????? 2 (???????) (??????)": "309vsetv",
"?????????? ???????? (??????)": "275",
"???? (??????)": "349",
"??? (??????)": "670",
"??? (??????)": "79",
"?????? (??????)": "284",
"??? (??????)": "576",
"??? (??????)": "576",
#"??? (??????)": "694vsetv",
"???-????????? (??????)": "145vsetv",
"???-????????? (??????)": "145vsetv",
"??-?????-????????????? (??????)": "435",
"?? ????? ????????????? (??????)": "435",
"??? (??????)": "776",
"?? 3 (??????)": "698",
"?? 3 (+3) (??????)": "845vsetv",
"??? (??????)": "650",
"TBi (??????)": "650",
"??i (??????)": "650",
"??? (??????)": "649",
"?? ????? (??????)": "649",
"????????? 100 (??????)": "887vsetv",
"??? (??????)": "353",
"??? Bravo ??????? (??????)": "737vsetv",
"???+4 (??????)": "557vsetv",
"????? ?? (??????)": "637",
"?????-?? (??????)": "637",
"???????? (??????)": "173",
"??????????????? (??????)": "794",
"??????????????? HD (??????)": "331",
"??? (??????)": "479",
"??? ??????? (??????)": "326",
"??? ???? (??????)": "75vsetv",
"Ukraine (??????)": "326",
"??? ????? (??????)": "730",
"??? (??????)": "730",
"????? (??????)": "547",
"??????? (??????)": "779",
"??i?? (??????)": "740vsetv",
"??? (??????)": "689vsetv",
"??????+ ???? (??????)": "686",
"?????? (??????)": "328",
"?????? (???????) (??????)": "666",
"??????+ (???????) (??????)": "753",
"?????? 1 ??????? (??????)": "666",
"?????? 2 ??????? (??????)": "753",
"????? (??????)": "702",
"??-???? (??????)": "315",
"?????? ?? (??????)": "662",
"? (??????)": "898",
"???? ?? (??????)": "412",
"???? ?? (??????)": "412",
"???? BOX (??????)": "412",
"???-?? (??????)": "685vsetv",
"?????? ?? (??????)": "431",
"1+1(??????)": "620",
"112 ???????(??????)": "921vsetv",
"112(??????)": "921vsetv",
"100 ??(??????)": "382vsetv",
"2+2(??????)": "583",
"24 ???(??????)": "16",
"24 ?????(??????)": "710",
"24 ???????(??????)": "298vsetv",
"2x2(??????)": "323",
"365 ????(??????)": "250",
"5 ????? (???????)(??????)": "586",
"5 ????? ???????(??????)": "586",
"8 ?????(??????)": "217",
"9 ?? ??????(??????)": "782vsetv",
"ab moteurs(??????)": "127vsetv",
"Amedia 1(??????)": "895vsetv",
"Amedia Premium(??????)": "896vsetv",
"Amazing Life(??????)": "658",
"Amedia 2(??????)": "918",
"Animal Planet(??????)": "365",
"Animal Planet HD(??????)": "990",
"ATR(??????)": "763vsetv",
"A-One(??????)": "680",
"A-ONE UA(??????)": "772vsetv",
"AXN Sci-Fi(??????)": "516",
"SONY Sci-Fi(??????)": "516",
"Sony Sci-Fi(??????)": "516",
"BBC World News(??????)": "828",
"Bridge TV(??????)": "151",
"Business(??????)": "386vsetv",
"Cartoon Network(??????)": "601",
"CCTV 4(??????)": "904vsetv",
"CCTV ???????(??????)": "598vsetv",
"CBS Drama(??????)": "911",
"CBS Reality(??????)": "912",
"CNN International(??????)": "47vsetv",
"CNL(??????)": "392vsetv",
"Comedy TV(??????)": "51",
"C Music(??????)": "319",
"Da Vinci Learning(??????)": "410",
"DIVA Universal Russia(??????)": "713",
"Dobro TV(??????)": "937",
"Discovery Channel(??????)": "325",
"Discovery Science(??????)": "409",
"Discovery World(??????)": "437",
"Investigation Discovery Europe(??????)": "19",
"Investigation Discovery(??????)": "19",
"Daring TV(??????)": "696vsetv",
"Discovery HD Showcase(??????)": "111",
"Discowery HD Showcase(??????)": "111",
"Discovery Showcase HD(??????)": "111",
"Disney Channel(??????)": "150",
"English Club TV(??????)": "757",
"Enter Film(??????)": "281",
"EuroNews(??????)": "23",
"EuroNews UA(??????)": "23",
"Europa Plus TV(??????)": "681",
"Eurosport(??????)": "737",
"Eurosport Int. (Eng)(??????)": "737",
"Eurosport 2(??????)": "850",
"Eurosport 2 Int. (Eng)(??????)": "850",
"Eurosport 2 HD(??????)": "850",
"Eurosport HD(??????)": "560",
"Extreme Sports(??????)": "288",
"Fashion TV(??????)": "661",
"Fashion TV HD(??????)": "121",
"Fashion HD(??????)": "121",
"Fashion One HD(??????)": "919",
"Fashion One(??????)": "919",
"Fox(??????)": "659",
"FOX HD(??????)": "659",
"Fox HD(??????)": "659",
"Fox Life(??????)": "615",
"FOX life HD(??????)": "464",
"FOX Life HD(??????)": "464",
"Fox Life HD(??????)": "464",
"France 24(??????)": "187",
"France24(??????)": "187",
"Galaxy TV(??????)": "924",
"GALAXY(??????)": "924",
"Gulli(??????)": "810",
"GLAS(??????)": "457vsetv",
"HD Life(??????)": "415",
"HD ?????(??????)": "429",
"HD ?????(??????)": "429",
"History Channel(??????)": "902vsetv",
"Hustler TV(??????)": "666vsetv",
"ICTV(??????)": "709",
"iConcerts TV HD(??????)": "797vsetv",
"JimJam(??????)": "494",
"Jewish News One(??????)": "796vsetv",
"JN1(??????)": "796vsetv",
"Kids co(??????)": "598",
"KidsCo(??????)": "598",
"Lale(??????)": "911vsetv",
"Look TV(??????)": "726vsetv",
"Luxe TV HD(??????)": "536vsetv",
"Maxxi-TV(??????)": "228",
"MCM Top(??????)": "533",
"MGM(??????)": "608",
"MGM HD(??????)": "934",
"Mezzo(??????)": "575",
"Motor TV(??????)": "531",
"Motors TV(??????)": "531",
"Motors Tv(??????)": "531",
"MTV Russia(??????)": "1021",
"MTV ??????(??????)": "1021",
"MTV Ukraina(??????)": "353vsetv",
"MTV Dance(??????)": "332",
"MTV Hits UK(??????)": "849",
"MTV Rocks(??????)": "388",
"MTV Music(??????)": "430",
"MTV live HD(??????)": "382",
"MTV Live HD(??????)": "382",
"Music Box UA(??????)": "417vsetv",
"Music Box(??????)": "642",
"Russian Music Box(??????)": "25",
"myZen.tv HD(??????)": "141",
"MyZen TV HD(??????)": "141",
"NBA TV(??????)": "790vsetv",
"Nat Geo Wild(??????)": "807",
"Nat Geo Wild HD(??????)": "807",
"National Geographic(??????)": "102",
"National Geographic HD(??????)": "389",
"News One(??????)": "247",
"Nick Jr.(??????)": "917",
"Nickelodeon(??????)": "567",
"Nickelodeon HD(??????)": "423",
"Ocean-TV(??????)": "55",
"O-TV(??????)": "167",
"Outdoor HD(??????)": "322",
"Outdoor Channel(??????)": "322",
"Paramount Comedy(??????)": "920",
"Playboy TV(??????)": "663vsetv",
"Private Spice(??????)": "143vsetv",
"Brazzers TV Europe(??????)": "143vsetv",
"QTV(??????)": "280",
"Real Estate-TV(??????)": "481vsetv",
"RTVi(??????)": "76",
"RU TV(??????)": "258",
"Ru Music(??????)": "388vsetv",
"Rusong TV(??????)": "591",
"Russian Travel Guide(??????)": "648",
"Russia Today Documentary(??????)": "788vsetv",
"SHOPping-TV (Ukraine)(??????)": "810vsetv",
"SET(??????)": "311",
"SET HD(??????)": "311",
"S.E.T(??????)": "311",
"Sony Turbo(??????)": "935",
"Smile of Child(??????)": "789",
"Star TV Ukraine(??????)": "513vsetv",
"STV(??????)": "165",
"Style TV(??????)": "119",
"Style tv(??????)": "119",
"Teen TV(??????)": "448vsetv",
"TiJi(??????)": "555",
"TLC(??????)": "425",
"TLC Europe(??????)": "777vsetv",
"Tonis(??????)": "627",
"Tonis HD(??????)": "627",
"TVCI(??????)": "435",
"TV Rus(??????)": "799vsetv",
"TV 1000(??????)": "127",
"TV1000(??????)": "127",
"TV 1000 Action East(??????)": "125",
"TV 1000 Action(??????)": "125",
"TV 1000 ??????? ????(??????)": "267",
"TV1000 Action East(??????)": "125",
"TV1000 Action(??????)": "125",
"TV1000 ??????? ????(??????)": "267",
"TV1000 Megahit HD(??????)": "816vsetv",
"TV1000 Premium HD(??????)": "814vsetv",
"TV1000 Comedy HD(??????)": "818vsetv",
"TV 1000 Megahit HD(??????)": "816vsetv",
"TV 1000 Premium HD(??????)": "814vsetv",
"TV 1000 Comedy HD(??????)": "818vsetv",
"Travel Channel(??????)": "88vsetv",
"Travel Channel HD(??????)": "690vsetv",
"Travel+ adventure(??????)": "832vsetv",
"TV XXI (TV21)(??????)": "309",
#"Ukrainian Fashion(??????)": "939",
"Universal Channel(??????)": "213",
"Ukrainian Fashion(??????)": "773vsetv",
"VH1(??????)": "491",
"VH1 Classic(??????)": "156",
"Viasat Explorer(??????)": "521",
"Viasat Explorer CEE(??????)": "521",
"Viasat History(??????)": "277",
"Viasat Nature East(??????)": "765",
"Viasat Sport(??????)": "455",
"Viasat Sport HD(??????)": "455",
"VIASAT Sport Baltic(??????)": "504vsetv",
"Viasat Sport Baltic(??????)": "504vsetv",
"Sport Baltic(??????)": "504vsetv",
"Viasat Nature-History HD(??????)": "716vsetv",
"Viasat Nature/History HD(??????)": "716vsetv",
"World Fashion(??????)": "346",
"XSPORT(??????)": "748vsetv",
"Xsport(??????)": "748vsetv",
"XXL(??????)": "664vsetv",
"Zee TV(??????)": "626",
"Zoom(??????)": "1009",
"???? ????(??????)": "153",
"???? ??(??????)": "11",
"??????(??????)": "918",
"????? ??(??????)": "249",
"?????(??????)": "249",
"???????? 24(??????)": "851",
"??????(??????)": "481vsetv",
"????(??????)": "454",
"?????????? ????(??????)": "986",
"???(??????)": "877vsetv",
"???(??????)": "272vsetv",
"??????? ? ??????(??????)": "333",
"?????(??????)": "669",
"???(??????)": "139vsetv",
"?????(??????)": "479vsetv",
"????(??????)": "294vsetv",
"????? ??(??????)": "505vsetv",
"???????(??????)": "66",
"??????? ???(??????)": "747",
"?????(??????)": "384",
"????? HD(??????)": "384",
"??? ????(??????)": "834",
"???????? ????????(??????)": "520",
"???????? ????????(??????)": "520",
"????????(??????)": "304",
"???????? ???????(??????)": "695vsetv",
"????? ??(??????)": "505",
"????????(??????)": "352",
"???(??????)": "931",
"???(??????)": "931",
"??? ??(??????)": "931",
"??? HD(??????)": "930",
"????(??????)": "113",
"??????(??????)": "405",
"????? ??(??????)": "178",
"????? ??(??????)": "178",
"?????-??(??????)": "178",
"????? ??(??????)": "178",
"??????????(??????)": "705",
"?????????? ?????(??????)": "21",
"??????(??????)": "201",
"???????? ??(??????)": "595",
"??? ??(??????)": "273",
"???????(??????)": "367",
"????????+(??????)": "123",
"?????????(??????)": "754vsetv",
"?????(??????)": "798",
"?????(??????)": "677",
"?????+(??????)": "808",
"?????????? ??(??????)": "24",
"??????? ????(??????)": "532vsetv",
"???????(??????)": "879vsetv",
"?1(??????)": "453",
"?2(??????)": "20vsetv",
"????????(??????)": "740",
"?????????(??????)": "22",
"??????? ??(??????)": "821",
"????????????? ??????(??????)": "852",
"??? ???? ???(??????)": "769",
"??? ???? ???(??????)": "769",
"???(??????)": "149vsetv",
"???????? ????(??????)": "149vsetv",
"????? ??(??????)": "614",
"???????? ???????(??????)": "285vsetv",
"??? ??(??????)": "481",
"??? HD(??????)": "481",
"??-?????(??????)": "257",
"????? ??(??????)": "920vsetv",
"?1(??????)": "632",
"?2(??????)": "445vsetv",
"????(??????)": "788",
"???? ??(??????)": "348vsetv",
"???(??????)": "726",
"??? ???????(??????)": "145",
"??? ???????(??????)": "145",
"????? ??(??????)": "799",
"??? ???????(??????)": "675",
"?????? 24(??????)": "334",
"?????? ???????(??????)": "655",
"?????? ???????(??????)": "655",
"?????? ???????(??????)": "715",
"???????(??????)": "82",
"??????? ??(??????)": "606vsetv",
"??? ??????(??????)": "761vsetv",
"???? ? ????(??????)": "618",
"???? ? ????(??????)": "618",
"???????????(??????)": "31",
"??? ??(??????)": "808vsetv",
"?????(??????)": "871vsetv",
"???i?(??????)": "871vsetv",
"???? ??(??????)": "35",
"????? 2.0(??????)": "723",
"???? ??????? ????(??????)": "477",
"??? ??(??????)": "843vsetv",
"????? ?????(??????)": "128",
"??????????(??????)": "783",
"?????? ????(??????)": "455vsetv",
"???(??????)": "518",
"???(??????)": "162",
"??? ???(??????)": "422",
"???+ ???? ????(??????)": "644",
"???+ ????????(??????)": "462",
"???+ ????????(??????)": "8",
"???+ ????????(??????)": "71",
"???+ ???? C???(??????)": "71",
"???+ ???????(??????)": "542",
"???+ ???? ????(??????)": "12",
"???+ ???? ????? ????(??????)": "485",
"???+ ????????(??????)": "566",
"???+ ?????????(??????)": "697",
"???+ ??? ??????(??????)": "499",
"???+ ??? ?????? HD(??????)": "889vsetv",
"???+ ?????(??????)": "134",
"???+?????(??????)": "134",
"???+ ????? ??????(??????)": "183",
"???+ ????? ??????(??????)": "183",
"???+ ????? ????(??????)": "306",
"???+ ????? ????(??????)": "377",
"???+ ????????(??????)": "910vsetv",
"???+ ????? ???(??????)": "910vsetv",
"???+ ????????(??????)": "910vsetv",
"???+ ??????(??????)": "358",
"???+ ??????(??????)": "664",
"???+ ?????? 2(??????)": "563",
"???+ ?????? HD(??????)": "664",
"???+ ?????? 2 HD(??????)": "563",
"??? (???????)(??????)": "140",
"??? ???????????(??????)": "884vsetv",
"?2??(??????)": "777",
"?2 ??(??????)": "777",
"??????(??????)": "376",
"???(??????)": "926",
"???(??????)": "880vsetv",
"??? ???????(??????)": "111vsetv",
"???????? ???(??????)": "692vsetv",
"????? ? ???????(??????)": "617",
"??????? ? ???????(??????)": "132",
"??????? ? ??????? HD(??????)": "842vsetv",
"???? ???????????(??????)": "37",
"?????? ????????????? (???)(??????)": "507",
"?????? ???????(??????)": "85",
"?????? ?????(??????)": "146",
"?????? ????? (??????)(??????)": "391",
"?????? ????? (???????)(??????)": "339vsetv",
"?????? ????? (???)(??????)": "391",
"?????? ????? HD(??????)": "983",
"?????? HD(??????)": "983",
"?????? ????????????? (??????)(??????)": "670vsetv",
"?????? ???????????? (???????)(??????)": "773",
"?????? ???????????????(??????)": "774",
"?????(??????)": "511",
"??????? ??(??????)": "940",
"????????(??????)": "24vsetv",
"?????? ??(??????)": "759vsetv",
"???????????(??????)": "161",
"??? ???(??????)": "458",
"Pro ???(??????)": "458",
"??? ???(??????)": "458",
"????? ??(??????)": "861vsetv",
"???????????(??????)": "685",
"?????????? 21(??????)": "434",
"????? ?????(??????)": "427",
"???????(??????)": "1003",
"???? ???????(??????)": "823vsetv",
"??? ??(??????)": "363",
"??? ??(??????)": "363",
"???(??????)": "743",
"??? ??(??????)": "689",
"??? ?? (+7)(??????)": "572vsetv",
"???(??????)": "509",
"????? ??(??????)": "6",
"?????? 1(??????)": "711",
"?????? 2(??????)": "515",
"?????? 24(??????)": "291",
"?????? ?(??????)": "187",
"?????? HD(??????)": "984",
"?????? HD(??????)": "984",
"???-???????(??????)": "143",
"??? ???????(??????)": "143",
"??????? ??????????(??????)": "994",
"??????? ????????(??????)": "53",
"??????? ?????(??????)": "401",
"??????? ???????(??????)": "406",
"??????? ????(??????)": "296vsetv",
"??????? ??(??????)": "663",
"???????(??????)": "663",
"?????(??????)": "874vsetv",
"????(??????)": "447",
"???? ??(??????)": "447",
"????? 1(??????)": "181",
"????? 1 HD(??????)": "554",
"????? 1 (???????)(??????)": "270vsetv",
"????? 2 (???????)(??????)": "309vsetv",
"?????????? ????????(??????)": "275",
"????(??????)": "349",
"???(??????)": "670",
"???(??????)": "79",
"??????(??????)": "284",
"???(??????)": "576",
"???(??????)": "576",
#"???(??????)": "694vsetv",
"???-?????????(??????)": "145vsetv",
"???-?????????(??????)": "145vsetv",
"??-?????-?????????????(??????)": "435",
"?? ????? ?????????????(??????)": "435",
"???(??????)": "776",
"?? 3(??????)": "698",
"?? 3 (+3)(??????)": "845vsetv",
"???(??????)": "650",
"TBi(??????)": "650",
"??i(??????)": "650",
"???(??????)": "649",
"?? ?????(??????)": "649",
"????????? 100(??????)": "887vsetv",
"???(??????)": "353",
"??? Bravo ???????(??????)": "737vsetv",
"???+4(??????)": "557vsetv",
"????? ??(??????)": "637",
"?????-??(??????)": "637",
"????????(??????)": "173",
"???????????????(??????)": "794",
"??????????????? HD(??????)": "331",
"???(??????)": "479",
"??? ???????(??????)": "326",
"??? ????(??????)": "75vsetv",
"Ukraine(??????)": "326",
"??? ?????(??????)": "730",
"???(??????)": "730",
"?????(??????)": "547",
"???????(??????)": "779",
"??i??(??????)": "740vsetv",
"???(??????)": "689vsetv",
"??????+ ????(??????)": "686",
"??????(??????)": "328",
"?????? (???????)(??????)": "666",
"??????+ (???????)(??????)": "753",
"?????? 1 ???????(??????)": "666",
"?????? 2 ???????(??????)": "753",
"?????(??????)": "702",
"??-????(??????)": "315",
"?????? ??(??????)": "662",
"?(??????)": "898",
"???? ??(??????)": "412",
"???? ??(??????)": "412",
"???? BOX(??????)": "412",
"???-??(??????)": "685vsetv",
"?????? ??(??????)": "431",
"1+1(??????)": "620",
"112 ???????(??????)": "921vsetv",
"112(??????)": "921vsetv",
"100 ??(??????)": "382vsetv",
"2+2(??????)": "583",
"24 ???(??????)": "16",
"24 ?????(??????)": "710",
"24 ???????(??????)": "298vsetv",
"2x2(??????)": "323",
"365 ????(??????)": "250",
"5 ????? (???????)(??????)": "586",
"5 ????? ???????(??????)": "586",
"8 ?????(??????)": "217",
"9 ?? ??????(??????)": "782vsetv",
"ab moteurs(??????)": "127vsetv",
"Amedia 1(??????)": "895vsetv",
"Amedia Premium(??????)": "896vsetv",
"Amazing Life(??????)": "658",
"Amedia 2(??????)": "918",
"Animal Planet(??????)": "365",
"Animal Planet HD(??????)": "990",
"ATR(??????)": "763vsetv",
"A-One(??????)": "680",
"A-ONE UA(??????)": "772vsetv",
"AXN Sci-Fi(??????)": "516",
"SONY Sci-Fi(??????)": "516",
"Sony Sci-Fi(??????)": "516",
"BBC World News(??????)": "828",
"Bridge TV(??????)": "151",
"Business(??????)": "386vsetv",
"Cartoon Network(??????)": "601",
"CCTV 4(??????)": "904vsetv",
"CCTV ???????(??????)": "598vsetv",
"CBS Drama(??????)": "911",
"CBS Reality(??????)": "912",
"CNN International(??????)": "47vsetv",
"CNL(??????)": "392vsetv",
"Comedy TV(??????)": "51",
"C Music(??????)": "319",
"Da Vinci Learning(??????)": "410",
"DIVA Universal Russia(??????)": "713",
"Dobro TV(??????)": "937",
"Discovery Channel(??????)": "325",
"Discovery Science(??????)": "409",
"Discovery World(??????)": "437",
"Investigation Discovery Europe(??????)": "19",
"Investigation Discovery(??????)": "19",
"Daring TV(??????)": "696vsetv",
"Discovery HD Showcase(??????)": "111",
"Discowery HD Showcase(??????)": "111",
"Discovery Showcase HD(??????)": "111",
"Disney Channel(??????)": "150",
"English Club TV(??????)": "757",
"Enter Film(??????)": "281",
"EuroNews(??????)": "23",
"EuroNews UA(??????)": "23",
"Europa Plus TV(??????)": "681",
"Eurosport(??????)": "737",
"Eurosport Int. (Eng)(??????)": "737",
"Eurosport 2(??????)": "850",
"Eurosport 2 Int. (Eng)(??????)": "850",
"Eurosport 2 HD(??????)": "850",
"Eurosport HD(??????)": "560",
"Extreme Sports(??????)": "288",
"Fashion TV(??????)": "661",
"Fashion TV HD(??????)": "121",
"Fashion HD(??????)": "121",
"Fashion One HD(??????)": "919",
"Fashion One(??????)": "919",
"Fox(??????)": "659",
"FOX HD(??????)": "659",
"Fox HD(??????)": "659",
"Fox Life(??????)": "615",
"FOX life HD(??????)": "464",
"FOX Life HD(??????)": "464",
"Fox Life HD(??????)": "464",
"France 24(??????)": "187",
"France24(??????)": "187",
"Galaxy TV(??????)": "924",
"GALAXY(??????)": "924",
"Gulli(??????)": "810",
"GLAS(??????)": "457vsetv",
"HD Life(??????)": "415",
"HD ?????(??????)": "429",
"HD ?????(??????)": "429",
"History Channel(??????)": "902vsetv",
"Hustler TV(??????)": "666vsetv",
"ICTV(??????)": "709",
"iConcerts TV HD(??????)": "797vsetv",
"JimJam(??????)": "494",
"Jewish News One(??????)": "796vsetv",
"JN1(??????)": "796vsetv",
"Kids co(??????)": "598",
"KidsCo(??????)": "598",
"Lale(??????)": "911vsetv",
"Look TV(??????)": "726vsetv",
"Luxe TV HD(??????)": "536vsetv",
"Maxxi-TV(??????)": "228",
"MCM Top(??????)": "533",
"MGM(??????)": "608",
"MGM HD(??????)": "934",
"Mezzo(??????)": "575",
"Motor TV(??????)": "531",
"Motors TV(??????)": "531",
"Motors Tv(??????)": "531",
"MTV Russia(??????)": "1021",
"MTV ??????(??????)": "1021",
"MTV Ukraina(??????)": "353vsetv",
"MTV Dance(??????)": "332",
"MTV Hits UK(??????)": "849",
"MTV Rocks(??????)": "388",
"MTV Music(??????)": "430",
"MTV live HD(??????)": "382",
"MTV Live HD(??????)": "382",
"Music Box UA(??????)": "417vsetv",
"Music Box(??????)": "642",
"Russian Music Box(??????)": "25",
"myZen.tv HD(??????)": "141",
"MyZen TV HD(??????)": "141",
"NBA TV(??????)": "790vsetv",
"Nat Geo Wild(??????)": "807",
"Nat Geo Wild HD(??????)": "807",
"National Geographic(??????)": "102",
"National Geographic HD(??????)": "389",
"News One(??????)": "247",
"Nick Jr.(??????)": "917",
"Nickelodeon(??????)": "567",
"Nickelodeon HD(??????)": "423",
"Ocean-TV(??????)": "55",
"O-TV(??????)": "167",
"Outdoor HD(??????)": "322",
"Outdoor Channel(??????)": "322",
"Paramount Comedy(??????)": "920",
"Playboy TV(??????)": "663vsetv",
"Private Spice(??????)": "143vsetv",
"Brazzers TV Europe(??????)": "143vsetv",
"QTV(??????)": "280",
"Real Estate-TV(??????)": "481vsetv",
"RTVi(??????)": "76",
"RU TV(??????)": "258",
"Ru Music(??????)": "388vsetv",
"Rusong TV(??????)": "591",
"Russian Travel Guide(??????)": "648",
"Russia Today Documentary(??????)": "788vsetv",
"SHOPping-TV (Ukraine)(??????)": "810vsetv",
"SET(??????)": "311",
"SET HD(??????)": "311",
"S.E.T(??????)": "311",
"Sony Turbo(??????)": "935",
"Smile of Child(??????)": "789",
"Star TV Ukraine(??????)": "513vsetv",
"STV(??????)": "165",
"Style TV(??????)": "119",
"Style tv(??????)": "119",
"Teen TV(??????)": "448vsetv",
"TiJi(??????)": "555",
"TLC(??????)": "425",
"TLC Europe(??????)": "777vsetv",
"Tonis(??????)": "627",
"Tonis HD(??????)": "627",
"TVCI(??????)": "435",
"TV Rus(??????)": "799vsetv",
"TV 1000(??????)": "127",
"TV1000(??????)": "127",
"TV 1000 Action East(??????)": "125",
"TV 1000 Action(??????)": "125",
"TV 1000 ??????? ????(??????)": "267",
"TV1000 Action East(??????)": "125",
"TV1000 Action(??????)": "125",
"TV1000 ??????? ????(??????)": "267",
"TV1000 Megahit HD(??????)": "816vsetv",
"TV1000 Premium HD(??????)": "814vsetv",
"TV1000 Comedy HD(??????)": "818vsetv",
"TV 1000 Megahit HD(??????)": "816vsetv",
"TV 1000 Premium HD(??????)": "814vsetv",
"TV 1000 Comedy HD(??????)": "818vsetv",
"Travel Channel(??????)": "88vsetv",
"Travel Channel HD(??????)": "690vsetv",
"Travel+ adventure(??????)": "832vsetv",
"TV XXI (TV21)(??????)": "309",
#"Ukrainian Fashion(??????)": "939",
"Universal Channel(??????)": "213",
"Ukrainian Fashion(??????)": "773vsetv",
"VH1(??????)": "491",
"VH1 Classic(??????)": "156",
"Viasat Explorer(??????)": "521",
"Viasat Explorer CEE(??????)": "521",
"Viasat History(??????)": "277",
"Viasat Nature East(??????)": "765",
"Viasat Sport(??????)": "455",
"Viasat Sport HD(??????)": "455",
"VIASAT Sport Baltic(??????)": "504vsetv",
"Viasat Sport Baltic(??????)": "504vsetv",
"Sport Baltic(??????)": "504vsetv",
"Viasat Nature-History HD(??????)": "716vsetv",
"Viasat Nature/History HD(??????)": "716vsetv",
"World Fashion(??????)": "346",
"XSPORT(??????)": "748vsetv",
"Xsport(??????)": "748vsetv",
"XXL(??????)": "664vsetv",
"Zee TV(??????)": "626",
"Zoom(??????)": "1009",
"???? ????(??????)": "153",
"???? ??(??????)": "11",
"??????(??????)": "918",
"????? ??(??????)": "249",
"?????(??????)": "249",
"???????? 24(??????)": "851",
"??????(??????)": "481vsetv",
"????(??????)": "454",
"?????????? ????(??????)": "986",
"???(??????)": "877vsetv",
"???(??????)": "272vsetv",
"??????? ? ??????(??????)": "333",
"?????(??????)": "669",
"???(??????)": "139vsetv",
"?????(??????)": "479vsetv",
"????(??????)": "294vsetv",
"????? ??(??????)": "505vsetv",
"???????(??????)": "66",
"??????? ???(??????)": "747",
"?????(??????)": "384",
"????? HD(??????)": "384",
"??? ????(??????)": "834",
"???????? ????????(??????)": "520",
"???????? ????????(??????)": "520",
"????????(??????)": "304",
"???????? ???????(??????)": "695vsetv",
"????? ??(??????)": "505",
"????????(??????)": "352",
"???(??????)": "931",
"???(??????)": "931",
"??? ??(??????)": "931",
"??? HD(??????)": "930",
"????(??????)": "113",
"??????(??????)": "405",
"????? ??(??????)": "178",
"????? ??(??????)": "178",
"?????-??(??????)": "178",
"????? ??(??????)": "178",
"??????????(??????)": "705",
"?????????? ?????(??????)": "21",
"??????(??????)": "201",
"???????? ??(??????)": "595",
"??? ??(??????)": "273",
"???????(??????)": "367",
"????????+(??????)": "123",
"?????????(??????)": "754vsetv",
"?????(??????)": "798",
"?????(??????)": "677",
"?????+(??????)": "808",
"?????????? ??(??????)": "24",
"??????? ????(??????)": "532vsetv",
"???????(??????)": "879vsetv",
"?1(??????)": "453",
"?2(??????)": "20vsetv",
"????????(??????)": "740",
"?????????(??????)": "22",
"??????? ??(??????)": "821",
"????????????? ??????(??????)": "852",
"??? ???? ???(??????)": "769",
"??? ???? ???(??????)": "769",
"???(??????)": "149vsetv",
"???????? ????(??????)": "149vsetv",
"????? ??(??????)": "614",
"???????? ???????(??????)": "285vsetv",
"??? ??(??????)": "481",
"??? HD(??????)": "481",
"??-?????(??????)": "257",
"????? ??(??????)": "920vsetv",
"?1(??????)": "632",
"?2(??????)": "445vsetv",
"????(??????)": "788",
"???? ??(??????)": "348vsetv",
"???(??????)": "726",
"??? ???????(??????)": "145",
"??? ???????(??????)": "145",
"????? ??(??????)": "799",
"??? ???????(??????)": "675",
"?????? 24(??????)": "334",
"?????? ???????(??????)": "655",
"?????? ???????(??????)": "655",
"?????? ???????(??????)": "715",
"???????(??????)": "82",
"??????? ??(??????)": "606vsetv",
"??? ??????(??????)": "761vsetv",
"???? ? ????(??????)": "618",
"???? ? ????(??????)": "618",
"???????????(??????)": "31",
"??? ??(??????)": "808vsetv",
"?????(??????)": "871vsetv",
"???i?(??????)": "871vsetv",
"???? ??(??????)": "35",
"????? 2.0(??????)": "723",
"???? ??????? ????(??????)": "477",
"??? ??(??????)": "843vsetv",
"????? ?????(??????)": "128",
"??????????(??????)": "783",
"?????? ????(??????)": "455vsetv",
"???(??????)": "518",
"???(??????)": "162",
"??? ???(??????)": "422",
"???+ ???? ????(??????)": "644",
"???+ ????????(??????)": "462",
"???+ ????????(??????)": "8",
"???+ ????????(??????)": "71",
"???+ ???? C???(??????)": "71",
"???+ ???????(??????)": "542",
"???+ ???? ????(??????)": "12",
"???+ ???? ????? ????(??????)": "485",
"???+ ????????(??????)": "566",
"???+ ?????????(??????)": "697",
"???+ ??? ??????(??????)": "499",
"???+ ??? ?????? HD(??????)": "889vsetv",
"???+ ?????(??????)": "134",
"???+?????(??????)": "134",
"???+ ????? ??????(??????)": "183",
"???+ ????? ??????(??????)": "183",
"???+ ????? ????(??????)": "306",
"???+ ????? ????(??????)": "377",
"???+ ????????(??????)": "910vsetv",
"???+ ????? ???(??????)": "910vsetv",
"???+ ????????(??????)": "910vsetv",
"???+ ??????(??????)": "358",
"???+ ??????(??????)": "664",
"???+ ?????? 2(??????)": "563",
"???+ ?????? HD(??????)": "664",
"???+ ?????? 2 HD(??????)": "563",
"??? (???????)(??????)": "140",
"??? ???????????(??????)": "884vsetv",
"?2??(??????)": "777",
"?2 ??(??????)": "777",
"??????(??????)": "376",
"???(??????)": "926",
"???(??????)": "880vsetv",
"??? ???????(??????)": "111vsetv",
"???????? ???(??????)": "692vsetv",
"????? ? ???????(??????)": "617",
"??????? ? ???????(??????)": "132",
"??????? ? ??????? HD(??????)": "842vsetv",
"???? ???????????(??????)": "37",
"?????? ????????????? (???)(??????)": "507",
"?????? ???????(??????)": "85",
"?????? ?????(??????)": "146",
"?????? ????? (??????)(??????)": "391",
"?????? ????? (???????)(??????)": "339vsetv",
"?????? ????? (???)(??????)": "391",
"?????? ????? HD(??????)": "983",
"?????? HD(??????)": "983",
"?????? ????????????? (??????)(??????)": "670vsetv",
"?????? ???????????? (???????)(??????)": "773",
"?????? ???????????????(??????)": "774",
"?????(??????)": "511",
"??????? ??(??????)": "940",
"????????(??????)": "24vsetv",
"?????? ??(??????)": "759vsetv",
"???????????(??????)": "161",
"??? ???(??????)": "458",
"Pro ???(??????)": "458",
"??? ???(??????)": "458",
"????? ??(??????)": "861vsetv",
"???????????(??????)": "685",
"?????????? 21(??????)": "434",
"????? ?????(??????)": "427",
"???????(??????)": "1003",
"???? ???????(??????)": "823vsetv",
"??? ??(??????)": "363",
"??? ??(??????)": "363",
"???(??????)": "743",
"??? ??(??????)": "689",
"??? ?? (+7)(??????)": "572vsetv",
"???(??????)": "509",
"????? ??(??????)": "6",
"?????? 1(??????)": "711",
"?????? 2(??????)": "515",
"?????? 24(??????)": "291",
"?????? ?(??????)": "187",
"?????? HD(??????)": "984",
"?????? HD(??????)": "984",
"???-???????(??????)": "143",
"??? ???????(??????)": "143",
"??????? ??????????(??????)": "994",
"??????? ????????(??????)": "53",
"??????? ?????(??????)": "401",
"??????? ???????(??????)": "406",
"??????? ????(??????)": "296vsetv",
"??????? ??(??????)": "663",
"???????(??????)": "663",
"?????(??????)": "874vsetv",
"????(??????)": "447",
"???? ??(??????)": "447",
"????? 1(??????)": "181",
"????? 1 HD(??????)": "554",
"????? 1 (???????)(??????)": "270vsetv",
"????? 2 (???????)(??????)": "309vsetv",
"?????????? ????????(??????)": "275",
"????(??????)": "349",
"???(??????)": "670",
"???(??????)": "79",
"??????(??????)": "284",
"???(??????)": "576",
"???(??????)": "576",
#"???(??????)": "694vsetv",
"???-?????????(??????)": "145vsetv",
"???-?????????(??????)": "145vsetv",
"??-?????-?????????????(??????)": "435",
"?? ????? ?????????????(??????)": "435",
"???(??????)": "776",
"?? 3(??????)": "698",
"?? 3 (+3)(??????)": "845vsetv",
"???(??????)": "650",
"TBi(??????)": "650",
"??i(??????)": "650",
"???(??????)": "649",
"?? ?????(??????)": "649",
"????????? 100(??????)": "887vsetv",
"???(??????)": "353",
"??? Bravo ???????(??????)": "737vsetv",
"???+4(??????)": "557vsetv",
"????? ??(??????)": "637",
"?????-??(??????)": "637",
"????????(??????)": "173",
"???????????????(??????)": "794",
"??????????????? HD(??????)": "331",
"???(??????)": "479",
"??? ???????(??????)": "326",
"??? ????(??????)": "75vsetv",
"Ukraine(??????)": "326",
"??? ?????(??????)": "730",
"???(??????)": "730",
"?????(??????)": "547",
"???????(??????)": "779",
"??i??(??????)": "740vsetv",
"???(??????)": "689vsetv",
"??????+ ????(??????)": "686",
"??????(??????)": "328",
"?????? (???????)(??????)": "666",
"??????+ (???????)(??????)": "753",
"?????? 1 ???????(??????)": "666",
"?????? 2 ???????(??????)": "753",
"?????(??????)": "702",
"??-????(??????)": "315",
"?????? ??(??????)": "662",
"?(??????)": "898",
"???? ??(??????)": "412",
"???? ??(??????)": "412",
"???? BOX(??????)": "412",
"???-??(??????)": "685vsetv",
"?????? ??(??????)": "431",
"1+1 (??????)": "620",
"112 ??????? (??????)": "921vsetv",
"112 (??????)": "921vsetv",
"100 ?? (??????)": "382vsetv",
"2+2 (??????)": "583",
"24 ??? (??????)": "16",
"24 ????? (??????)": "710",
"24 ??????? (??????)": "298vsetv",
"2x2 (??????)": "323",
"365 ???? (??????)": "250",
"5 ????? (???????) (??????)": "586",
"5 ????? ??????? (??????)": "586",
"8 ????? (??????)": "217",
"9 ?? ?????? (??????)": "782vsetv",
"ab moteurs (??????)": "127vsetv",
"Amedia 1 (??????)": "895vsetv",
"Amedia Premium (??????)": "896vsetv",
"Amazing Life (??????)": "658",
"Amedia 2 (??????)": "918",
"Animal Planet (??????)": "365",
"Animal Planet HD (??????)": "990",
"ATR (??????)": "763vsetv",
"A-One (??????)": "680",
"A-ONE UA (??????)": "772vsetv",
"AXN Sci-Fi (??????)": "516",
"SONY Sci-Fi (??????)": "516",
"Sony Sci-Fi (??????)": "516",
"BBC World News (??????)": "828",
"Bridge TV (??????)": "151",
"Business (??????)": "386vsetv",
"Cartoon Network (??????)": "601",
"CCTV 4 (??????)": "904vsetv",
"CCTV ??????? (??????)": "598vsetv",
"CBS Drama (??????)": "911",
"CBS Reality (??????)": "912",
"CNN International (??????)": "47vsetv",
"CNL (??????)": "392vsetv",
"Comedy TV (??????)": "51",
"C Music (??????)": "319",
"Da Vinci Learning (??????)": "410",
"DIVA Universal Russia (??????)": "713",
"Dobro TV (??????)": "937",
"Discovery Channel (??????)": "325",
"Discovery Science (??????)": "409",
"Discovery World (??????)": "437",
"Investigation Discovery Europe (??????)": "19",
"Investigation Discovery (??????)": "19",
"Daring TV (??????)": "696vsetv",
"Discovery HD Showcase (??????)": "111",
"Discowery HD Showcase (??????)": "111",
"Discovery Showcase HD (??????)": "111",
"Disney Channel (??????)": "150",
"English Club TV (??????)": "757",
"Enter Film (??????)": "281",
"EuroNews (??????)": "23",
"EuroNews UA (??????)": "23",
"Europa Plus TV (??????)": "681",
"Eurosport (??????)": "737",
"Eurosport Int. (Eng) (??????)": "737",
"Eurosport 2 (??????)": "850",
"Eurosport 2 Int. (Eng) (??????)": "850",
"Eurosport 2 HD (??????)": "850",
"Eurosport HD (??????)": "560",
"Extreme Sports (??????)": "288",
"Fashion TV (??????)": "661",
"Fashion TV HD (??????)": "121",
"Fashion HD (??????)": "121",
"Fashion One HD (??????)": "919",
"Fashion One (??????)": "919",
"Fox (??????)": "659",
"FOX HD (??????)": "659",
"Fox HD (??????)": "659",
"Fox Life (??????)": "615",
"FOX life HD (??????)": "464",
"FOX Life HD (??????)": "464",
"Fox Life HD (??????)": "464",
"France 24 (??????)": "187",
"France24 (??????)": "187",
"Galaxy TV (??????)": "924",
"GALAXY (??????)": "924",
"Gulli (??????)": "810",
"GLAS (??????)": "457vsetv",
"HD Life (??????)": "415",
"HD ????? (??????)": "429",
"HD ????? (??????)": "429",
"History Channel (??????)": "902vsetv",
"Hustler TV (??????)": "666vsetv",
"ICTV (??????)": "709",
"iConcerts TV HD (??????)": "797vsetv",
"JimJam (??????)": "494",
"Jewish News One (??????)": "796vsetv",
"JN1 (??????)": "796vsetv",
"Kids co (??????)": "598",
"KidsCo (??????)": "598",
"Lale (??????)": "911vsetv",
"Look TV (??????)": "726vsetv",
"Luxe TV HD (??????)": "536vsetv",
"Maxxi-TV (??????)": "228",
"MCM Top (??????)": "533",
"MGM (??????)": "608",
"MGM HD (??????)": "934",
"Mezzo (??????)": "575",
"Motor TV (??????)": "531",
"Motors TV (??????)": "531",
"Motors Tv (??????)": "531",
"MTV Russia (??????)": "1021",
"MTV ?????? (??????)": "1021",
"MTV Ukraina (??????)": "353vsetv",
"MTV Dance (??????)": "332",
"MTV Hits UK (??????)": "849",
"MTV Rocks (??????)": "388",
"MTV Music (??????)": "430",
"MTV live HD (??????)": "382",
"MTV Live HD (??????)": "382",
"Music Box UA (??????)": "417vsetv",
"Music Box (??????)": "642",
"Russian Music Box (??????)": "25",
"myZen.tv HD (??????)": "141",
"MyZen TV HD (??????)": "141",
"NBA TV (??????)": "790vsetv",
"Nat Geo Wild (??????)": "807",
"Nat Geo Wild HD (??????)": "807",
"National Geographic (??????)": "102",
"National Geographic HD (??????)": "389",
"News One (??????)": "247",
"Nick Jr. (??????)": "917",
"Nickelodeon (??????)": "567",
"Nickelodeon HD (??????)": "423",
"Ocean-TV (??????)": "55",
"O-TV (??????)": "167",
"Outdoor HD (??????)": "322",
"Outdoor Channel (??????)": "322",
"Paramount Comedy (??????)": "920",
"Playboy TV (??????)": "663vsetv",
"Private Spice (??????)": "143vsetv",
"Brazzers TV Europe (??????)": "143vsetv",
"QTV (??????)": "280",
"Real Estate-TV (??????)": "481vsetv",
"RTVi (??????)": "76",
"RU TV (??????)": "258",
"Ru Music (??????)": "388vsetv",
"Rusong TV (??????)": "591",
"Russian Travel Guide (??????)": "648",
"Russia Today Documentary (??????)": "788vsetv",
"SHOPping-TV (Ukraine) (??????)": "810vsetv",
"SET (??????)": "311",
"SET HD (??????)": "311",
"S.E.T (??????)": "311",
"Sony Turbo (??????)": "935",
"Smile of Child (??????)": "789",
"Star TV Ukraine (??????)": "513vsetv",
"STV (??????)": "165",
"Style TV (??????)": "119",
"Style tv (??????)": "119",
"Teen TV (??????)": "448vsetv",
"TiJi (??????)": "555",
"TLC (??????)": "425",
"TLC Europe (??????)": "777vsetv",
"Tonis (??????)": "627",
"Tonis HD (??????)": "627",
"TVCI (??????)": "435",
"TV Rus (??????)": "799vsetv",
"TV 1000 (??????)": "127",
"TV1000 (??????)": "127",
"TV 1000 Action East (??????)": "125",
"TV 1000 Action (??????)": "125",
"TV 1000 ??????? ???? (??????)": "267",
"TV1000 Action East (??????)": "125",
"TV1000 Action (??????)": "125",
"TV1000 ??????? ???? (??????)": "267",
"TV1000 Megahit HD (??????)": "816vsetv",
"TV1000 Premium HD (??????)": "814vsetv",
"TV1000 Comedy HD (??????)": "818vsetv",
"TV 1000 Megahit HD (??????)": "816vsetv",
"TV 1000 Premium HD (??????)": "814vsetv",
"TV 1000 Comedy HD (??????)": "818vsetv",
"Travel Channel (??????)": "88vsetv",
"Travel Channel HD (??????)": "690vsetv",
"Travel+ adventure (??????)": "832vsetv",
"TV XXI (TV21) (??????)": "309",
#"Ukrainian Fashion (??????)": "939",
"Universal Channel (??????)": "213",
"Ukrainian Fashion (??????)": "773vsetv",
"VH1 (??????)": "491",
"VH1 Classic (??????)": "156",
"Viasat Explorer (??????)": "521",
"Viasat Explorer CEE (??????)": "521",
"Viasat History (??????)": "277",
"Viasat Nature East (??????)": "765",
"Viasat Sport (??????)": "455",
"Viasat Sport HD (??????)": "455",
"VIASAT Sport Baltic (??????)": "504vsetv",
"Viasat Sport Baltic (??????)": "504vsetv",
"Sport Baltic (??????)": "504vsetv",
"Viasat Nature-History HD (??????)": "716vsetv",
"Viasat Nature/History HD (??????)": "716vsetv",
"World Fashion (??????)": "346",
"XSPORT (??????)": "748vsetv",
"Xsport (??????)": "748vsetv",
"XXL (??????)": "664vsetv",
"Zee TV (??????)": "626",
"Zoom (??????)": "1009",
"???? ???? (??????)": "153",
"???? ?? (??????)": "11",
"?????? (??????)": "918",
"????? ?? (??????)": "249",
"????? (??????)": "249",
"???????? 24 (??????)": "851",
"?????? (??????)": "481vsetv",
"???? (??????)": "454",
"?????????? ???? (??????)": "986",
"??? (??????)": "877vsetv",
"??? (??????)": "272vsetv",
"??????? ? ?????? (??????)": "333",
"????? (??????)": "669",
"??? (??????)": "139vsetv",
"????? (??????)": "479vsetv",
"???? (??????)": "294vsetv",
"????? ?? (??????)": "505vsetv",
"??????? (??????)": "66",
"??????? ??? (??????)": "747",
"????? (??????)": "384",
"????? HD (??????)": "384",
"??? ???? (??????)": "834",
"???????? ???????? (??????)": "520",
"???????? ???????? (??????)": "520",
"???????? (??????)": "304",
"???????? ??????? (??????)": "695vsetv",
"????? ?? (??????)": "505",
"???????? (??????)": "352",
"??? (??????)": "931",
"??? (??????)": "931",
"??? ?? (??????)": "931",
"??? HD (??????)": "930",
"???? (??????)": "113",
"?????? (??????)": "405",
"????? ?? (??????)": "178",
"????? ?? (??????)": "178",
"?????-?? (??????)": "178",
"????? ?? (??????)": "178",
"?????????? (??????)": "705",
"?????????? ????? (??????)": "21",
"?????? (??????)": "201",
"???????? ?? (??????)": "595",
"??? ?? (??????)": "273",
"??????? (??????)": "367",
"????????+ (??????)": "123",
"????????? (??????)": "754vsetv",
"????? (??????)": "798",
"????? (??????)": "677",
"?????+ (??????)": "808",
"?????????? ?? (??????)": "24",
"??????? ???? (??????)": "532vsetv",
"??????? (??????)": "879vsetv",
"?1 (??????)": "453",
"?2 (??????)": "20vsetv",
"???????? (??????)": "740",
"????????? (??????)": "22",
"??????? ?? (??????)": "821",
"????????????? ?????? (??????)": "852",
"??? ???? ??? (??????)": "769",
"??? ???? ??? (??????)": "769",
"??? (??????)": "149vsetv",
"???????? ???? (??????)": "149vsetv",
"????? ?? (??????)": "614",
"???????? ??????? (??????)": "285vsetv",
"??? ?? (??????)": "481",
"??? HD (??????)": "481",
"??-????? (??????)": "257",
"????? ?? (??????)": "920vsetv",
"?1 (??????)": "632",
"?2 (??????)": "445vsetv",
"???? (??????)": "788",
"???? ?? (??????)": "348vsetv",
"??? (??????)": "726",
"??? ??????? (??????)": "145",
"??? ??????? (??????)": "145",
"????? ?? (??????)": "799",
"??? ??????? (??????)": "675",
"?????? 24 (??????)": "334",
"?????? ??????? (??????)": "655",
"?????? ??????? (??????)": "655",
"?????? ??????? (??????)": "715",
"??????? (??????)": "82",
"??????? ?? (??????)": "606vsetv",
"??? ?????? (??????)": "761vsetv",
"???? ? ???? (??????)": "618",
"???? ? ???? (??????)": "618",
"??????????? (??????)": "31",
"??? ?? (??????)": "808vsetv",
"????? (??????)": "871vsetv",
"???i? (??????)": "871vsetv",
"???? ?? (??????)": "35",
"????? 2.0 (??????)": "723",
"???? ??????? ???? (??????)": "477",
"??? ?? (??????)": "843vsetv",
"????? ????? (??????)": "128",
"?????????? (??????)": "783",
"?????? ???? (??????)": "455vsetv",
"??? (??????)": "518",
"??? (??????)": "162",
"??? ??? (??????)": "422",
"???+ ???? ???? (??????)": "644",
"???+ ???????? (??????)": "462",
"???+ ???????? (??????)": "8",
"???+ ???????? (??????)": "71",
"???+ ???? C??? (??????)": "71",
"???+ ??????? (??????)": "542",
"???+ ???? ???? (??????)": "12",
"???+ ???? ????? ???? (??????)": "485",
"???+ ???????? (??????)": "566",
"???+ ????????? (??????)": "697",
"???+ ??? ?????? (??????)": "499",
"???+ ??? ?????? HD (??????)": "889vsetv",
"???+ ????? (??????)": "134",
"???+????? (??????)": "134",
"???+ ????? ?????? (??????)": "183",
"???+ ????? ?????? (??????)": "183",
"???+ ????? ???? (??????)": "306",
"???+ ????? ???? (??????)": "377",
"???+ ???????? (??????)": "910vsetv",
"???+ ????? ??? (??????)": "910vsetv",
"???+ ???????? (??????)": "910vsetv",
"???+ ?????? (??????)": "358",
"???+ ?????? (??????)": "664",
"???+ ?????? 2 (??????)": "563",
"???+ ?????? HD (??????)": "664",
"???+ ?????? 2 HD (??????)": "563",
"??? (???????) (??????)": "140",
"??? ??????????? (??????)": "884vsetv",
"?2?? (??????)": "777",
"?2 ?? (??????)": "777",
"?????? (??????)": "376",
"??? (??????)": "926",
"??? (??????)": "880vsetv",
"??? ??????? (??????)": "111vsetv",
"???????? ??? (??????)": "692vsetv",
"????? ? ??????? (??????)": "617",
"??????? ? ??????? (??????)": "132",
"??????? ? ??????? HD (??????)": "842vsetv",
"???? ??????????? (??????)": "37",
"?????? ????????????? (???) (??????)": "507",
"?????? ??????? (??????)": "85",
"?????? ????? (??????)": "146",
"?????? ????? (??????) (??????)": "391",
"?????? ????? (???????) (??????)": "339vsetv",
"?????? ????? (???) (??????)": "391",
"?????? ????? HD (??????)": "983",
"?????? HD (??????)": "983",
"?????? ????????????? (??????) (??????)": "670vsetv",
"?????? ???????????? (???????) (??????)": "773",
"?????? ??????????????? (??????)": "774",
"????? (??????)": "511",
"??????? ?? (??????)": "940",
"???????? (??????)": "24vsetv",
"?????? ?? (??????)": "759vsetv",
"??????????? (??????)": "161",
"??? ??? (??????)": "458",
"Pro ??? (??????)": "458",
"??? ??? (??????)": "458",
"????? ?? (??????)": "861vsetv",
"??????????? (??????)": "685",
"?????????? 21 (??????)": "434",
"????? ????? (??????)": "427",
"??????? (??????)": "1003",
"???? ??????? (??????)": "823vsetv",
"??? ?? (??????)": "363",
"??? ?? (??????)": "363",
"??? (??????)": "743",
"??? ?? (??????)": "689",
"??? ?? (+7) (??????)": "572vsetv",
"??? (??????)": "509",
"????? ?? (??????)": "6",
"?????? 1 (??????)": "711",
"?????? 2 (??????)": "515",
"?????? 24 (??????)": "291",
"?????? ? (??????)": "187",
"?????? HD (??????)": "984",
"?????? HD (??????)": "984",
"???-??????? (??????)": "143",
"??? ??????? (??????)": "143",
"??????? ?????????? (??????)": "994",
"??????? ???????? (??????)": "53",
"??????? ????? (??????)": "401",
"??????? ??????? (??????)": "406",
"??????? ???? (??????)": "296vsetv",
"??????? ?? (??????)": "663",
"??????? (??????)": "663",
"????? (??????)": "874vsetv",
"???? (??????)": "447",
"???? ?? (??????)": "447",
"????? 1 (??????)": "181",
"????? 1 HD (??????)": "554",
"????? 1 (???????) (??????)": "270vsetv",
"????? 2 (???????) (??????)": "309vsetv",
"?????????? ???????? (??????)": "275",
"???? (??????)": "349",
"??? (??????)": "670",
"??? (??????)": "79",
"?????? (??????)": "284",
"??? (??????)": "576",
"??? (??????)": "576",
#"??? (??????)": "694vsetv",
"???-????????? (??????)": "145vsetv",
"???-????????? (??????)": "145vsetv",
"??-?????-????????????? (??????)": "435",
"?? ????? ????????????? (??????)": "435",
"??? (??????)": "776",
"?? 3 (??????)": "698",
"?? 3 (+3) (??????)": "845vsetv",
"??? (??????)": "650",
"TBi (??????)": "650",
"??i (??????)": "650",
"??? (??????)": "649",
"?? ????? (??????)": "649",
"????????? 100 (??????)": "887vsetv",
"??? (??????)": "353",
"??? Bravo ??????? (??????)": "737vsetv",
"???+4 (??????)": "557vsetv",
"????? ?? (??????)": "637",
"?????-?? (??????)": "637",
"???????? (??????)": "173",
"??????????????? (??????)": "794",
"??????????????? HD (??????)": "331",
"??? (??????)": "479",
"??? ??????? (??????)": "326",
"??? ???? (??????)": "75vsetv",
"Ukraine (??????)": "326",
"??? ????? (??????)": "730",
"??? (??????)": "730",
"????? (??????)": "547",
"??????? (??????)": "779",
"??i?? (??????)": "740vsetv",
"??? (??????)": "689vsetv",
"??????+ ???? (??????)": "686",
"?????? (??????)": "328",
"?????? (???????) (??????)": "666",
"??????+ (???????) (??????)": "753",
"?????? 1 ??????? (??????)": "666",
"?????? 2 ??????? (??????)": "753",
"????? (??????)": "702",
"??-???? (??????)": "315",
"?????? ?? (??????)": "662",
"? (??????)": "898",
"???? ?? (??????)": "412",
"???? ?? (??????)": "412",
"???? BOX (??????)": "412",
"???-?? (??????)": "685vsetv",
"?????? ?? (??????)": "431",
}
#####################################
def GetChannelsDB (params):
#########################
try:
import YaTv
except: pass
#########################
db = DataBase(db_name, cookie)
channels = None
if not params.has_key('group'):
return
elif params['group'] == '0':
channels = db.GetChannels(adult = adult)
elif params['group'] == 'hd':
channels = db.GetChannelsHD(adult = adult)
elif params['group'] == 'latest':
channels = db.GetLatestChannels(adult = adult)
elif params['group'] == 'new':
channels = db.GetNewChannels(adult = adult)
elif params['group'] == 'favourite':
channels = db.GetFavouriteChannels(adult = adult)
else:
channels = db.GetChannels(params['group'], adult = adult)
import time
for ch in channels:
img = ch['imgurl']
if __addon__.getSetting('logopack') == 'true':
logo_path = os.path.join(PLUGIN_DATA_PATH, 'logo')
logo_src = os.path.join(logo_path, ch['name'].decode('utf-8') + '.png')
if os.path.exists(logo_src):
img = logo_src
title = ch['name']
if params['group'] == '0' or params['group'] == 'hd' or params['group'] == 'latest' or params['group'] == 'new':
title = '[COLOR FF7092BE]%s:[/COLOR] %s' % (ch['group_name'], title)
###################################
try:
d=[]
ni=dx[ch['name']]
d=YaTv.GetPr(id2=ni)
except:ni=ch['name']
try:prog = d["plot"]
except:prog =""
try:
tbn=d["img"]
if tbn == '': tbn = img
except:tbn = img
try:
genre = d["genre"]
if genre == "": genre = ch['group_name']
except:genre = ch['group_name']
if ch_b == "true":
if ch_i == "true": title = "[I][B][COLOR FF"+ch_color+"]" + title + "[/COLOR][/B][/I]"
else: title = "[B][COLOR FF"+ch_color+"]" + title + "[/COLOR][/B]"
else:
if ch_i == "true": title = "[I][COLOR FF"+ch_color+"]" + title + "[/COLOR][/I]"
else: title = "[COLOR FF"+ch_color+"]" + title + "[/COLOR]"
try:
if d["strt"] > time.time(): title = title
else: title =title +pr_str+d["plttime"]+" "+d["pltprog"]
prog =chr(10)+prog
#if d["strt"] > time.time(): prog1 = ""
#else:
#try:prog1 = d["prog1"]
#except:prog1 = ""
except:
try:title =title +pr_str+d["plttime"]+" "+d["pltprog"]
except:title =title
prog =chr(10)+prog
#try:prog1 = d["prog1"]
#except:prog1 = ""
#if prog1 == "":
#prog1 = title
try:title1 = (d["plttime"]+" "+d["pltprog"]).strip()
except:title1 = title
if __addon__.getSetting('fanart') == 'false':
if __addon__.getSetting('disable') == 'false':
li = xbmcgui.ListItem(title, title, img, img)
li.setProperty('fanart_image', tbn.encode('utf-8'))
else:
li = xbmcgui.ListItem(title, title, img, img)
li.setProperty('fanart_image', img)
else:
if __addon__.getSetting('disable') == 'false':
li = xbmcgui.ListItem(title, title, img, tbn.encode('utf-8'))
else:
li = xbmcgui.ListItem(title, title, img, img)
startTime = time.localtime()#float(item['start'])
endTime = time.localtime()#item['end']
li.setInfo(type = "Video", infoLabels = {"Title": ch['name'], 'year': endTime.tm_year, 'genre': genre, 'plot': prog})
###################################
uri = construct_request({
'func': 'play_ch_db',
'img': img.encode('utf-8'),
'title': title1,
#'studio': prog1,
'file': ch['urlstream'],
'id': ch['id']
})
deluri = construct_request({
'func': 'DelChannel',
'id': ch['id']
})
favouriteuri = construct_request({
'func': 'FavouriteChannel',
'id': ch['id']
})
delfavouriteuri = construct_request({
'func': 'DelFavouriteChannel',
'id': ch['id']
})
deldb = construct_request({
'func': 'DelDB',
})
commands = []
if params['group'] != 'favourite':
commands.append(('[COLOR FF669933]????????[/COLOR][COLOR FFB77D00] ? "?????????"[/COLOR]', 'XBMC.RunPlugin(%s)' % (favouriteuri),))
commands.append(('[COLOR FFCC3333]???????[/COLOR][COLOR FFB77D00] ?? "?????????"[/COLOR]', 'XBMC.RunPlugin(%s)' % (delfavouriteuri),))
commands.append(('??????? ?????', 'XBMC.RunPlugin(%s)' % (deluri),))
commands.append(('??????? ?? ???????', 'XBMC.RunPlugin(%s)' % (deldb),))
li.addContextMenuItems(commands)
xbmcplugin.addDirectoryItem(hos, uri, li)
xbmcplugin.endOfDirectory(hos)
del db
def DelChannel(params):
db = DataBase(db_name, cookie)
db.DelChannel(params['id'])
showMessage(message = '????? ??????')
xbmc.executebuiltin("Container.Refresh")
del db
def FavouriteChannel(params):
db = DataBase(db_name, cookie)
db.FavouriteChannel(params['id'])
showMessage(message = '????? ????????')
xbmc.executebuiltin("Container.Refresh")
del db
def DelFavouriteChannel(params):
db = DataBase(db_name, cookie)
db.DelFavouriteChannel(params['id'])
showMessage(message = '????? ??????')
xbmc.executebuiltin("Container.Refresh")
del db
def DelDB(params):
db = DataBase(db_name, cookie)
#db.RemoveDB()
rem = db.RemoveDB()
if rem == 0:
xbmc.executebuiltin("Container.Refresh")
showMessage(message = '???? ??????? ???????')
elif rem == 1:
showMessage(message = '?? ??????? ??????? ???? ???????')
else:
showMessage(message = '???? ??????? ??? ???????')
del db
def GetChannelsWeb(params):
#########################
try:
import YaTv
except: pass
#########################
http = GET('http://torrent-tv.ru/' + params['file'])
if http == None:
http = GET('http://1ttv.org/' + params['file'])
if http == None:
showMessage('Torrent TV', '????? ?? ????????')
return
beautifulSoup = BeautifulSoup(http)
channels=beautifulSoup.findAll('div', attrs={'class': 'best-channels-content'})
for ch in channels:
link =ch.find('a')['href']
title= ch.find('strong').string.encode('utf-8').replace('\n', '').strip()
img='http://torrent-tv.ru/'+ch.find('img')['src']
if __addon__.getSetting('logopack') == "true":
logo_path = os.path.join(PLUGIN_DATA_PATH, 'logo')
logo_src = os.path.join(logo_path, ch.find('strong').string.replace('\n', '').replace(' ', '') + '.png')
if os.path.exists(logo_src):
img = logo_src
###################################
try:
d=[]
ni=dx[title.strip()]
d=YaTv.GetPr(id2=ni)
except:ni=title.strip()
try:prog = d["plot"]
except:prog =""
try:
tbn=d["img"]
if tbn == '': tbn = img
except:tbn = img
try:genre = d["genre"]
except:genre = ''
if ch_b == "true":
if ch_i == "true": title = "[I][B][COLOR FF"+ch_color+"]" + title + "[/COLOR][/B][/I]"
else: title = "[B][COLOR FF"+ch_color+"]" + title + "[/COLOR][/B]"
else:
if ch_i == "true": title = "[I][COLOR FF"+ch_color+"]" + title + "[/COLOR][/I]"
else: title = "[COLOR FF"+ch_color+"]" + title + "[/COLOR]"
try:
if d["strt"] > time.time(): title = title
else: title =title +pr_str+d["plttime"]+" "+d["pltprog"]
prog =chr(10)+prog
#if d["strt"] > time.time(): prog1 = ""
#else:
#try:prog1 = d["prog1"]
#except:prog1 = ""
except:
try:title =title +pr_str+d["plttime"]+" "+d["pltprog"]
except:title =title
prog =chr(10)+prog
#try:prog1 = d["prog1"]
#except:prog1 = ""
#if prog1 == "":
#prog1 = title
try:title1 = (d["plttime"]+" "+d["pltprog"]).strip()
except:title1 = title
if __addon__.getSetting('fanart') == 'false':
if __addon__.getSetting('disable') == 'false':
li = xbmcgui.ListItem(title, title, img, img)
li.setProperty('fanart_image', tbn.encode('utf-8'))
else:
li = xbmcgui.ListItem(title, title, img, img)
li.setProperty('fanart_image', img)
else:
if __addon__.getSetting('disable') == 'false':
li = xbmcgui.ListItem(title, title, img, tbn.encode('utf-8'))
else:
li = xbmcgui.ListItem(title, title, img, img)
startTime = time.localtime()#float(item['start'])
endTime = time.localtime()#item['end']
li.setInfo(type = "Video", infoLabels = {"Title": title, 'year': endTime.tm_year, 'genre': genre, 'plot': prog} )
###################################
uri = construct_request({
'func': 'play_ch_web',
'img':img.encode('utf-8'),
'title':title1,
'file':link
})
commands = []
li.addContextMenuItems(commands)
xbmcplugin.addDirectoryItem(hos, uri, li)
xbmcplugin.endOfDirectory(hos)
def GetArchive(params):
#date = datetime.datetime.now().timetuple()
#title = str(date.tm_mday) + '-' + str(date.tm_mon) + '-' + str(date.tm_year)
http = GET('http://torrent-tv.ru/' + params['file'])#+'?data='+title)
if http == None:
http = GET('http://1ttv.org/' + params['file'])
if http == None:
showMessage('Torrent TV', '????? ?? ????????')
return
beautifulSoup = BeautifulSoup(http)
channels=beautifulSoup.findAll('div', attrs={'class': 'best-channels-content'})
for ch in channels:
link =ch.find('a')['href']
title= ch.find('strong').string.encode('utf-8').replace('\n', '').strip()
img='http://torrent-tv.ru/'+ch.find('img')['src']
if __addon__.getSetting('logopack') == "true":
logo_path = os.path.join(PLUGIN_DATA_PATH, 'logo')
logo_src = os.path.join(logo_path, ch.find('strong').string.replace('\n', '').replace(' ', '') + '.png')
if os.path.exists(logo_src):
img = logo_src
if ch_b == "true":
if ch_i == "true": title = "[I][B][COLOR FF"+ch_color+"]" + title + "[/COLOR][/B][/I]"
else: title = "[B][COLOR FF"+ch_color+"]" + title + "[/COLOR][/B]"
else:
if ch_i == "true": title = "[I][COLOR FF"+ch_color+"]" + title + "[/COLOR][/I]"
else: title = "[COLOR FF"+ch_color+"]" + title + "[/COLOR]"
li = xbmcgui.ListItem(title, title, img, img)
startTime = time.localtime()
endTime = time.localtime()
li.setInfo(type = "Video", infoLabels = {"Title": title, 'year': endTime.tm_year})
uri = construct_request({
'func': 'getArchiveCalendar',
'img':img.encode('utf-8'),
'title':title,
'file':link
})
xbmcplugin.addDirectoryItem(hos, uri, li, True)
xbmcplugin.endOfDirectory(hos)
def getArchiveCalendar(params):
res = re.compile('&data=.*')
res.findall(params['file'])
if res:
date_site = res.findall(params['file'])[0].replace('&data=', '')
#print date_site
dt = datetime.datetime.fromtimestamp(time.mktime(time.strptime(date_site, '%d-%m-%Y')))
#print dt
for i in range(int(archive)):
#date = datetime.datetime.now() - datetime.timedelta(days=i)
date = dt - datetime.timedelta(days=i)
date = date.timetuple()
title = str(date.tm_mday) + '-' + str(date.tm_mon) + '-' + str(date.tm_year)
li = xbmcgui.ListItem(title)
uri = construct_request({
'func': 'getArchiveDate',
'date': title,
'file': params['file'],
'img': params['img'],
})
xbmcplugin.addDirectoryItem(hos, uri, li, True)
xbmcplugin.endOfDirectory(hos)
def getArchiveDate(params):
date = datetime.datetime.now().timetuple()
title = str(date.tm_mday) + '-' + str(date.tm_mon) + '-' + str(date.tm_year)
http = GET('http://torrent-tv.ru/' + params['file'].replace(title,params['date']))
if http == None:
http = GET('http://1ttv.org/' + params['file'].replace(title,params['date']))
if http == None:
showMessage('Torrent TV', '????? ?? ????????')
return
beautifulSoup = BeautifulSoup(http)
channels=beautifulSoup.findAll('div', attrs={'class': 'best-channels'})
search = channels[0].findAll('p')
for ch in search:
if not ch.find('strong'): continue
link =str(ch.find('a')['href']).replace('\n', '').strip()
title= str(ch.find('a').string.encode('utf-8').replace('\n', '')).strip()
time_title = str(ch.find('strong')).replace('–', '').replace('\n', '').replace('<strong>', '').replace('</strong>', '').strip()
img = params['img']
if prog_b == "true":
if prog_i == "true": title = "[I][B][COLOR FF"+prog_color+"]" + time_title + ' - ' + title + "[/COLOR][/B][/I]"
else: title = "[B][COLOR FF"+prog_color+"]" + time_title + ' - ' + title + "[/COLOR][/B]"
else:
if prog_i == "true": title = "[I][COLOR FF"+prog_color+"]" + time_title + ' - ' + title + "[/COLOR][/I]"
else: title = "[COLOR FF"+prog_color+"]" + time_title + ' - ' + title + "[/COLOR]"
li = xbmcgui.ListItem(title, title, img, img)
startTime = time.localtime()
endTime = time.localtime()
li.setInfo(type = "Video", infoLabels = {"Title": title, 'year': endTime.tm_year})
uri = construct_request({
'func': 'play_ch_web',
'img':img,
'title':title,
'file':link
})
xbmcplugin.addDirectoryItem(hos, uri, li, True)
xbmcplugin.endOfDirectory(hos)
def play_ch_db(params):
xbmc.executebuiltin('Action(Stop)')
try:
page = GET('http://torrent-tv.ru/torrent-online.php?translation=' + str(params['id']), data)
if page == None:
page = GET('http://1ttv.org/torrent-online.php?translation=' + str(params['id']), data)
if page == None:
showMessage('Torrent TV', '????? ?? ????????')
return
res = re.compile('DateTime = ".*"')
res.findall(page)
if res:
DateTime = res.findall(page)[0].replace('DateTime = ', '').replace('"', '')
php = GET('http://www.torrent-tv.ru/calendar.php?date=' + str(int(time.time()))+'453&datetime='+str(DateTime), data)
else:
print "ERROR getting DateTime"
except Exception, e:
print 'play_ch_db ERROR: %s' % e
url = ''
if params['file'] == '':
cookie = ''
if os.path.exists(cookiefile):
fgetcook = open(cookiefile, 'r')
cookie = fgetcook.read()
del fgetcook
if not cookie:
cookie = ''
db = DataBase(db_name, cookie)
url = db.GetUrlsStream(params['id'])
if url.__len__() == 0:
showMessage('?????? ????????? ??????')
return
else:
url = params['file']
if url != '':
TSPlayer = tsengine()
out = None
if url.find('http://') == -1:
out = TSPlayer.load_torrent(url,'PID',port=aceport)
else:
out = TSPlayer.load_torrent(url,'TORRENT',port=aceport)
if out == 'Ok':
TSPlayer.play_url_ind(0,params['title'],addon_icon,params['img'])
db = DataBase(db_name, cookie='')
db.IncChannel(params['id'])
del db
TSPlayer.end()
xbmc.executebuiltin('Container.Refresh')
return
else:
db = DataBase(db_name, cookie)
showMessage('Torrent', '?????????? ???????-??????')
url = db.UpdateUrlsStream([params['id']])
xbmc.executebuiltin('Container.Refresh')
return
url = url[0]['urlstream']
if url != '':
out = None
if url.find('http://') == -1:
print 'TS PID---'+str(url)
out = TSPlayer.load_torrent(url,'PID',port=aceport)
print 'OUT PID---'+str(out)
else:
print 'TS TORRENT'
out = TSPlayer.load_torrent(url,'TORRENT',port=aceport)
if out == 'Ok':
print 'TS OK'
TSPlayer.play_url_ind(0,params['title'],addon_icon,params['img'])
db = DataBase(db_name, cookie)
db.IncChannel(params['id'])
del db
TSPlayer.end()
return
def play_ch_web(params):
xbmc.executebuiltin('Action(Stop)')
http = GET('http://torrent-tv.ru/' + params['file'])
if http == None:
http = GET('http://1ttv.org/' + params['file'])
if http == None:
showMessage('Torrent TV', '????? ?? ????????')
return
beautifulSoup = BeautifulSoup(http)
tget= beautifulSoup.find('div', attrs={'class':'tv-player'})
m=re.search('http:(.+)"', str(tget))
if m:
torr_link= m.group(0).split('"')[0]
m=re.search('http://[0-9]+.[0-9]+.[0-9]+.[0-9]+:[0-9]+', torr_link)
TSplayer=tsengine()
out=TSplayer.load_torrent(torr_link,'TORRENT',port=aceport)
if out=='Ok':
TSplayer.play_url_ind(0,params['title'],addon_icon,params['img'])
TSplayer.end()
#showMessage(message = 'Stop')
else:
m = re.search('load.*', str(tget))
ID = m.group(0).split('"')[1]
try:
TSplayer=tsengine()
out=TSplayer.load_torrent(ID,'PID',port=aceport)
if out=='Ok':
TSplayer.play_url_ind(0,params['title'],addon_icon,params['img'])
TSplayer.end()
except Exception, e:
showMessage(message = e)
xbmc.executebuiltin('Container.Refresh')
#showMessage(message = 'Stop')
def GetParts():
db = DataBase(db_name, cookie)
parts = db.GetParts(adult = adult)
refreshuri = construct_request({
'func': 'Refreshuri'
})
deldb = construct_request({
'func': 'DelDB',
})
commands = []
commands.append(('???????? ?????? ???????', 'XBMC.RunPlugin(%s)' % (refreshuri),))
commands.append(('??????? ?? ???????', 'XBMC.RunPlugin(%s)' % (deldb),))
for part in parts:
li = xbmcgui.ListItem(part['name'])
li.addContextMenuItems(commands)
uri = construct_request({
'func': 'GetChannelsDB',
'group': part['id'],
})
xbmcplugin.addDirectoryItem(hos, uri, li, True)
def Refreshuri(params):
cookie = UpdCookie()
db = DataBase(db_name, cookie)
showMessage('Torrent TV', '???????????? ?????????? ?????????')
db.UpdateDB()
xbmc.executebuiltin('Container.Refresh')
showMessage('Torrent TV', '?????????? ????????? ?????????')
def mainScreen(params):
refreshuri = construct_request({
'func': 'Refreshuri'
})
deldb = construct_request({
'func': 'DelDB',
})
commands = []
commands.append(('???????? ?????? ???????', 'XBMC.RunPlugin(%s)' % (refreshuri),))
commands.append(('??????? ?? ???????', 'XBMC.RunPlugin(%s)' % (deldb),))
li = xbmcgui.ListItem('[COLOR FFB77D00]?????????[/COLOR]')
li.addContextMenuItems(commands)
uri = construct_request({
'func': 'GetChannelsDB',
'title': '?????????',
'group': 'favourite'
})
xbmcplugin.addDirectoryItem(hos, uri, li, True)
li = xbmcgui.ListItem('[COLOR FF00FF00]??? ??????[/COLOR]')
li.addContextMenuItems(commands)
uri = construct_request({
'func': 'GetChannelsDB',
'title': '??? ??????',
'group': '0'
})
xbmcplugin.addDirectoryItem(hos, uri, li, True)
li = xbmcgui.ListItem('[COLOR FF00FF00]????????? ?????????????[/COLOR]')
li.addContextMenuItems(commands)
uri = construct_request({
'func': 'GetChannelsDB',
'title': '????????? ?????????????',
'group': 'latest'
})
xbmcplugin.addDirectoryItem(hos, uri, li, True)
li = xbmcgui.ListItem('[COLOR FF00FF00]HD ??????[/COLOR]')
li.addContextMenuItems(commands)
uri = construct_request({
'func': 'GetChannelsDB',
'title': 'HD ??????',
'group': 'hd'
})
xbmcplugin.addDirectoryItem(hos, uri, li, True)
li = xbmcgui.ListItem('[COLOR FF00FF00]????? ??????[/COLOR]')
li.addContextMenuItems(commands)
uri = construct_request({
'func': 'GetChannelsDB',
'title': '????? ??????',
'group': 'new'
})
xbmcplugin.addDirectoryItem(hos, uri, li, True)
li = xbmcgui.ListItem('[COLOR FF0099FF]?? ?????????[/COLOR]')
li.addContextMenuItems(commands)
uri = construct_request({
'func': 'GetChannelsWeb',
'title': '?? ?????????',
'file': 'on_moderation.php'
})
xbmcplugin.addDirectoryItem(hos, uri, li, True)
li = xbmcgui.ListItem('[COLOR FF0099FF]??????????[/COLOR]')
li.addContextMenuItems(commands)
uri = construct_request({
'func': 'GetChannelsWeb',
'title': '??????????',
'file': 'translations.php'
})
li.addContextMenuItems(commands)
xbmcplugin.addDirectoryItem(hos, uri, li, True)
li = xbmcgui.ListItem('[COLOR FF6495ED]?????[/COLOR]')
li.addContextMenuItems(commands)
uri = construct_request({
'func': 'GetArchive',
'title': '?????',
'file': 'tv-archive.php'
})
li.addContextMenuItems(commands)
xbmcplugin.addDirectoryItem(hos, uri, li, True)
GetParts()
xbmcplugin.endOfDirectory(hos)
from urllib import unquote, quote, quote_plus
def get_params(paramstring):
param=[]
if len(paramstring)>=2:
params=paramstring
cleanedparams=params.replace('?','')
if (params[len(params)-1]=='/'):
params=params[0:len(params)-2]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
if len(param) > 0:
for cur in param:
param[cur] = urllib.unquote_plus(param[cur])
return param
def addon_main():
import datetime
params = get_params(sys.argv[2])
try:
func = params['func']
del params['func']
except:
db = DataBase(db_name, cookie='')
dbver = db.GetDBVer()
if db.GetDBVer() <> 6:
del db
os.remove(db_name)
db = DataBase(db_name, cookie='')
lupd = db.GetLastUpdate()
if lupd == None:
showMessage('Torrent TV', '???????????? ?????????? ?????????')
#UpdCookie()
#if os.path.exists(cookiefile):
#fgetcook = open(cookiefile, 'r')
cookie = UpdCookie()
#del fgetcook
db = DataBase(db_name, cookie)
db.UpdateDB()
showMessage('Torrent TV', '?????????? ????????? ?????????')
else:
nupd = lupd + datetime.timedelta(hours = 7)
if nupd < datetime.datetime.now():
showMessage('Torrent TV', '???????????? ?????????? ?????????')
#UpdCookie()
#if os.path.exists(cookiefile):
#fgetcook = open(cookiefile, 'r')
cookie = UpdCookie()
#del fgetcook
db = DataBase(db_name, cookie)
db.UpdateDB()
showMessage('Torrent TV', '?????????? ????????? ?????????')
del db
func = None
xbmc.log( '[%s]: Primary input' % addon_id, 1 )
mainScreen(params)
if func != None:
try:
pfunc = globals()[func]
except:
pfunc = None
xbmc.log( '[%s]: Function "%s" not found' % (addon_id, func), 4 )
showMessage('Internal addon error', 'Function "%s" not found' % func, 2000)
if pfunc:
pfunc(params)
| 31.744756 | 254 | 0.367901 | 8,047 | 98,377 | 4.450851 | 0.112589 | 0.005863 | 0.012285 | 0.005026 | 0.831695 | 0.811453 | 0.799391 | 0.774989 | 0.76477 | 0.760582 | 0 | 0.092924 | 0.169836 | 98,377 | 3,098 | 255 | 31.755003 | 0.345627 | 0.013052 | 0 | 0.766321 | 0 | 0.001004 | 0.550589 | 0.007669 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.001339 | 0.00837 | null | null | 0.002678 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
129655c4586964b5e51996aad736eccdf905a6dc | 5,700 | py | Python | tests/test_gpt2model.py | katsugeneration/gap-conf-kaggle | 6c91b40a0f7dd30b24cbc3cbab664d187540e159 | [
"MIT"
] | null | null | null | tests/test_gpt2model.py | katsugeneration/gap-conf-kaggle | 6c91b40a0f7dd30b24cbc3cbab664d187540e159 | [
"MIT"
] | null | null | null | tests/test_gpt2model.py | katsugeneration/gap-conf-kaggle | 6c91b40a0f7dd30b24cbc3cbab664d187540e159 | [
"MIT"
] | null | null | null | import pandas
import numpy as np
import utils
import gpt2_estimator
from models import gpt2_model
from nose.tools import eq_, ok_
def test_get_predictions():
    """The top GPT-2 prediction for the context question should be 'Cheryl'."""
    gpt2_estimator.build()
    context = "Phoebe Thomas played Cheryl Cassidy, Pauline's friend and also a year 11 pupil in Simon's class. Dumped her boyfriend following Simon's advice after he wouldn't have sex with her but later realised this was due to him catching crabs off her friend Pauline.\nQ: What's her name?\nA:"
    top_token, top_score = gpt2_estimator.predict(context)[0]
    eq_('Cheryl', top_token)
    eq_(np.float32, type(top_score))
def test_get_scope_sentence():
    """_get_scope_sentence should span only the sentences covering both mentions."""
    text = "Zoe Telford -- played the police officer girlfriend of Simon, Maggie. Dumped by Simon in the final episode of series 1, after he slept with Jenny, and is not seen again. Phoebe Thomas played Cheryl Cassidy, Pauline's friend and also a year 11 pupil in Simon's class. Dumped her boyfriend following Simon's advice after he wouldn't have sex with her but later realised this was due to him catching crabs off her friend Pauline."
    _, index1 = utils.charpos_to_word_index(text, 274, 'her')
    words, index2 = utils.charpos_to_word_index(text, 207, 'Pauline')
    sentence = gpt2_estimator._get_scope_sentence(words, np.array([index1, index2]))
    expected = "Phoebe Thomas played Cheryl Cassidy, Pauline's friend and also a year 11 pupil in Simon's class. Dumped her boyfriend following Simon's advice after he wouldn't have sex with her but later realised this was due to him catching crabs off her friend Pauline."
    eq_(expected, sentence)
def test_get_before_pronounce_sentence_case_true():
    """'her' at offset 274 is used possessively ('her boyfriend')."""
    text = "Zoe Telford -- played the police officer girlfriend of Simon, Maggie. Dumped by Simon in the final episode of series 1, after he slept with Jenny, and is not seen again. Phoebe Thomas played Cheryl Cassidy, Pauline's friend and also a year 11 pupil in Simon's class. Dumped her boyfriend following Simon's advice after he wouldn't have sex with her but later realised this was due to him catching crabs off her friend Pauline."
    words, pronoun_index = utils.charpos_to_word_index(text, 274, 'her')
    eq_(True, gpt2_estimator._check_pronounce_is_possessive(words, pronoun_index))
def test_get_before_pronounce_sentence_case_false():
    """'her' at offset 329 is an object pronoun ('spotted her'), not possessive."""
    text = "Her initial ambition was to become a ``cultured'' private secretary working in the highest cultural circles. A month later, the Michigan League building opened, serving co-eds as the Michigan Union served men. Hartwig began by doing secretarial work there, when Dr. Margaret Bell, the head of women's physical education, spotted her."
    words, pronoun_index = utils.charpos_to_word_index(text, 329, 'her')
    eq_(False, gpt2_estimator._check_pronounce_is_possessive(words, pronoun_index))
def test_get_before_pronounce_sentence():
    """The text preceding the pronoun within its sentence should be 'Dumped'."""
    text = "Zoe Telford -- played the police officer girlfriend of Simon, Maggie. Dumped by Simon in the final episode of series 1, after he slept with Jenny, and is not seen again. Phoebe Thomas played Cheryl Cassidy, Pauline's friend and also a year 11 pupil in Simon's class. Dumped her boyfriend following Simon's advice after he wouldn't have sex with her but later realised this was due to him catching crabs off her friend Pauline."
    words, pronoun_index = utils.charpos_to_word_index(text, 274, 'her')
    eq_("Dumped", gpt2_estimator._get_before_pronounce_sentence(words, pronoun_index))
def test_calcurate_likelihood():
    """Likelihood mass should go to the pronoun slot, not the candidate names."""
    text = "Zoe Telford -- played the police officer girlfriend of Simon, Maggie. Dumped by Simon in the final episode of series 1, after he slept with Jenny, and is not seen again. Phoebe Thomas played Cheryl Cassidy, Pauline's friend and also a year 11 pupil in Simon's class. Dumped her boyfriend following Simon's advice after he wouldn't have sex with her but later realised this was due to him catching crabs off her friend Pauline."
    # Word indices for the pronoun and the two candidate antecedents.
    words, pronoun_index = utils.charpos_to_word_index(text, 274, 'her')
    words, cheryl_index = utils.charpos_to_word_index(text, 191, 'Cheryl')
    words, pauline_index = utils.charpos_to_word_index(text, 207, 'Pauline')
    predicts = gpt2_model.calcurate_likelihood(
        words, np.array([pronoun_index, cheryl_index, pauline_index]))
    ok_(predicts[0] > 0)
    ok_(predicts[1] == 0)
    ok_(predicts[2] == 0)
| 87.692308 | 441 | 0.750351 | 916 | 5,700 | 4.574236 | 0.161572 | 0.025776 | 0.038663 | 0.051551 | 0.822673 | 0.811217 | 0.802387 | 0.784487 | 0.764916 | 0.764916 | 0 | 0.018627 | 0.19 | 5,700 | 64 | 442 | 89.0625 | 0.888889 | 0 | 0 | 0.423077 | 0 | 0.192308 | 0.684035 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.115385 | false | 0 | 0.115385 | 0 | 0.230769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
12a99993ab7f11813de96a71d95402ce5bf52ca5 | 8,725 | py | Python | models/registry.py | ritwikraha/SPACH | 21d2d0a4eddab6f7e17c602d39cb419014363234 | [
"MIT"
] | 104 | 2021-11-25T17:19:45.000Z | 2022-03-23T01:36:54.000Z | models/registry.py | ritwikraha/SPACH | 21d2d0a4eddab6f7e17c602d39cb419014363234 | [
"MIT"
] | 2 | 2022-01-27T12:58:31.000Z | 2022-02-22T09:56:19.000Z | models/registry.py | ritwikraha/SPACH | 21d2d0a4eddab6f7e17c602d39cb419014363234 | [
"MIT"
] | 11 | 2021-12-02T07:47:52.000Z | 2022-03-14T05:00:34.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from timm.models.registry import register_model
from .smlp import sMLPNet
from .spach import Spach, SpachMS
from .shiftvit import ShiftViT
# sMLP
@register_model
def smlpnet_tiny(pretrained=False, **kwargs):
    """sMLPNet-Tiny: dim 80, depths [2, 8, 14, 2], no stochastic depth."""
    return sMLPNet(dim=80, alpha=3, patch_size=4, depths=[2, 8, 14, 2],
                   dp_rate=0.0, **kwargs)
@register_model
def smlpnet_small(pretrained=False, **kwargs):
    """sMLPNet-Small: dim 96, depths [2, 10, 24, 2], drop-path rate 0.2."""
    return sMLPNet(dim=96, alpha=3, patch_size=4, depths=[2, 10, 24, 2],
                   dp_rate=0.2, **kwargs)
@register_model
def smlpnet_base(pretrained=False, **kwargs):
    """sMLPNet-Base: dim 112, depths [2, 10, 24, 2], drop-path rate 0.3."""
    return sMLPNet(dim=112, alpha=3, patch_size=4, depths=[2, 10, 24, 2],
                   dp_rate=0.3, **kwargs)
# SPACH
@register_model
def spach_xxs_patch16_224_mlp(pretrained=False, **kwargs):
    """SPACH-XXS, single-scale, patch 16: 12 MLP mixing blocks."""
    defaults = {'img_size': 224, 'patch_size': 16, 'hidden_dim': 384,
                'token_ratio': 0.5, 'num_heads': 12, 'channel_ratio': 2.0,
                'net_arch': [('mlp', 12)]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return Spach(**defaults)
@register_model
def spach_xxs_patch16_224_conv(pretrained=False, **kwargs):
    """SPACH-XXS, single-scale, patch 16: 12 conv ('pass') mixing blocks."""
    defaults = {'img_size': 224, 'patch_size': 16, 'hidden_dim': 384,
                'token_ratio': 0.5, 'num_heads': 12, 'channel_ratio': 2.0,
                'net_arch': [('pass', 12)]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return Spach(**defaults)
@register_model
def spach_xxs_patch16_224_attn(pretrained=False, **kwargs):
    """SPACH-XXS, single-scale, patch 16: 12 attention blocks (narrower dim)."""
    defaults = {'img_size': 224, 'patch_size': 16, 'hidden_dim': 192,
                'token_ratio': 0.5, 'num_heads': 6, 'channel_ratio': 2.0,
                'net_arch': [('attn', 12)]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return Spach(**defaults)
@register_model
def spach_xs_patch16_224_mlp(pretrained=False, **kwargs):
    """SPACH-XS, single-scale, patch 16: 24 MLP mixing blocks."""
    defaults = {'img_size': 224, 'patch_size': 16, 'hidden_dim': 384,
                'token_ratio': 0.5, 'num_heads': 12, 'channel_ratio': 2.0,
                'net_arch': [('mlp', 24)]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return Spach(**defaults)
@register_model
def spach_xs_patch16_224_conv(pretrained=False, **kwargs):
    """SPACH-XS, single-scale, patch 16: 24 conv ('pass') mixing blocks."""
    defaults = {'img_size': 224, 'patch_size': 16, 'hidden_dim': 384,
                'token_ratio': 0.5, 'num_heads': 12, 'channel_ratio': 2.0,
                'net_arch': [('pass', 24)]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return Spach(**defaults)
@register_model
def spach_xs_patch16_224_attn(pretrained=False, **kwargs):
    """SPACH-XS, single-scale, patch 16: 12 attention blocks."""
    defaults = {'img_size': 224, 'patch_size': 16, 'hidden_dim': 384,
                'token_ratio': 0.5, 'num_heads': 12, 'channel_ratio': 2.0,
                'net_arch': [('attn', 12)]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return Spach(**defaults)
@register_model
def spach_s_patch16_224_mlp(pretrained=False, **kwargs):
    """SPACH-S, single-scale, patch 16: 24 MLP mixing blocks."""
    defaults = {'img_size': 224, 'patch_size': 16, 'hidden_dim': 512,
                'token_ratio': 0.5, 'num_heads': 16, 'channel_ratio': 3.0,
                'net_arch': [('mlp', 24)]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return Spach(**defaults)
@register_model
def spach_s_patch16_224_conv(pretrained=False, **kwargs):
    """SPACH-S, single-scale, patch 16: 24 conv ('pass') mixing blocks."""
    defaults = {'img_size': 224, 'patch_size': 16, 'hidden_dim': 512,
                'token_ratio': 0.5, 'num_heads': 16, 'channel_ratio': 3.0,
                'net_arch': [('pass', 24)]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return Spach(**defaults)
@register_model
def spach_s_patch16_224_attn(pretrained=False, **kwargs):
    """SPACH-S, single-scale, patch 16: 12 attention blocks."""
    defaults = {'img_size': 224, 'patch_size': 16, 'hidden_dim': 512,
                'token_ratio': 0.5, 'num_heads': 16, 'channel_ratio': 3.0,
                'net_arch': [('attn', 12)]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return Spach(**defaults)
@register_model
def spach_ms_xxs_patch4_224_conv(pretrained=False, **kwargs):
    """SPACH-MS-XXS, multi-scale, patch 4: all-conv stages [2, 2, 6, 2]."""
    defaults = {'img_size': 224, 'patch_size': 4, 'hidden_dim': 64,
                'token_ratio': 0.5, 'num_heads': 2, 'channel_ratio': 2.0,
                'net_arch': [[('pass', 2)], [('pass', 2)], [('pass', 6)], [('pass', 2)]]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return SpachMS(**defaults)
@register_model
def spach_ms_xxs_patch4_224_mlp(pretrained=False, **kwargs):
    """SPACH-MS-XXS, multi-scale, patch 4: conv stem stage, then MLP stages."""
    defaults = {'img_size': 224, 'patch_size': 4, 'hidden_dim': 64,
                'token_ratio': 0.5, 'num_heads': 2, 'channel_ratio': 2.0,
                'net_arch': [[('pass', 2)], [('mlp', 2)], [('mlp', 6)], [('mlp', 2)]]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return SpachMS(**defaults)
@register_model
def spach_ms_xxs_patch4_224_attn(pretrained=False, **kwargs):
    """SPACH-MS-XXS, multi-scale, patch 4: conv stem stage, then attention stages."""
    defaults = {'img_size': 224, 'patch_size': 4, 'hidden_dim': 32,
                'token_ratio': 0.5, 'num_heads': 1, 'channel_ratio': 2.0,
                'net_arch': [[('pass', 2)], [('attn', 2)], [('attn', 6)], [('attn', 2)]]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return SpachMS(**defaults)
@register_model
def spach_ms_xs_patch4_224_conv(pretrained=False, **kwargs):
    """SPACH-MS-XS, multi-scale, patch 4: all-conv stages [3, 4, 12, 3]."""
    defaults = {'img_size': 224, 'patch_size': 4, 'hidden_dim': 96,
                'token_ratio': 0.5, 'num_heads': 3, 'channel_ratio': 2.0,
                'net_arch': [[('pass', 3)], [('pass', 4)], [('pass', 12)], [('pass', 3)]]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return SpachMS(**defaults)
@register_model
def spach_ms_xs_patch4_224_mlp(pretrained=False, **kwargs):
    """SPACH-MS-XS, multi-scale, patch 4: conv stem stage, then MLP stages."""
    defaults = {'img_size': 224, 'patch_size': 4, 'hidden_dim': 96,
                'token_ratio': 0.5, 'num_heads': 3, 'channel_ratio': 2.0,
                'net_arch': [[('pass', 3)], [('mlp', 4)], [('mlp', 12)], [('mlp', 3)]]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return SpachMS(**defaults)
@register_model
def spach_ms_xs_patch4_224_attn(pretrained=False, **kwargs):
    """SPACH-MS-XS, multi-scale, patch 4: conv stem stage, then attention stages."""
    defaults = {'img_size': 224, 'patch_size': 4, 'hidden_dim': 64,
                'token_ratio': 0.5, 'num_heads': 2, 'channel_ratio': 2.0,
                'net_arch': [[('pass', 3)], [('attn', 4)], [('attn', 12)], [('attn', 3)]]}
    defaults.update(kwargs)  # caller-supplied kwargs win over defaults
    return SpachMS(**defaults)
@register_model
def spach_ms_s_patch4_224_conv(pretrained=False, **kwargs):
    """Multi-stage SPACH-MS-S factory (224px, patch 4), conv variant ('pass' blocks)."""
    config = dict(
        img_size=224,
        patch_size=4,
        hidden_dim=128,
        token_ratio=0.5,
        num_heads=4,
        channel_ratio=3.0,
        net_arch=[[('pass', 3)], [('pass', 4)], [('pass', 12)], [('pass', 3)]],
    )
    # `pretrained` is accepted for registry-API compatibility but unused here.
    config.update(kwargs)  # caller-supplied overrides take precedence
    return SpachMS(**config)
@register_model
def spach_ms_s_patch4_224_mlp(pretrained=False, **kwargs):
    """Multi-stage SPACH-MS-S factory (224px, patch 4), MLP variant."""
    config = dict(
        img_size=224,
        patch_size=4,
        hidden_dim=128,
        token_ratio=0.5,
        num_heads=4,
        channel_ratio=3.0,
        net_arch=[[('pass', 3)], [('mlp', 4)], [('mlp', 12)], [('mlp', 3)]],
    )
    # `pretrained` is accepted for registry-API compatibility but unused here.
    config.update(kwargs)  # caller-supplied overrides take precedence
    return SpachMS(**config)
@register_model
def spach_ms_s_patch4_224_attn(pretrained=False, **kwargs):
    """Multi-stage SPACH-MS-S factory (224px, patch 4), attention variant."""
    config = dict(
        img_size=224,
        patch_size=4,
        hidden_dim=96,
        token_ratio=0.5,
        num_heads=3,
        channel_ratio=3.0,
        net_arch=[[('pass', 3)], [('attn', 4)], [('attn', 12)], [('attn', 3)]],
    )
    # `pretrained` is accepted for registry-API compatibility but unused here.
    config.update(kwargs)  # caller-supplied overrides take precedence
    return SpachMS(**config)
@register_model
def spach_ms_xs_patch4_224_hybrid(pretrained=False, **kwargs):
    """Multi-stage SPACH-MS-XS hybrid factory: 'pass' blocks early, 'attn' blocks late."""
    config = dict(
        img_size=224,
        patch_size=4,
        hidden_dim=96,
        token_ratio=0.5,
        num_heads=3,
        channel_ratio=2.0,
        net_arch=[[('pass', 3)], [('pass', 4)], [('pass', 2), ('attn', 10)], [('pass', 1), ('attn', 2)]],
    )
    # `pretrained` is accepted for registry-API compatibility but unused here.
    config.update(kwargs)  # caller-supplied overrides take precedence
    return SpachMS(**config)
@register_model
def spach_ms_s_patch4_224_hybrid(pretrained=False, **kwargs):
    """Multi-stage SPACH-MS-S hybrid factory: 'pass' blocks early, 'attn' blocks late."""
    config = dict(
        img_size=224,
        patch_size=4,
        hidden_dim=128,
        token_ratio=0.5,
        num_heads=4,
        channel_ratio=3.0,
        net_arch=[[('pass', 3)], [('pass', 2), ('attn', 2)], [('pass', 2), ('attn', 10)], [('pass', 1), ('attn', 2)]],
    )
    # `pretrained` is accepted for registry-API compatibility but unused here.
    config.update(kwargs)  # caller-supplied overrides take precedence
    return SpachMS(**config)
# shift vit
@register_model
def shiftvit_light_tiny(**kwargs):
    """ShiftViT light-tiny factory.

    Fix: ``**kwargs`` were previously accepted but silently discarded; they
    are now forwarded to ``ShiftViT`` so callers can override the defaults,
    matching the behaviour of the spach_* factories in this file.
    """
    cfgs = dict(embed_dim=96, depths=(2, 2, 6, 2), mlp_ratio=4,
                drop_path_rate=0.2, n_div=12)
    cfgs.update(kwargs)  # caller overrides take precedence
    return ShiftViT(**cfgs)
@register_model
def shiftvit_r4_tiny(**kwargs):
    """ShiftViT r4-tiny factory.

    Fix: ``**kwargs`` were previously accepted but silently discarded; they
    are now forwarded to ``ShiftViT``, matching the spach_* factories.
    """
    cfgs = dict(embed_dim=96, depths=(2, 2, 12, 3), mlp_ratio=4,
                drop_path_rate=0.2, n_div=12)
    cfgs.update(kwargs)  # caller overrides take precedence
    return ShiftViT(**cfgs)
@register_model
def shiftvit_r2_tiny(**kwargs):
    """ShiftViT r2-tiny factory.

    Fix: ``**kwargs`` were previously accepted but silently discarded; they
    are now forwarded to ``ShiftViT``, matching the spach_* factories.
    """
    cfgs = dict(embed_dim=96, depths=(6, 8, 18, 6), mlp_ratio=2,
                drop_path_rate=0.2, n_div=12)
    cfgs.update(kwargs)  # caller overrides take precedence
    return ShiftViT(**cfgs)
@register_model
def shiftvit_light_small(**kwargs):
    """ShiftViT light-small factory.

    Fix: ``**kwargs`` were previously accepted but silently discarded; they
    are now forwarded to ``ShiftViT``, matching the spach_* factories.
    """
    cfgs = dict(embed_dim=96, depths=(2, 2, 18, 2), mlp_ratio=4,
                drop_path_rate=0.4, n_div=12)
    cfgs.update(kwargs)  # caller overrides take precedence
    return ShiftViT(**cfgs)
@register_model
def shiftvit_r4_small(**kwargs):
    """ShiftViT r4-small factory.

    Fix: ``**kwargs`` were previously accepted but silently discarded; they
    are now forwarded to ``ShiftViT``, matching the spach_* factories.
    """
    cfgs = dict(embed_dim=96, depths=(2, 6, 24, 4), mlp_ratio=4,
                drop_path_rate=0.4, n_div=12)
    cfgs.update(kwargs)  # caller overrides take precedence
    return ShiftViT(**cfgs)
@register_model
def shiftvit_r2_small(**kwargs):
    """ShiftViT r2-small factory.

    Fix: ``**kwargs`` were previously accepted but silently discarded; they
    are now forwarded to ``ShiftViT``, matching the spach_* factories.
    """
    cfgs = dict(embed_dim=96, depths=(10, 18, 36, 10), mlp_ratio=2,
                drop_path_rate=0.4, n_div=12)
    cfgs.update(kwargs)  # caller overrides take precedence
    return ShiftViT(**cfgs)
@register_model
def shiftvit_light_base(**kwargs):
    """ShiftViT light-base factory.

    Fix: ``**kwargs`` were previously accepted but silently discarded; they
    are now forwarded to ``ShiftViT``, matching the spach_* factories.
    """
    cfgs = dict(embed_dim=128, depths=(2, 2, 18, 2), mlp_ratio=4,
                drop_path_rate=0.5, n_div=16)
    cfgs.update(kwargs)  # caller overrides take precedence
    return ShiftViT(**cfgs)
@register_model
def shiftvit_r4_base(**kwargs):
    """ShiftViT r4-base factory.

    Fix: ``**kwargs`` were previously accepted but silently discarded; they
    are now forwarded to ``ShiftViT``, matching the spach_* factories.
    """
    cfgs = dict(embed_dim=128, depths=(4, 6, 22, 4), mlp_ratio=4,
                drop_path_rate=0.5, n_div=16)
    cfgs.update(kwargs)  # caller overrides take precedence
    return ShiftViT(**cfgs)
@register_model
def shiftvit_r2_base(**kwargs):
    """ShiftViT r2-base factory.

    Fix: ``**kwargs`` were previously accepted but silently discarded; they
    are now forwarded to ``ShiftViT``, matching the spach_* factories.
    """
    cfgs = dict(embed_dim=128, depths=(10, 18, 36, 10), mlp_ratio=2,
                drop_path_rate=0.6, n_div=16)
    cfgs.update(kwargs)  # caller overrides take precedence
    return ShiftViT(**cfgs)
| 33.301527 | 123 | 0.678854 | 1,397 | 8,725 | 3.999284 | 0.068003 | 0.076785 | 0.091641 | 0.124575 | 0.941471 | 0.933238 | 0.906748 | 0.88169 | 0.848756 | 0.829783 | 0 | 0.076778 | 0.147622 | 8,725 | 261 | 124 | 33.429119 | 0.674466 | 0.010201 | 0 | 0.697917 | 0 | 0 | 0.044032 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0.072917 | 0.020833 | 0 | 0.354167 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
12c1115a53b2c26e367b2b6624f4ba59a18a6491 | 90 | py | Python | drlgeb/ac/__init__.py | mikuh/drlgeb | 5b70834fba6c550f319ea202a691394c2e99e8b5 | [
"MIT"
] | null | null | null | drlgeb/ac/__init__.py | mikuh/drlgeb | 5b70834fba6c550f319ea202a691394c2e99e8b5 | [
"MIT"
] | null | null | null | drlgeb/ac/__init__.py | mikuh/drlgeb | 5b70834fba6c550f319ea202a691394c2e99e8b5 | [
"MIT"
] | null | null | null | from drlgeb.ac.model import ActorCriticModel
from drlgeb.ac.batch_a3c import Master as A3C | 45 | 45 | 0.855556 | 15 | 90 | 5.066667 | 0.666667 | 0.263158 | 0.315789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.024691 | 0.1 | 90 | 2 | 45 | 45 | 0.91358 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
12f3c81f50f65fe057d68a110ed783f3ae53debf | 31,791 | py | Python | cloudendure/cloudendure_api/api/machines_api.py | cloudreach/cloudendure-python | fe922c3e28d309acceea131c55c6b2ce92a6f568 | [
"MIT"
] | 11 | 2019-10-15T20:28:31.000Z | 2021-11-02T03:18:33.000Z | cloudendure/cloudendure_api/api/machines_api.py | cloudreach/cloudendure-python | fe922c3e28d309acceea131c55c6b2ce92a6f568 | [
"MIT"
] | 31 | 2019-09-04T12:47:03.000Z | 2021-07-23T15:23:18.000Z | cloudendure/cloudendure_api/api/machines_api.py | cloudreach/cloudendure-python | fe922c3e28d309acceea131c55c6b2ce92a6f568 | [
"MIT"
] | 9 | 2019-09-05T18:26:17.000Z | 2021-11-02T03:18:35.000Z | # coding: utf-8
"""
CloudEndure API documentation
© 2017 CloudEndure All rights reserved # General Request authentication in CloudEndure's API is done using session cookies. A session cookie is returned upon successful execution of the \"login\" method. This value must then be provided within the request headers of all subsequent API requests. ## Errors Some errors are not specifically written in every method since they may always return. Those are: 1) 401 (Unauthorized) - for unauthenticated requests. 2) 405 (Method Not Allowed) - for using a method that is not supported (POST instead of GET). 3) 403 (Forbidden) - request is authenticated, but the user is not allowed to access. 4) 422 (Unprocessable Entity) - for invalid input. ## Formats All strings with date-time format are according to RFC3339. All strings with \"duration\" format are according to ISO8601. For example, a full day duration can be specified with \"PNNNND\". # noqa: E501
OpenAPI spec version: 5
Contact: https://bit.ly/2T54hSc
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from cloudendure.cloudendure_api.api_client import ApiClient
class MachinesApi:
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """
    # Each endpoint is exposed as a pair: a thin public wrapper (sync by default,
    # async when async_req=True) and a `*_with_http_info` variant that validates
    # parameters and issues the request through the shared ApiClient.
    def __init__(self, api_client=None):
        """Create the API facade.
        :param api_client: optional pre-configured ApiClient; a default
            client is constructed when omitted.
        """
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
    def projects_project_id_machines_delete(
        self, body, project_id, **kwargs
    ): # noqa: E501
        """Uninstall agent # noqa: E501
        Stops replication and removes the cloudendure agent from the specified machines. All cloud artifacts associated with those machines with the exception of launched target machines are deleted. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_machines_delete(body, project_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param object body: The list of machine IDs to remove from the CloudEndure service. (required)
        :param str project_id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs["_return_http_data_only"] = True
        if kwargs.get("async_req"):
            return self.projects_project_id_machines_delete_with_http_info(
                body, project_id, **kwargs
            ) # noqa: E501
        else:
            (data) = self.projects_project_id_machines_delete_with_http_info(
                body, project_id, **kwargs
            ) # noqa: E501
            return data
    def projects_project_id_machines_delete_with_http_info(
        self, body, project_id, **kwargs
    ): # noqa: E501
        """Uninstall agent # noqa: E501
        Stops replication and removes the cloudendure agent from the specified machines. All cloud artifacts associated with those machines with the exception of launched target machines are deleted. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_machines_delete_with_http_info(body, project_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param object body: The list of machine IDs to remove from the CloudEndure service. (required)
        :param str project_id: (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ["body", "project_id"] # noqa: E501
        all_params.append("async_req")
        all_params.append("_return_http_data_only")
        all_params.append("_preload_content")
        all_params.append("_request_timeout")
        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in six.iteritems(params["kwargs"]):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method projects_project_id_machines_delete" % key
                )
            params[key] = val
        del params["kwargs"]
        # verify the required parameter 'body' is set
        if "body" not in params or params["body"] is None:
            raise ValueError(
                "Missing the required parameter `body` when calling `projects_project_id_machines_delete`"
            ) # noqa: E501
        # verify the required parameter 'project_id' is set
        if "project_id" not in params or params["project_id"] is None:
            raise ValueError(
                "Missing the required parameter `project_id` when calling `projects_project_id_machines_delete`"
            ) # noqa: E501
        collection_formats = {}
        path_params = {}
        if "project_id" in params:
            path_params["projectId"] = params["project_id"] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if "body" in params:
            body_params = params["body"]
        # HTTP header `Content-Type`
        header_params[
            "Content-Type"
        ] = self.api_client.select_header_content_type( # noqa: E501
            ["application/json"]
        ) # noqa: E501
        # Authentication setting
        auth_settings = [] # noqa: E501
        return self.api_client.call_api(
            "/projects/{projectId}/machines",
            "DELETE",
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None, # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get("async_req"),
            _return_http_data_only=params.get("_return_http_data_only"),
            _preload_content=params.get("_preload_content", True),
            _request_timeout=params.get("_request_timeout"),
            collection_formats=collection_formats,
        )
    def projects_project_id_machines_get(self, project_id, **kwargs): # noqa: E501
        """List Machines # noqa: E501
        Returns the list of all source machines in the Project (i.e. machines that have an Agent installed). # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_machines_get(project_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str project_id: (required)
        :param int offset: With which item to start (0 based).
        :param int limit: A number specifying how many entries to return.
        :param bool all: When set to false, returns only currently replicating machines. When set to true, returns all machines in the project regardless of replications status. machines are consuming/ have consumed licenses. Note that some license types are transferable and therefore once you remove the and set to true false, returns only currently replicating machines. When set to true, returns all machines in the project regardless of replications status.
        :param str types: Use this url query param to control which machines are returned when doing GET. If you do not include the \\\"types\\\" query param, you will only get source machines
        :return: CloudEndureMachinesList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs["_return_http_data_only"] = True
        if kwargs.get("async_req"):
            return self.projects_project_id_machines_get_with_http_info(
                project_id, **kwargs
            ) # noqa: E501
        else:
            (data) = self.projects_project_id_machines_get_with_http_info(
                project_id, **kwargs
            ) # noqa: E501
            return data
    def projects_project_id_machines_get_with_http_info(
        self, project_id, **kwargs
    ): # noqa: E501
        """List Machines # noqa: E501
        Returns the list of all source machines in the Project (i.e. machines that have an Agent installed). # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_machines_get_with_http_info(project_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str project_id: (required)
        :param int offset: With which item to start (0 based).
        :param int limit: A number specifying how many entries to return.
        :param bool all: When set to false, returns only currently replicating machines. When set to true, returns all machines in the project regardless of replications status. machines are consuming/ have consumed licenses. Note that some license types are transferable and therefore once you remove the and set to true false, returns only currently replicating machines. When set to true, returns all machines in the project regardless of replications status.
        :param str types: Use this url query param to control which machines are returned when doing GET. If you do not include the \\\"types\\\" query param, you will only get source machines
        :return: CloudEndureMachinesList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ["project_id", "offset", "limit", "all", "types"] # noqa: E501
        all_params.append("async_req")
        all_params.append("_return_http_data_only")
        all_params.append("_preload_content")
        all_params.append("_request_timeout")
        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in six.iteritems(params["kwargs"]):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method projects_project_id_machines_get" % key
                )
            params[key] = val
        del params["kwargs"]
        # verify the required parameter 'project_id' is set
        if "project_id" not in params or params["project_id"] is None:
            raise ValueError(
                "Missing the required parameter `project_id` when calling `projects_project_id_machines_get`"
            ) # noqa: E501
        collection_formats = {}
        path_params = {}
        if "project_id" in params:
            path_params["projectId"] = params["project_id"] # noqa: E501
        query_params = []
        # Optional paging/filtering query parameters are only sent when supplied.
        if "offset" in params:
            query_params.append(("offset", params["offset"])) # noqa: E501
        if "limit" in params:
            query_params.append(("limit", params["limit"])) # noqa: E501
        if "all" in params:
            query_params.append(("all", params["all"])) # noqa: E501
        if "types" in params:
            query_params.append(("types", params["types"])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params["Accept"] = self.api_client.select_header_accept(
            ["application/json"]
        ) # noqa: E501
        # Authentication setting
        auth_settings = [] # noqa: E501
        return self.api_client.call_api(
            "/projects/{projectId}/machines",
            "GET",
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type="CloudEndureMachinesList", # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get("async_req"),
            _return_http_data_only=params.get("_return_http_data_only"),
            _preload_content=params.get("_preload_content", True),
            _request_timeout=params.get("_request_timeout"),
            collection_formats=collection_formats,
        )
    def projects_project_id_machines_machine_id_get(
        self, project_id, machine_id, **kwargs
    ): # noqa: E501
        """Get a specific machine. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_machines_machine_id_get(project_id, machine_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str project_id: (required)
        :param str machine_id: (required)
        :return: CloudEndureMachine
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs["_return_http_data_only"] = True
        if kwargs.get("async_req"):
            return self.projects_project_id_machines_machine_id_get_with_http_info(
                project_id, machine_id, **kwargs
            ) # noqa: E501
        else:
            (data) = self.projects_project_id_machines_machine_id_get_with_http_info(
                project_id, machine_id, **kwargs
            ) # noqa: E501
            return data
    def projects_project_id_machines_machine_id_get_with_http_info(
        self, project_id, machine_id, **kwargs
    ): # noqa: E501
        """Get a specific machine. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_machines_machine_id_get_with_http_info(project_id, machine_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str project_id: (required)
        :param str machine_id: (required)
        :return: CloudEndureMachine
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ["project_id", "machine_id"] # noqa: E501
        all_params.append("async_req")
        all_params.append("_return_http_data_only")
        all_params.append("_preload_content")
        all_params.append("_request_timeout")
        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in six.iteritems(params["kwargs"]):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method projects_project_id_machines_machine_id_get" % key
                )
            params[key] = val
        del params["kwargs"]
        # verify the required parameter 'project_id' is set
        if "project_id" not in params or params["project_id"] is None:
            raise ValueError(
                "Missing the required parameter `project_id` when calling `projects_project_id_machines_machine_id_get`"
            ) # noqa: E501
        # verify the required parameter 'machine_id' is set
        if "machine_id" not in params or params["machine_id"] is None:
            raise ValueError(
                "Missing the required parameter `machine_id` when calling `projects_project_id_machines_machine_id_get`"
            ) # noqa: E501
        collection_formats = {}
        path_params = {}
        if "project_id" in params:
            path_params["projectId"] = params["project_id"] # noqa: E501
        if "machine_id" in params:
            path_params["machineId"] = params["machine_id"] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params["Accept"] = self.api_client.select_header_accept(
            ["application/json"]
        ) # noqa: E501
        # Authentication setting
        auth_settings = [] # noqa: E501
        return self.api_client.call_api(
            "/projects/{projectId}/machines/{machineId}",
            "GET",
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type="CloudEndureMachine", # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get("async_req"),
            _return_http_data_only=params.get("_return_http_data_only"),
            _preload_content=params.get("_preload_content", True),
            _request_timeout=params.get("_request_timeout"),
            collection_formats=collection_formats,
        )
    def projects_project_id_machines_machine_id_patch(
        self, body, project_id, machine_id, **kwargs
    ): # noqa: E501
        """Update a machine. Accepts only Launch time updates. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_machines_machine_id_patch(body, project_id, machine_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param CloudEndureMachine body: (required)
        :param str project_id: (required)
        :param str machine_id: (required)
        :return: CloudEndureMachine
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs["_return_http_data_only"] = True
        if kwargs.get("async_req"):
            return self.projects_project_id_machines_machine_id_patch_with_http_info(
                body, project_id, machine_id, **kwargs
            ) # noqa: E501
        else:
            (data) = self.projects_project_id_machines_machine_id_patch_with_http_info(
                body, project_id, machine_id, **kwargs
            ) # noqa: E501
            return data
    def projects_project_id_machines_machine_id_patch_with_http_info(
        self, body, project_id, machine_id, **kwargs
    ): # noqa: E501
        """Update a machine. Accepts only Launch time updates. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_machines_machine_id_patch_with_http_info(body, project_id, machine_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param CloudEndureMachine body: (required)
        :param str project_id: (required)
        :param str machine_id: (required)
        :return: CloudEndureMachine
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ["body", "project_id", "machine_id"] # noqa: E501
        all_params.append("async_req")
        all_params.append("_return_http_data_only")
        all_params.append("_preload_content")
        all_params.append("_request_timeout")
        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in six.iteritems(params["kwargs"]):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method projects_project_id_machines_machine_id_patch" % key
                )
            params[key] = val
        del params["kwargs"]
        # verify the required parameter 'body' is set
        if "body" not in params or params["body"] is None:
            raise ValueError(
                "Missing the required parameter `body` when calling `projects_project_id_machines_machine_id_patch`"
            ) # noqa: E501
        # verify the required parameter 'project_id' is set
        if "project_id" not in params or params["project_id"] is None:
            raise ValueError(
                "Missing the required parameter `project_id` when calling `projects_project_id_machines_machine_id_patch`"
            ) # noqa: E501
        # verify the required parameter 'machine_id' is set
        if "machine_id" not in params or params["machine_id"] is None:
            raise ValueError(
                "Missing the required parameter `machine_id` when calling `projects_project_id_machines_machine_id_patch`"
            ) # noqa: E501
        collection_formats = {}
        path_params = {}
        if "project_id" in params:
            path_params["projectId"] = params["project_id"] # noqa: E501
        if "machine_id" in params:
            path_params["machineId"] = params["machine_id"] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if "body" in params:
            body_params = params["body"]
        # HTTP header `Accept`
        header_params["Accept"] = self.api_client.select_header_accept(
            ["application/json"]
        ) # noqa: E501
        # HTTP header `Content-Type`
        header_params[
            "Content-Type"
        ] = self.api_client.select_header_content_type( # noqa: E501
            ["application/json"]
        ) # noqa: E501
        # Authentication setting
        auth_settings = [] # noqa: E501
        return self.api_client.call_api(
            "/projects/{projectId}/machines/{machineId}",
            "PATCH",
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type="CloudEndureMachine", # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get("async_req"),
            _return_http_data_only=params.get("_return_http_data_only"),
            _preload_content=params.get("_preload_content", True),
            _request_timeout=params.get("_request_timeout"),
            collection_formats=collection_formats,
        )
    def projects_project_id_machines_patch(
        self, body, project_id, **kwargs
    ): # noqa: E501
        """Batch-update multiple machines # noqa: E501
        todo must allow update of tags, update of replicationConfiguration; may allow update of launch times # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_machines_patch(body, project_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param CloudEndureMachinesList body: (required)
        :param str project_id: (required)
        :return: CloudEndureMachinesList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs["_return_http_data_only"] = True
        if kwargs.get("async_req"):
            return self.projects_project_id_machines_patch_with_http_info(
                body, project_id, **kwargs
            ) # noqa: E501
        else:
            (data) = self.projects_project_id_machines_patch_with_http_info(
                body, project_id, **kwargs
            ) # noqa: E501
            return data
    def projects_project_id_machines_patch_with_http_info(
        self, body, project_id, **kwargs
    ): # noqa: E501
        """Batch-update multiple machines # noqa: E501
        todo must allow update of tags, update of replicationConfiguration; may allow update of launch times # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_machines_patch_with_http_info(body, project_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param CloudEndureMachinesList body: (required)
        :param str project_id: (required)
        :return: CloudEndureMachinesList
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ["body", "project_id"] # noqa: E501
        all_params.append("async_req")
        all_params.append("_return_http_data_only")
        all_params.append("_preload_content")
        all_params.append("_request_timeout")
        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in six.iteritems(params["kwargs"]):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method projects_project_id_machines_patch" % key
                )
            params[key] = val
        del params["kwargs"]
        # verify the required parameter 'body' is set
        if "body" not in params or params["body"] is None:
            raise ValueError(
                "Missing the required parameter `body` when calling `projects_project_id_machines_patch`"
            ) # noqa: E501
        # verify the required parameter 'project_id' is set
        if "project_id" not in params or params["project_id"] is None:
            raise ValueError(
                "Missing the required parameter `project_id` when calling `projects_project_id_machines_patch`"
            ) # noqa: E501
        collection_formats = {}
        path_params = {}
        if "project_id" in params:
            path_params["projectId"] = params["project_id"] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if "body" in params:
            body_params = params["body"]
        # HTTP header `Accept`
        header_params["Accept"] = self.api_client.select_header_accept(
            ["application/json"]
        ) # noqa: E501
        # HTTP header `Content-Type`
        header_params[
            "Content-Type"
        ] = self.api_client.select_header_content_type( # noqa: E501
            ["application/json"]
        ) # noqa: E501
        # Authentication setting
        auth_settings = [] # noqa: E501
        return self.api_client.call_api(
            "/projects/{projectId}/machines",
            "PATCH",
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type="CloudEndureMachinesList", # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get("async_req"),
            _return_http_data_only=params.get("_return_http_data_only"),
            _preload_content=params.get("_preload_content", True),
            _request_timeout=params.get("_request_timeout"),
            collection_formats=collection_formats,
        )
    def projects_project_id_replicas_replica_id_get(
        self, project_id, replica_id, **kwargs
    ): # noqa: E501
        """Get Target Machine # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_replicas_replica_id_get(project_id, replica_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str project_id: (required)
        :param str replica_id: (required)
        :return: CloudEndureReplica
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs["_return_http_data_only"] = True
        if kwargs.get("async_req"):
            return self.projects_project_id_replicas_replica_id_get_with_http_info(
                project_id, replica_id, **kwargs
            ) # noqa: E501
        else:
            (data) = self.projects_project_id_replicas_replica_id_get_with_http_info(
                project_id, replica_id, **kwargs
            ) # noqa: E501
            return data
    def projects_project_id_replicas_replica_id_get_with_http_info(
        self, project_id, replica_id, **kwargs
    ): # noqa: E501
        """Get Target Machine # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.projects_project_id_replicas_replica_id_get_with_http_info(project_id, replica_id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str project_id: (required)
        :param str replica_id: (required)
        :return: CloudEndureReplica
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ["project_id", "replica_id"] # noqa: E501
        all_params.append("async_req")
        all_params.append("_return_http_data_only")
        all_params.append("_preload_content")
        all_params.append("_request_timeout")
        params = locals()
        # Reject any keyword argument the endpoint does not recognise.
        for key, val in six.iteritems(params["kwargs"]):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method projects_project_id_replicas_replica_id_get" % key
                )
            params[key] = val
        del params["kwargs"]
        # verify the required parameter 'project_id' is set
        if "project_id" not in params or params["project_id"] is None:
            raise ValueError(
                "Missing the required parameter `project_id` when calling `projects_project_id_replicas_replica_id_get`"
            ) # noqa: E501
        # verify the required parameter 'replica_id' is set
        if "replica_id" not in params or params["replica_id"] is None:
            raise ValueError(
                "Missing the required parameter `replica_id` when calling `projects_project_id_replicas_replica_id_get`"
            ) # noqa: E501
        collection_formats = {}
        path_params = {}
        if "project_id" in params:
            path_params["projectId"] = params["project_id"] # noqa: E501
        if "replica_id" in params:
            path_params["replicaId"] = params["replica_id"] # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params["Accept"] = self.api_client.select_header_accept(
            ["application/json"]
        ) # noqa: E501
        # Authentication setting
        auth_settings = [] # noqa: E501
        return self.api_client.call_api(
            "/projects/{projectId}/replicas/{replicaId}",
            "GET",
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type="CloudEndureReplica", # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get("async_req"),
            _return_http_data_only=params.get("_return_http_data_only"),
            _preload_content=params.get("_preload_content", True),
            _request_timeout=params.get("_request_timeout"),
            collection_formats=collection_formats,
        )
| 42.16313 | 912 | 0.624957 | 3,707 | 31,791 | 5.102509 | 0.079849 | 0.068517 | 0.048533 | 0.059477 | 0.927518 | 0.920011 | 0.91668 | 0.911869 | 0.907639 | 0.904362 | 0 | 0.01476 | 0.29458 | 31,791 | 753 | 913 | 42.219124 | 0.828636 | 0.362996 | 0 | 0.742729 | 0 | 0 | 0.207747 | 0.073813 | 0 | 0 | 0 | 0.002656 | 0 | 1 | 0.029083 | false | 0 | 0.008949 | 0 | 0.080537 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
42098d2c43428e0f418dfdcc85a3d982146d118b | 93 | py | Python | access_restricted/models/__init__.py | trojikman/access-addons | 5b056b4d0928e83f687ea7978de6f96f826c28a6 | [
"MIT"
] | null | null | null | access_restricted/models/__init__.py | trojikman/access-addons | 5b056b4d0928e83f687ea7978de6f96f826c28a6 | [
"MIT"
] | null | null | null | access_restricted/models/__init__.py | trojikman/access-addons | 5b056b4d0928e83f687ea7978de6f96f826c28a6 | [
"MIT"
] | 1 | 2020-11-24T01:29:44.000Z | 2020-11-24T01:29:44.000Z | from . import res_config_settings
from . import res_users
from . import test_config_settings
| 23.25 | 34 | 0.83871 | 14 | 93 | 5.214286 | 0.5 | 0.410959 | 0.356164 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 93 | 3 | 35 | 31 | 0.901235 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
35ea4afbf2a330d390cc444e517730657d703d8b | 2,907 | py | Python | test/deepclustering/dataset/segmentation/test_prostate_dataset.py | jizongFox/deep-clustering-toolbox | 0721cbbb278af027409ed4c115ccc743b6daed1b | [
"MIT"
] | 34 | 2019-08-05T03:48:36.000Z | 2022-03-29T03:04:51.000Z | test/deepclustering/dataset/segmentation/test_prostate_dataset.py | jizongFox/deep-clustering-toolbox | 0721cbbb278af027409ed4c115ccc743b6daed1b | [
"MIT"
] | 10 | 2019-05-03T21:02:50.000Z | 2021-12-23T08:01:30.000Z | test/deepclustering/dataset/segmentation/test_prostate_dataset.py | ETS-Research-Repositories/deep-clustering-toolbox | 0721cbbb278af027409ed4c115ccc743b6daed1b | [
"MIT"
] | 5 | 2019-09-29T07:56:03.000Z | 2021-04-22T12:08:50.000Z | from unittest import TestCase
from deepclustering.dataset.segmentation.prostate_dataset import ProstateDataset
from pathlib import Path
import shutil, os
class TestDownloadDataset(TestCase):
    """Download ProstateDataset and check train/val sizes and group counts.

    NOTE(review): this test downloads the dataset (network + disk I/O),
    so it is slow and environment dependent.
    """

    def _remove_dataset_artifacts(self):
        """Delete any previously extracted dataset folder and zip file.

        Shared by setUp and tearDown (the two cleanup blocks were
        previously duplicated verbatim).
        """
        if Path(self.dataset_root, ProstateDataset.folder_name).exists():
            shutil.rmtree(
                Path(self.dataset_root, ProstateDataset.folder_name),
                ignore_errors=True,
            )
        if Path(self.dataset_root, ProstateDataset.zip_name).exists():
            os.remove(Path(self.dataset_root, ProstateDataset.zip_name))

    def setUp(self) -> None:
        super().setUp()
        self.dataset_root = "./"
        self.dataset_subfolders = ["img", "gt"]
        # start from a clean state so the download path is actually exercised
        self._remove_dataset_artifacts()

    def test_download_dataset(self):
        """Train split: 1129 samples / 40 groups; val split: 248 / 10."""
        dataset = ProstateDataset(
            root_dir=self.dataset_root,
            subfolders=self.dataset_subfolders,
            verbose=True,
            mode="train",
        )
        assert len(dataset) == 1129
        assert len(dataset.get_group_list()) == 40
        dataset = ProstateDataset(
            root_dir=self.dataset_root,
            subfolders=self.dataset_subfolders,
            verbose=True,
            mode="val",
        )
        assert len(dataset) == 248
        assert len(dataset.get_group_list()) == 10

    def tearDown(self) -> None:
        super().tearDown()
        # leave no artifacts behind for subsequent test classes
        self._remove_dataset_artifacts()
class Test_ACDCDataset(TestCase):
    """Iterate over every sample of the downloaded ProstateDataset.

    NOTE(review): despite its name this class tests ProstateDataset, not
    an ACDC dataset — the name looks copy-pasted; confirm before renaming.
    Downloads the dataset, so it is slow and environment dependent.
    """

    def _remove_dataset_artifacts(self):
        """Delete any previously extracted dataset folder and zip file.

        Shared by setUp and tearDown (the two cleanup blocks were
        previously duplicated verbatim).
        """
        if Path(self.dataset_root, ProstateDataset.folder_name).exists():
            shutil.rmtree(
                Path(self.dataset_root, ProstateDataset.folder_name),
                ignore_errors=True,
            )
        if Path(self.dataset_root, ProstateDataset.zip_name).exists():
            os.remove(Path(self.dataset_root, ProstateDataset.zip_name))

    def setUp(self) -> None:
        super().setUp()
        self.dataset_root = "./"
        self.dataset_subfolders = ["img", "gt"]
        # start from a clean state so the download path is actually exercised
        self._remove_dataset_artifacts()

    def test_dataset_iteration(self):
        """Every item must unpack as ((img, gt), filename)."""
        dataset = ProstateDataset(
            root_dir=self.dataset_root,
            subfolders=self.dataset_subfolders,
            verbose=True,
            mode="train",
        )
        for i in range(len(dataset)):
            # unpacking itself is the assertion: a malformed item raises
            (img, gt), filename = dataset[i]

    def tearDown(self) -> None:
        super().tearDown()
        # leave no artifacts behind for subsequent test classes
        self._remove_dataset_artifacts()
| 36.797468 | 88 | 0.627451 | 310 | 2,907 | 5.667742 | 0.187097 | 0.175299 | 0.179283 | 0.173022 | 0.830962 | 0.830962 | 0.799089 | 0.799089 | 0.799089 | 0.799089 | 0 | 0.005152 | 0.265566 | 2,907 | 78 | 89 | 37.269231 | 0.817799 | 0 | 0 | 0.676471 | 0 | 0 | 0.009288 | 0 | 0 | 0 | 0 | 0 | 0.058824 | 1 | 0.088235 | false | 0 | 0.058824 | 0 | 0.176471 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
35ebbb4b227a97b13090224fa5c6d93d7d1d63fb | 137 | py | Python | classifiers/__init__.py | cuongdtnguyen/poster-reader | 78f5693d86ac47c3d6329cf0ad4348fc6b73ec8b | [
"MIT"
] | 1 | 2017-05-31T04:37:13.000Z | 2017-05-31T04:37:13.000Z | classifiers/__init__.py | cuongdtnguyen/poster-reader | 78f5693d86ac47c3d6329cf0ad4348fc6b73ec8b | [
"MIT"
] | 4 | 2020-11-13T17:22:27.000Z | 2022-01-13T00:37:18.000Z | classifiers/__init__.py | cuongdtnguyen/poster-reader | 78f5693d86ac47c3d6329cf0ad4348fc6b73ec8b | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from .fc_net import *
from .conv_net import *
from .conv_net_2 import *
from .conv_net_3 import * | 22.833333 | 38 | 0.79562 | 23 | 137 | 4.26087 | 0.391304 | 0.408163 | 0.428571 | 0.520408 | 0.377551 | 0 | 0 | 0 | 0 | 0 | 0 | 0.017094 | 0.145985 | 137 | 6 | 39 | 22.833333 | 0.820513 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
35ec73034046826aa99c08d9a40a714bd87954b0 | 6,161 | py | Python | oops_fhir/r4/code_system/standards_status.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | oops_fhir/r4/code_system/standards_status.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | oops_fhir/r4/code_system/standards_status.py | Mikuana/oops_fhir | 77963315d123756b7d21ae881f433778096a1d25 | [
"MIT"
] | null | null | null | from pathlib import Path
from fhir.resources.codesystem import CodeSystem
from oops_fhir.utils import CodeSystemConcept
# Public API of this module.
__all__ = ["StandardsStatus"]

# CodeSystem resource parsed from the JSON file shipped next to this
# module (same basename, ".json" suffix).
_resource = CodeSystem.parse_file(Path(__file__).with_suffix(".json"))
class StandardsStatus:
    """
    StandardsStatus
    HL7 Ballot/Standards status of artifact.
    Status: draft - Version: 4.0.1
    Copyright None
    http://terminology.hl7.org/CodeSystem/standards-status
    """

    # Each class attribute mirrors one concept of the bundled CodeSystem
    # JSON; the bare triple-quoted strings after each attribute repeat
    # the display and definition for help()/IDE visibility.
    draft = CodeSystemConcept(
        {
            "code": "draft",
            "definition": 'This portion of the specification is not considered to be complete enough or sufficiently reviewed to be safe for implementation. It may have known issues or still be in the "in development" stage. It is included in the publication as a place-holder, to solicit feedback from the implementation community and/or to give implementers some insight as to functionality likely to be included in future versions of the specification. Content at this level should only be implemented by the brave or desperate and is very much "use at your own risk". The content that is Draft that will usually be elevated to Trial Use once review and correction is complete after it has been subjected to ballot.',
            "display": "Draft",
        }
    )
    """
    Draft
    This portion of the specification is not considered to be complete enough or sufficiently reviewed to be safe for implementation. It may have known issues or still be in the "in development" stage. It is included in the publication as a place-holder, to solicit feedback from the implementation community and/or to give implementers some insight as to functionality likely to be included in future versions of the specification. Content at this level should only be implemented by the brave or desperate and is very much "use at your own risk". The content that is Draft that will usually be elevated to Trial Use once review and correction is complete after it has been subjected to ballot.
    """

    normative = CodeSystemConcept(
        {
            "code": "normative",
            "definition": "This content has been subject to review and production implementation in a wide variety of environments. The content is considered to be stable and has been 'locked', subjecting it to FHIR Inter-version Compatibility Rules. While changes are possible, they are expected to be infrequent and are tightly constrained. Compatibility Rules.",
            "display": "Normative",
        }
    )
    """
    Normative
    This content has been subject to review and production implementation in a wide variety of environments. The content is considered to be stable and has been 'locked', subjecting it to FHIR Inter-version Compatibility Rules. While changes are possible, they are expected to be infrequent and are tightly constrained. Compatibility Rules.
    """

    trial_use = CodeSystemConcept(
        {
            "code": "trial-use",
            "definition": "This content has been well reviewed and is considered by the authors to be ready for use in production systems. It has been subjected to ballot and approved as an official standard. However, it has not yet seen widespread use in production across the full spectrum of environments it is intended to be used in. In some cases, there may be documented known issues that require implementation experience to determine appropriate resolutions for.\n\nFuture versions of FHIR may make significant changes to Trial Use content that are not compatible with previously published content.",
            "display": "Trial-Use",
        }
    )
    """
    Trial-Use
    This content has been well reviewed and is considered by the authors to be ready for use in production systems. It has been subjected to ballot and approved as an official standard. However, it has not yet seen widespread use in production across the full spectrum of environments it is intended to be used in. In some cases, there may be documented known issues that require implementation experience to determine appropriate resolutions for.
    Future versions of FHIR may make significant changes to Trial Use content that are not compatible with previously published content.
    """

    informative = CodeSystemConcept(
        {
            "code": "informative",
            "definition": "This portion of the specification is provided for implementer assistance, and does not make rules that implementers are required to follow. Typical examples of this content in the FHIR specification are tables of contents, registries, examples, and implementer advice.",
            "display": "Informative",
        }
    )
    """
    Informative
    This portion of the specification is provided for implementer assistance, and does not make rules that implementers are required to follow. Typical examples of this content in the FHIR specification are tables of contents, registries, examples, and implementer advice.
    """

    deprecated = CodeSystemConcept(
        {
            "code": "deprecated",
            # NOTE(review): this definition text is identical to the
            # "informative" one — looks like a copy/paste in the source
            # resource; verify against the published code system at
            # terminology.hl7.org before relying on it.
            "definition": "This portion of the specification is provided for implementer assistance, and does not make rules that implementers are required to follow. Typical examples of this content in the FHIR specification are tables of contents, registries, examples, and implementer advice.",
            "display": "Deprecated",
        }
    )
    """
    Deprecated
    This portion of the specification is provided for implementer assistance, and does not make rules that implementers are required to follow. Typical examples of this content in the FHIR specification are tables of contents, registries, examples, and implementer advice.
    """

    external = CodeSystemConcept(
        {
            "code": "external",
            "definition": "This is content that is managed outside the FHIR Specification, but included for implementer convenience.",
            "display": "External",
        }
    )
    """
    External
    This is content that is managed outside the FHIR Specification, but included for implementer convenience.
    """

    class Meta:
        # parsed CodeSystem resource backing the concepts above
        resource = _resource
| 57.046296 | 720 | 0.731212 | 818 | 6,161 | 5.49022 | 0.222494 | 0.012469 | 0.032064 | 0.021376 | 0.838121 | 0.833667 | 0.833667 | 0.831441 | 0.831441 | 0.831441 | 0 | 0.001039 | 0.218958 | 6,161 | 107 | 721 | 57.579439 | 0.932253 | 0.026132 | 0 | 0.04 | 0 | 0.1 | 0.705567 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.06 | 0 | 0.22 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c40f520ecf66c3e20c25484ed0cd3108887228b0 | 9,759 | py | Python | AI/pyneogame/Agent/PolicyGradient.py | neodev-ab/NeoGame | f2c0c010b2ea13d99e23ecd2f59592433fbd2385 | [
"MIT"
] | 2 | 2019-08-29T13:52:36.000Z | 2019-11-23T23:41:54.000Z | AI/pyneogame/Agent/PolicyGradient.py | neodev-ab/NeoGame | f2c0c010b2ea13d99e23ecd2f59592433fbd2385 | [
"MIT"
] | 9 | 2019-08-30T08:01:37.000Z | 2021-10-06T06:48:09.000Z | AI/pyneogame/Agent/PolicyGradient.py | neodev-ab/NeoGame | f2c0c010b2ea13d99e23ecd2f59592433fbd2385 | [
"MIT"
] | 2 | 2019-05-12T14:41:42.000Z | 2019-11-23T23:47:24.000Z | import pandas as pd
import numpy as np
from collections import deque
import random
from os import path
from keras import Model
# from keras. models import save_model, load_model
from keras.layers import Input, Dense, Embedding, Flatten, LSTM, Bidirectional
from keras.layers import Dropout
from keras.callbacks import EarlyStopping
from keras.activations import softmax
from keras import regularizers
import tensorflow as tf
from . import DeepQAgent
class ReInforce_v2(DeepQAgent.DeepQAgent):
    """REINFORCE policy-gradient agent that samples two distinct cards.

    The policy network outputs a softmax distribution over individual
    cards; an action is formed by drawing two distinct indices from that
    distribution and encoding them as a two-hot vector.
    """

    def __init__(self, state_size, actions,
                 model=None,
                 epsilon=0.9,
                 decay_rate=1e-5,
                 update_interval=200,
                 memory_size=10000,
                 verbose=0):
        """
        Parameters
        ----------
        state_size : int
            length of the flattened state vector
        actions : np.ndarray
            action matrix; only its number of columns is used here
        model : keras.Model, optional
            custom policy network; a default one is built when None
        epsilon : float
            exploration parameter (stored for base-class compatibility)
        decay_rate : float
            epsilon decay rate (stored for base-class compatibility)
        update_interval : int
            DNN update interval (used by the base class)
        memory_size : int
            maximum number of transitions kept in the replay buffer
        verbose : int
            verbosity level; > 0 enables progress printing
        """
        self.actions = actions
        self.actions_size = self.actions.shape[1]
        self.verbose = verbose
        self.epsilon = epsilon
        self.decay_rate = decay_rate
        self.memory = deque(maxlen=memory_size)
        self.update_dnn_interval = update_interval
        self.episode_counter = 0
        self.state_size = state_size
        self.r_sum = 0
        self.avg_r_sum = []
        if model is None:
            print("Building default model")
            self.dnn_model = self._make_model()
        else:
            self.dnn_model = model
        # snapshot of the initial weights (kept for manual resets)
        self.save_weights = self.dnn_model.get_weights()

    def __str__(self):
        return "Policy Gradient Agent: ReInforce"

    def reward_loss(self, y_true, y_pred):
        """Cross-entropy-style loss supporting signed rewards.

        ``y_true`` holds the taken action scaled by its reward: positive
        entries push the predicted probability up, negative entries push
        it down.
        """
        # NOTE(review): tf.log is the TF1.x spelling (tf.math.log in
        # TF2); this module appears written against TF1.
        _epsilon = tf.convert_to_tensor(tf.keras.backend.epsilon(),
                                        y_pred.dtype.base_dtype)
        y_pred = tf.clip_by_value(y_pred, _epsilon, 1 - _epsilon)
        y_cross = y_true * tf.log(y_pred)
        y_crossNeg = -y_true * tf.log(1 - y_pred)
        # switch between the two terms based on the sign of y_true
        bool_idx = tf.greater(y_true, 0)
        y_loss = tf.keras.backend.switch(bool_idx, y_cross, y_crossNeg)
        return -tf.reduce_mean(y_loss, 1)

    def _make_model(self):
        """Build the default policy network (softmax over single cards)."""
        input_layer = Input(shape=(self.state_size,))
        embedding = Embedding(input_dim=5, output_dim=3)(input_layer)
        flat = Flatten()(embedding)
        x = Dense(200, activation='sigmoid')(flat)
        x = Dense(200, activation='sigmoid')(x)
        x = Dropout(0.1)(x)
        action_dist = Dense(self.actions_size, activation='softmax')(x)
        model_act = Model(inputs=input_layer, outputs=action_dist)
        model_act.compile(loss=self.reward_loss,
                          optimizer='adam')
        if self.verbose:
            print(model_act.summary())
        return model_act

    def get_action(self, state, actions=None,
                   explore_exploit='none',
                   as_string=False):
        """Sample a two-hot action vector from the policy distribution.

        ``actions``, ``explore_exploit`` and ``as_string`` are accepted
        only for interface compatibility and are ignored.
        """
        state = state.reshape(1, state.shape[0])
        act_dist = self.dnn_model.predict(state)
        # draw two distinct card indices according to the policy
        idx = np.random.choice(range(act_dist.shape[1]),
                               p=act_dist.ravel(),
                               size=2,
                               replace=False)
        action = np.zeros(act_dist.shape[1])
        action[idx] = 1
        return action

    def remember(self, state, action, reward, new_state=None, done=None):
        """Store one transition; new_state/done kept for interface parity."""
        self.memory.append((state, action, reward))

    def replay_experience(self, batch_size=64, epochs=30):
        """Fit the policy network on the whole replay buffer.

        Returns the keras ``History`` of the fit.
        """
        if self.verbose > 0:
            print('Doing replay')
        # never request a larger batch than the buffer holds
        # (consistent with ReInforce.replay_experience)
        batch_size = min(batch_size, len(self.memory))
        # unpack the experience buffer into arrays
        player_mem = np.asarray(self.memory)
        states = np.vstack(player_mem[:, 0])
        actions = np.vstack(player_mem[:, 1])
        rewards = player_mem[:, 2]
        # reward-weighted actions are the target expected by reward_loss
        target = actions * rewards[:, np.newaxis]
        es = EarlyStopping(monitor='val_loss', mode='min',
                           verbose=0, patience=2)
        history = self.dnn_model.fit(states,
                                     target,
                                     epochs=epochs,
                                     verbose=0,
                                     batch_size=batch_size,
                                     callbacks=[es],
                                     validation_split=0.10)
        return history

    def get_action_size(self):
        """Return the number of single-card outputs of the policy net."""
        return self.actions_size

    def input_model(self, model):
        """Install an externally built keras model as the policy network."""
        if model.optimizer is None:
            print("Compiling model, default loss and optimizer")
            model.compile(loss=self.reward_loss,
                          optimizer='adam')
        self.dnn_model = model
class ReInforce(DeepQAgent.DeepQAgent):
    """REINFORCE policy-gradient agent over the full action set.

    The policy network outputs a probability distribution over ALL
    possible actions (every way to select two cards from the given
    set); a single action row is sampled from that distribution.
    """

    def __init__(self, state_size, actions,
                 model=None,
                 epsilon=0.9,
                 decay_rate=1e-5,
                 update_interval=200,
                 memory_size=10000,
                 verbose=0):
        """
        Parameters
        ----------
        state_size : int
            length of the flattened state vector
        actions : np.ndarray
            matrix of all possible actions, one action per row
        model : keras.Model, optional
            custom policy network; a default one is built when None
        epsilon : float
            exploration parameter (stored for base-class compatibility)
        decay_rate : float
            epsilon decay rate (stored for base-class compatibility)
        update_interval : int
            DNN update interval (used by the base class)
        memory_size : int
            maximum number of transitions kept in the replay buffer
        verbose : int
            verbosity level; > 0 enables progress printing
        """
        self.actions = actions
        self.verbose = verbose
        self.epsilon = epsilon
        self.decay_rate = decay_rate
        self.memory = deque(maxlen=memory_size)
        self.update_dnn_interval = update_interval
        self.episode_counter = 0
        self.state_size = state_size
        self.r_sum = 0
        self.avg_r_sum = []
        if model is None:
            print("Building default model")
            self.dnn_model = self._make_model()
        else:
            self.dnn_model = model

    def __str__(self):
        return "Policy Gradient Agent: ReInforce"

    def reward_loss(self, y_true, y_pred):
        """Cross-entropy-style loss supporting signed rewards.

        ``y_true`` holds the taken action scaled by its reward: positive
        entries push the predicted probability up, negative entries push
        it down.
        """
        # NOTE(review): tf.log is the TF1.x spelling (tf.math.log in
        # TF2); this module appears written against TF1.
        _epsilon = tf.convert_to_tensor(tf.keras.backend.epsilon(),
                                        y_pred.dtype.base_dtype)
        y_pred = tf.clip_by_value(y_pred, _epsilon, 1 - _epsilon)
        y_cross = y_true * tf.log(y_pred)
        y_crossNeg = -y_true * tf.log(1 - y_pred)
        # switch between the two terms based on the sign of y_true
        bool_idx = tf.greater(y_true, 0)
        y_loss = tf.keras.backend.switch(bool_idx, y_cross, y_crossNeg)
        return -tf.reduce_mean(y_loss, 1)

    def _make_model(self):
        """Build the default policy network (softmax over all actions)."""
        input_layer = Input(shape=(self.state_size,))
        embedding = Embedding(input_dim=5, output_dim=3)(input_layer)
        flat = Flatten()(embedding)
        x = Dense(200, activation='relu')(flat)
        x = Dense(200, activation='sigmoid')(x)
        x = Dropout(0.1)(x)
        action_dist = Dense(len(self.actions), activation='softmax')(x)
        model_act = Model(inputs=input_layer, outputs=action_dist)
        model_act.compile(loss=self.reward_loss,
                          optimizer='adam')
        if self.verbose:
            print(model_act.summary())
        return model_act

    def get_action(self, state, actions=None,
                   explore_exploit='none',
                   as_string=False):
        """Sample one action row from the policy distribution.

        ``actions``, ``explore_exploit`` and ``as_string`` are accepted
        only for interface compatibility and are ignored.
        """
        state = state.reshape(1, state.shape[0])
        act_dist = self.dnn_model.predict(state)
        idx = np.random.choice(range(act_dist.shape[1]),
                               p=act_dist.ravel(), size=1)
        return self.actions[idx][0]

    def remember(self, state, action, reward, new_state=None, done=None):
        """Store one transition, encoding the action as one-hot over the
        action list; new_state/done kept for interface parity."""
        # index of the matching row in self.actions -> one-hot encoding
        idx = np.where(np.all(self.actions == action, axis=1))[0]
        action = np.zeros(len(self.actions))
        action[idx] = 1
        self.memory.append((state, action, reward))

    def replay_experience(self, batch_size=64, epochs=30):
        """Fit the policy network on the whole replay buffer.

        Returns the keras ``History`` of the fit.
        """
        if self.verbose > 0:
            print('Doing replay')
        # never request a larger batch than the buffer holds
        batch_size = min(batch_size, len(self.memory))
        # unpack the experience buffer into arrays
        player_mem = np.asarray(self.memory)
        states = np.vstack(player_mem[:, 0])
        actions = np.vstack(player_mem[:, 1])
        rewards = player_mem[:, 2]
        # reward-weighted actions are the target expected by reward_loss
        target = actions * rewards[:, np.newaxis]
        es = EarlyStopping(monitor='val_loss', mode='min',
                           verbose=0, patience=2)
        history = self.dnn_model.fit(states,
                                     target,
                                     epochs=epochs,
                                     verbose=0,
                                     batch_size=batch_size,
                                     callbacks=[es],
                                     validation_split=0.10)
        return history

    def input_model(self, model):
        """Install an externally built keras model as the policy network."""
        # identity comparison with None (was: model.optimizer == None)
        if model.optimizer is None:
            print("Compiling model, default loss and optimizer")
            model.compile(loss=self.reward_loss,
                          optimizer='adam')
        self.dnn_model = model
| 37.248092 | 92 | 0.566554 | 1,153 | 9,759 | 4.607112 | 0.192541 | 0.021084 | 0.029367 | 0.012801 | 0.810806 | 0.791416 | 0.778614 | 0.763931 | 0.735128 | 0.724962 | 0 | 0.017541 | 0.339891 | 9,759 | 261 | 93 | 37.390805 | 0.807048 | 0.112716 | 0 | 0.761421 | 0 | 0 | 0.035327 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.086294 | false | 0 | 0.06599 | 0.015228 | 0.218274 | 0.040609 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c46c960070d32d1437029cfffaf409fdfa45d7a3 | 16,593 | py | Python | shapenet/models/feature_extractors.py | justusschock/shape-constrained-network | 319c778443fcc76c99da9b8ca21e808b622c803c | [
"MIT"
] | 16 | 2018-09-09T20:38:45.000Z | 2021-05-03T21:23:02.000Z | shapenet/models/feature_extractors.py | justusschock/shape-constrained-network | 319c778443fcc76c99da9b8ca21e808b622c803c | [
"MIT"
] | null | null | null | shapenet/models/feature_extractors.py | justusschock/shape-constrained-network | 319c778443fcc76c99da9b8ca21e808b622c803c | [
"MIT"
] | 2 | 2018-12-10T03:36:09.000Z | 2019-09-23T07:48:15.000Z | import torch
from .abstract_network import AbstractFeatureExtractor
class Conv2dRelu(torch.nn.Module):
    """Composite module: a 2D convolution directly followed by a ReLU."""

    def __init__(self, *args, **kwargs):
        """
        Parameters
        ----------
        args: list
            positional arguments (forwarded to ``torch.nn.Conv2d``)
        kwargs: dict
            keyword arguments (forwarded to ``torch.nn.Conv2d``)
        """
        super().__init__()
        # attribute names kept stable so state_dict keys do not change
        self._conv = torch.nn.Conv2d(*args, **kwargs)
        self._relu = torch.nn.ReLU()

    def forward(self, input_batch):
        """
        Run the batch through convolution, then ReLU.

        Parameters
        ----------
        input_batch: torch.Tensor
            batch of 2D feature maps

        Returns
        -------
        torch.Tensor: activated convolution output
        """
        conv_out = self._conv(input_batch)
        return self._relu(conv_out)
class Conv3dRelu(torch.nn.Module):
    """Composite module: a 3D convolution directly followed by a ReLU."""

    def __init__(self, *args, **kwargs):
        """
        Parameters
        ----------
        args: list
            positional arguments (forwarded to ``torch.nn.Conv3d``)
        kwargs: dict
            keyword arguments (forwarded to ``torch.nn.Conv3d``)
        """
        super().__init__()
        # attribute names kept stable so state_dict keys do not change
        self._conv = torch.nn.Conv3d(*args, **kwargs)
        self._relu = torch.nn.ReLU()

    def forward(self, input_batch):
        """
        Run the batch through convolution, then ReLU.

        Parameters
        ----------
        input_batch: torch.Tensor
            batch of 3D feature volumes

        Returns
        -------
        torch.Tensor: activated convolution output
        """
        conv_out = self._conv(input_batch)
        return self._relu(conv_out)
class Img224x224Kernel3x3(AbstractFeatureExtractor):
    """Sequential CNN feature extractor built from plain 3x3 convolutions.

    NOTE(review): the class name suggests 224x224 inputs; the final 2x2
    convolution presumably matches the spatial size left after the four
    stride-2 stages — confirm against the input pipeline.
    """

    @staticmethod
    def _build_model(in_channels, out_features, norm_class, p_dropout):
        """
        Build the actual model structure

        Parameters
        ----------
        in_channels: int
            number of input channels
        out_features: int
            number of outputs
        norm_class: Any
            class implementing a normalization
        p_dropout: float
            dropout probability

        Returns
        -------
        torch.nn.Module: ensembled model

        """
        model = torch.nn.Sequential()
        # stage 1: 64 channels, downsampled by a stride-2 conv
        model.add_module("conv_1_1", Conv2dRelu(in_channels, 64, 3, 1))
        model.add_module("conv_1_2", Conv2dRelu(64, 64, 3, 1))
        model.add_module("down_conv_1", Conv2dRelu(64, 64, 3, 2))
        # optional normalization / dropout after each downsampling stage
        if norm_class is not None:
            model.add_module("norm_1", norm_class(64))
        if p_dropout:
            model.add_module("dropout_1", torch.nn.Dropout2d(p_dropout))
        # stage 2: 128 channels
        model.add_module("conv_2_1", Conv2dRelu(64, 128, 3, 1))
        model.add_module("conv_2_2", Conv2dRelu(128, 128, 3, 1))
        model.add_module("down_conv_2", Conv2dRelu(128, 128, 3, 2))
        if norm_class is not None:
            model.add_module("norm_2", norm_class(128))
        if p_dropout:
            model.add_module("dropout_2", torch.nn.Dropout2d(p_dropout))
        # stage 3: four convs at 256 channels
        model.add_module("conv_3_1", Conv2dRelu(128, 256, 3, 1))
        model.add_module("conv_3_2", Conv2dRelu(256, 256, 3, 1))
        model.add_module("conv_3_3", Conv2dRelu(256, 256, 3, 1))
        model.add_module("conv_3_4", Conv2dRelu(256, 256, 3, 1))
        model.add_module("down_conv_3", Conv2dRelu(256, 256, 3, 2))
        if norm_class is not None:
            model.add_module("norm_3", norm_class(256))
        if p_dropout:
            model.add_module("dropout_3", torch.nn.Dropout2d(p_dropout))
        # stage 4: four convs at 512 channels, reduced back to 256
        model.add_module("conv_4_1", Conv2dRelu(256, 512, 3, 1))
        model.add_module("conv_4_2", Conv2dRelu(512, 512, 3, 1))
        model.add_module("conv_4_3", Conv2dRelu(512, 512, 3, 1))
        model.add_module("conv_4_4", Conv2dRelu(512, 512, 3, 1))
        model.add_module("down_conv_4", Conv2dRelu(512, 256, 3, 2))
        if norm_class is not None:
            model.add_module("norm_4", norm_class(256))
        if p_dropout:
            model.add_module("dropout_4", torch.nn.Dropout2d(p_dropout))
        # head: channel reduction, then a plain 2x2 conv (no ReLU) to
        # produce the raw out_features map
        model.add_module("conv_5_1", Conv2dRelu(256, 128, 3, 1))
        model.add_module("conv_6_1", Conv2dRelu(128, 128, 3, 1))
        model.add_module("conv_7_1", torch.nn.Conv2d(128, out_features, 2, 1))
        return model
class Img224x224Kernel3x3SeparatedDims(AbstractFeatureExtractor):
    """Variant of Img224x224Kernel3x3 with each 3x3 conv factored into a
    (3, 1) and a (1, 3) convolution (spatially separated kernels).

    NOTE(review): the very first conv ("conv_1_1") still uses a full 3x3
    kernel — possibly intentional, possibly an oversight; confirm.
    """

    @staticmethod
    def _build_model(in_channels, out_features, norm_class, p_dropout):
        """
        Build the actual model structure

        Parameters
        ----------
        in_channels: int
            number of input channels
        out_features: int
            number of outputs
        norm_class: Any
            class implementing a normalization
        p_dropout: float
            dropout probability

        Returns
        -------
        torch.nn.Module: ensembled model

        """
        model = torch.nn.Sequential()
        # stage 1: 64 channels; separated (3,1)/(1,3) pairs, stride-2 down
        model.add_module("conv_1_1", Conv2dRelu(in_channels, 64, 3, 1))
        model.add_module("conv_1_1_2", Conv2dRelu(64, 64, (1, 3), 1))
        model.add_module("conv_1_2_1", Conv2dRelu(64, 64, (3, 1), 1))
        model.add_module("conv_1_2_2", Conv2dRelu(64, 64, (1, 3), 1))
        model.add_module("down_conv_1", Conv2dRelu(64, 64, 3, 2))
        # optional normalization / dropout after each downsampling stage
        if norm_class is not None:
            model.add_module("norm_1", norm_class(64))
        if p_dropout:
            model.add_module("dropout_1", torch.nn.Dropout2d(p_dropout))
        # stage 2: 128 channels
        model.add_module("conv_2_1_1", Conv2dRelu(64, 128, (3, 1), 1))
        model.add_module("conv_2_1_2", Conv2dRelu(128, 128, (1, 3), 1))
        model.add_module("conv_2_2_1", Conv2dRelu(128, 128, (3, 1), 1))
        model.add_module("conv_2_2_2", Conv2dRelu(128, 128, (1, 3), 1))
        model.add_module("down_conv_2", Conv2dRelu(128, 128, 3, 2))
        if norm_class is not None:
            model.add_module("norm_2", norm_class(128))
        if p_dropout:
            model.add_module("dropout_2", torch.nn.Dropout2d(p_dropout))
        # stage 3: four separated pairs at 256 channels
        model.add_module("conv_3_1_1", Conv2dRelu(128, 256, (3, 1), 1))
        model.add_module("conv_3_1_2", Conv2dRelu(256, 256, (1, 3), 1))
        model.add_module("conv_3_2_1", Conv2dRelu(256, 256, (3, 1), 1))
        model.add_module("conv_3_2_2", Conv2dRelu(256, 256, (1, 3), 1))
        model.add_module("conv_3_3_1", Conv2dRelu(256, 256, (3, 1), 1))
        model.add_module("conv_3_3_2", Conv2dRelu(256, 256, (1, 3), 1))
        model.add_module("conv_3_4_1", Conv2dRelu(256, 256, (3, 1), 1))
        model.add_module("conv_3_4_2", Conv2dRelu(256, 256, (1, 3), 1))
        model.add_module("down_conv_3", Conv2dRelu(256, 256, 3, 2))
        if norm_class is not None:
            model.add_module("norm_3", norm_class(256))
        if p_dropout:
            model.add_module("dropout_3", torch.nn.Dropout2d(p_dropout))
        # stage 4: four separated pairs at 512 channels, reduced to 256
        model.add_module("conv_4_1_1", Conv2dRelu(256, 512, (3, 1), 1))
        model.add_module("conv_4_1_2", Conv2dRelu(512, 512, (1, 3), 1))
        model.add_module("conv_4_2_1", Conv2dRelu(512, 512, (3, 1), 1))
        model.add_module("conv_4_2_2", Conv2dRelu(512, 512, (1, 3), 1))
        model.add_module("conv_4_3_1", Conv2dRelu(512, 512, (3, 1), 1))
        model.add_module("conv_4_3_2", Conv2dRelu(512, 512, (1, 3), 1))
        model.add_module("conv_4_4_1", Conv2dRelu(512, 512, (3, 1), 1))
        model.add_module("conv_4_4_2", Conv2dRelu(512, 512, (1, 3), 1))
        model.add_module("down_conv_4", Conv2dRelu(512, 256, 3, 2))
        if norm_class is not None:
            model.add_module("norm_4", norm_class(256))
        if p_dropout:
            model.add_module("dropout_4", torch.nn.Dropout2d(p_dropout))
        # head: channel reduction, then a plain 2x2 conv (no ReLU) to
        # produce the raw out_features map
        model.add_module("conv_5_1_1", Conv2dRelu(256, 128, (3, 1), 1))
        model.add_module("conv_5_1_2", Conv2dRelu(128, 128, (1, 3), 1))
        model.add_module("conv_6_1_1", Conv2dRelu(128, 128, (3, 1), 1))
        model.add_module("conv_6_1_2", Conv2dRelu(128, 128, (1, 3), 1))
        model.add_module("conv_7_1", torch.nn.Conv2d(128, out_features, 2, 1))
        return model
class Img224x224Kernel7x7SeparatedDims(AbstractFeatureExtractor):
    """Feature extractor with separated kernels of shrinking size
    (7 -> 5 -> 3) and stride-2 downsampling between stages.
    """

    @staticmethod
    def _build_model(in_channels, out_params, norm_class, p_dropout):
        """
        Build the actual model structure

        Parameters
        ----------
        in_channels: int
            number of input channels
        out_params: int
            number of outputs
        norm_class: Any
            class implementing a normalization
        p_dropout: float
            dropout probability

        Returns
        -------
        torch.nn.Module: ensembled model

        """
        model = torch.nn.Sequential()
        # stage 1: separated 7-kernels at 64 channels, stride-2 7x7 down
        model.add_module("conv_1", Conv2dRelu(in_channels, 64, (7, 1)))
        model.add_module("conv_2", Conv2dRelu(64, 64, (1, 7)))
        model.add_module("down_conv_1", Conv2dRelu(64, 128, (7, 7), stride=2))
        # optional normalization / dropout after each downsampling stage
        if norm_class is not None:
            model.add_module("norm_1", norm_class(128))
        if p_dropout:
            model.add_module("dropout_1", torch.nn.Dropout2d(p_dropout))
        # stage 2: separated 7-kernels at 128 channels
        model.add_module("conv_3", Conv2dRelu(128, 128, (7, 1)))
        model.add_module("conv_4", Conv2dRelu(128, 128, (1, 7)))
        model.add_module("down_conv_2", Conv2dRelu(128, 256, (7, 7), stride=2))
        if norm_class is not None:
            model.add_module("norm_2", norm_class(256))
        if p_dropout:
            model.add_module("dropout_2", torch.nn.Dropout2d(p_dropout))
        # stage 3: separated 5-kernels at 256 channels
        model.add_module("conv_5", Conv2dRelu(256, 256, (5, 1)))
        model.add_module("conv_6", Conv2dRelu(256, 256, (1, 5)))
        model.add_module("down_conv_3", Conv2dRelu(256, 256, (5, 5), stride=2))
        if norm_class is not None:
            model.add_module("norm_3", norm_class(256))
        if p_dropout:
            model.add_module("dropout_3", torch.nn.Dropout2d(p_dropout))
        # stage 4: separated 5-kernels, reduced back to 128 channels
        model.add_module("conv_7", Conv2dRelu(256, 256, (5, 1)))
        model.add_module("conv_8", Conv2dRelu(256, 256, (1, 5)))
        model.add_module("down_conv_4", Conv2dRelu(256, 128, (5, 5), stride=2))
        if norm_class is not None:
            model.add_module("norm_4", norm_class(128))
        if p_dropout:
            model.add_module("dropout_4", torch.nn.Dropout2d(p_dropout))
        # head: separated 3-kernels, then a plain 2x2 conv (no ReLU) to
        # produce the raw out_params map
        model.add_module("conv_9", Conv2dRelu(128, 128, (3, 1)))
        model.add_module("conv_10", Conv2dRelu(128, 128, (1, 3)))
        model.add_module("conv_11", Conv2dRelu(128, 128, (3, 1)))
        model.add_module("conv_12", Conv2dRelu(128, 128, (1, 3)))
        model.add_module("final_conv", torch.nn.Conv2d(128, out_params,
                                                       (2, 2)))
        return model
class Img1024x1024Kernel9x9SeparatedDims(AbstractFeatureExtractor):
    """Feature extractor with separated kernels of shrinking size
    (9 -> 7 -> 5 -> 3), stride-3 downsampling in the first three stages
    and stride-2 in the last two.
    """

    @staticmethod
    def _build_model(in_channels, out_features, norm_class, p_dropout):
        """
        Build the actual model structure

        Parameters
        ----------
        in_channels: int
            number of input channels
        out_features: int
            number of outputs
        norm_class: Any
            class implementing a normalization
        p_dropout: float
            dropout probability

        Returns
        -------
        torch.nn.Module: ensembled model

        """
        model = torch.nn.Sequential()
        # stage 1: separated 9-kernels at 64 channels, stride-3 9x9 down
        model.add_module("conv_1", Conv2dRelu(in_channels, 64, (9, 1)))
        model.add_module("conv_2", Conv2dRelu(64, 64, (1, 9)))
        model.add_module("down_conv_1", Conv2dRelu(64, 128, (9, 9), stride=3))
        # optional normalization / dropout after the first four stages
        if norm_class is not None:
            model.add_module("norm_1", norm_class(128))
        if p_dropout:
            model.add_module("dropout_1", torch.nn.Dropout2d(p_dropout))
        # stage 2: separated 9-kernels at 128 channels, stride-3 7x7 down
        model.add_module("conv_3", Conv2dRelu(128, 128, (9, 1)))
        model.add_module("conv_4", Conv2dRelu(128, 128, (1, 9)))
        model.add_module("down_conv_2", Conv2dRelu(128, 256, (7, 7), stride=3))
        if norm_class is not None:
            model.add_module("norm_2", norm_class(256))
        if p_dropout:
            model.add_module("dropout_2", torch.nn.Dropout2d(p_dropout))
        # stage 3: separated 7-kernels at 256 channels, stride-3 5x5 down
        model.add_module("conv_5", Conv2dRelu(256, 256, (7, 1)))
        model.add_module("conv_6", Conv2dRelu(256, 256, (1, 7)))
        model.add_module("down_conv_3", Conv2dRelu(256, 256, (5, 5), stride=3))
        if norm_class is not None:
            model.add_module("norm_3", norm_class(256))
        if p_dropout:
            model.add_module("dropout_3", torch.nn.Dropout2d(p_dropout))
        # stage 4: separated 5-kernels, reduced back to 128 channels
        model.add_module("conv_7", Conv2dRelu(256, 256, (5, 1)))
        model.add_module("conv_8", Conv2dRelu(256, 256, (1, 5)))
        model.add_module("down_conv_4", Conv2dRelu(256, 128, (5, 5), stride=2))
        if norm_class is not None:
            model.add_module("norm_4", norm_class(128))
        if p_dropout:
            model.add_module("dropout_4", torch.nn.Dropout2d(p_dropout))
        # head: separated 3-kernels around one more stride-2 stage, then a
        # plain 3x3 conv (no ReLU) producing the raw out_features map
        model.add_module("conv_9", Conv2dRelu(128, 128, (3, 1)))
        model.add_module("conv_10", Conv2dRelu(128, 128, (1, 3)))
        model.add_module("down_conv_5", Conv2dRelu(128, 128, (3, 3), stride=2))
        model.add_module("conv_11", Conv2dRelu(128, 128, (3, 1)))
        model.add_module("conv_12", Conv2dRelu(128, 128, (1, 3)))
        model.add_module("final_conv", torch.nn.Conv2d(128, out_features,
                                                       (3, 3)))
        return model
class Img512x512Kernel9x9SeparatedDims(AbstractFeatureExtractor):
    """Feature extractor with separated kernels of shrinking size
    (9 -> 7 -> 5 -> 3), stride-3 downsampling in the first three stages
    and stride-2 in the last two.

    NOTE(review): the layer stack is identical to
    Img1024x1024Kernel9x9SeparatedDims; only the class name differs.
    With a 512x512 input the remaining spatial size after the strided
    stages will differ from the 1024 case — confirm that the final
    (3, 3) convolution matches the intended input resolution.
    """

    @staticmethod
    def _build_model(in_channels, out_features, norm_class, p_dropout):
        """
        Build the actual model structure

        Parameters
        ----------
        in_channels: int
            number of input channels
        out_features: int
            number of outputs
        norm_class: Any
            class implementing a normalization
        p_dropout: float
            dropout probability

        Returns
        -------
        torch.nn.Module: ensembled model

        """
        model = torch.nn.Sequential()
        # stage 1: separated 9-kernels at 64 channels, stride-3 9x9 down
        model.add_module("conv_1", Conv2dRelu(in_channels, 64, (9, 1)))
        model.add_module("conv_2", Conv2dRelu(64, 64, (1, 9)))
        model.add_module("down_conv_1", Conv2dRelu(64, 128, (9, 9),
                                                   stride=3))
        # optional normalization / dropout after the first four stages
        if norm_class is not None:
            model.add_module("norm_1", norm_class(128))
        if p_dropout:
            model.add_module("dropout_1", torch.nn.Dropout2d(p_dropout))
        # stage 2: separated 9-kernels at 128 channels, stride-3 7x7 down
        model.add_module("conv_3", Conv2dRelu(128, 128, (9, 1)))
        model.add_module("conv_4", Conv2dRelu(128, 128, (1, 9)))
        model.add_module("down_conv_2", Conv2dRelu(128, 256, (7, 7),
                                                   stride=3))
        if norm_class is not None:
            model.add_module("norm_2", norm_class(256))
        if p_dropout:
            model.add_module("dropout_2", torch.nn.Dropout2d(p_dropout))
        # stage 3: separated 7-kernels at 256 channels, stride-3 5x5 down
        model.add_module("conv_5", Conv2dRelu(256, 256, (7, 1)))
        model.add_module("conv_6", Conv2dRelu(256, 256, (1, 7)))
        model.add_module("down_conv_3", Conv2dRelu(256, 256, (5, 5),
                                                   stride=3))
        if norm_class is not None:
            model.add_module("norm_3", norm_class(256))
        if p_dropout:
            model.add_module("dropout_3", torch.nn.Dropout2d(p_dropout))
        # stage 4: separated 5-kernels, reduced back to 128 channels
        model.add_module("conv_7", Conv2dRelu(256, 256, (5, 1)))
        model.add_module("conv_8", Conv2dRelu(256, 256, (1, 5)))
        model.add_module("down_conv_4", Conv2dRelu(256, 128, (5, 5),
                                                   stride=2))
        if norm_class is not None:
            model.add_module("norm_4", norm_class(128))
        if p_dropout:
            model.add_module("dropout_4", torch.nn.Dropout2d(p_dropout))
        # head: separated 3-kernels around one more stride-2 stage, then a
        # plain 3x3 conv (no ReLU) producing the raw out_features map
        model.add_module("conv_9", Conv2dRelu(128, 128, (3, 1)))
        model.add_module("conv_10", Conv2dRelu(128, 128, (1, 3)))
        model.add_module("down_conv_5", Conv2dRelu(128, 128, (3, 3),
                                                   stride=2))
        model.add_module("conv_11", Conv2dRelu(128, 128, (3, 1)))
        model.add_module("conv_12", Conv2dRelu(128, 128, (1, 3)))
        model.add_module("final_conv", torch.nn.Conv2d(128, out_features,
                                                       (3, 3)))
        return model
| 36.955457 | 79 | 0.577292 | 2,203 | 16,593 | 4.096686 | 0.044939 | 0.128532 | 0.224931 | 0.159557 | 0.964875 | 0.961884 | 0.930194 | 0.925873 | 0.898283 | 0.883767 | 0 | 0.108631 | 0.289881 | 16,593 | 448 | 80 | 37.037946 | 0.657303 | 0.123606 | 0 | 0.692308 | 0 | 0 | 0.088946 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038462 | false | 0 | 0.008547 | 0 | 0.106838 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
672198e0c6c4df7746ca08da31c8675ef4efc6f5 | 121,986 | py | Python | nova/tests/unit/test_utils.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/test_utils.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/test_utils.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | 2 | 2017-07-20T17:31:34.000Z | 2020-07-24T02:42:19.000Z | begin_unit
comment|'# Copyright 2011 Justin Santa Barbara'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'datetime'
newline|'\n'
name|'import'
name|'hashlib'
newline|'\n'
name|'import'
name|'importlib'
newline|'\n'
name|'import'
name|'logging'
newline|'\n'
name|'import'
name|'os'
newline|'\n'
name|'import'
name|'os'
op|'.'
name|'path'
newline|'\n'
name|'import'
name|'socket'
newline|'\n'
name|'import'
name|'struct'
newline|'\n'
name|'import'
name|'tempfile'
newline|'\n'
nl|'\n'
name|'import'
name|'eventlet'
newline|'\n'
name|'import'
name|'mock'
newline|'\n'
name|'import'
name|'netaddr'
newline|'\n'
name|'from'
name|'oslo_concurrency'
name|'import'
name|'processutils'
newline|'\n'
name|'from'
name|'oslo_config'
name|'import'
name|'cfg'
newline|'\n'
name|'from'
name|'oslo_context'
name|'import'
name|'context'
name|'as'
name|'common_context'
newline|'\n'
name|'from'
name|'oslo_context'
name|'import'
name|'fixture'
name|'as'
name|'context_fixture'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'encodeutils'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'fixture'
name|'as'
name|'utils_fixture'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'units'
newline|'\n'
name|'import'
name|'six'
newline|'\n'
nl|'\n'
name|'import'
name|'nova'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'context'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'utils'
newline|'\n'
nl|'\n'
DECL|variable|CONF
name|'CONF'
op|'='
name|'cfg'
op|'.'
name|'CONF'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|GenericUtilsTestCase
name|'class'
name|'GenericUtilsTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_parse_server_string
indent|' '
name|'def'
name|'test_parse_server_string'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"'::1'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"'::1'"
op|','
string|"''"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"'[::1]:8773'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"'::1'"
op|','
string|"'8773'"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"'2001:db8::192.168.1.1'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"'2001:db8::192.168.1.1'"
op|','
string|"''"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"'[2001:db8::192.168.1.1]:8773'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"'2001:db8::192.168.1.1'"
op|','
string|"'8773'"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"'192.168.1.1'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"'192.168.1.1'"
op|','
string|"''"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"'192.168.1.2:8773'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"'192.168.1.2'"
op|','
string|"'8773'"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"'192.168.1.3'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"'192.168.1.3'"
op|','
string|"''"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"'www.example.com:8443'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"'www.example.com'"
op|','
string|"'8443'"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"'www.example.com'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"'www.example.com'"
op|','
string|"''"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
comment|'# error case'
nl|'\n'
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"'www.exa:mple.com:8443'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"''"
op|','
string|"''"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
name|'result'
op|'='
name|'utils'
op|'.'
name|'parse_server_string'
op|'('
string|"''"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
string|"''"
op|','
string|"''"
op|')'
op|','
name|'result'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_unicode_sanitization
dedent|''
name|'def'
name|'test_hostname_unicode_sanitization'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'u"\\u7684.test.example.com"'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"test.example.com"'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_sanitize_periods
dedent|''
name|'def'
name|'test_hostname_sanitize_periods'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'"....test.example.com..."'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"test.example.com"'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_sanitize_dashes
dedent|''
name|'def'
name|'test_hostname_sanitize_dashes'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'"----test.example.com---"'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"test.example.com"'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_sanitize_characters
dedent|''
name|'def'
name|'test_hostname_sanitize_characters'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'"(#@&$!(@*--#&91)(__=+--test-host.example!!.com-0+"'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"91----test-host.example.com-0"'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_translate
dedent|''
name|'def'
name|'test_hostname_translate'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'"<}\\x1fh\\x10e\\x08l\\x02l\\x05o\\x12!{>"'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"hello"'
op|','
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_has_default
dedent|''
name|'def'
name|'test_hostname_has_default'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'u"\\u7684hello"'
newline|'\n'
name|'defaultname'
op|'='
string|'"Server-1"'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"hello"'
op|','
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|','
nl|'\n'
name|'defaultname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_empty_has_default
dedent|''
name|'def'
name|'test_hostname_empty_has_default'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'u"\\u7684"'
newline|'\n'
name|'defaultname'
op|'='
string|'"Server-1"'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'defaultname'
op|','
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|','
nl|'\n'
name|'defaultname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_empty_has_default_too_long
dedent|''
name|'def'
name|'test_hostname_empty_has_default_too_long'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'u"\\u7684"'
newline|'\n'
name|'defaultname'
op|'='
string|'"a"'
op|'*'
number|'64'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"a"'
op|'*'
number|'63'
op|','
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|','
nl|'\n'
name|'defaultname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_empty_no_default
dedent|''
name|'def'
name|'test_hostname_empty_no_default'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'u"\\u7684"'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'""'
op|','
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_empty_minus_period
dedent|''
name|'def'
name|'test_hostname_empty_minus_period'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'"---..."'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'""'
op|','
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_with_space
dedent|''
name|'def'
name|'test_hostname_with_space'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'" a b c "'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"a-b-c"'
op|','
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_too_long
dedent|''
name|'def'
name|'test_hostname_too_long'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'"a"'
op|'*'
number|'64'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'63'
op|','
name|'len'
op|'('
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|')'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hostname_truncated_no_hyphen
dedent|''
name|'def'
name|'test_hostname_truncated_no_hyphen'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'hostname'
op|'='
string|'"a"'
op|'*'
number|'62'
newline|'\n'
name|'hostname'
op|'='
name|'hostname'
op|'+'
string|"'-'"
op|'+'
string|"'a'"
newline|'\n'
name|'res'
op|'='
name|'utils'
op|'.'
name|'sanitize_hostname'
op|'('
name|'hostname'
op|')'
newline|'\n'
comment|'# we trim to 63 and then trim the trailing dash'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'62'
op|','
name|'len'
op|'('
name|'res'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'res'
op|'.'
name|'endswith'
op|'('
string|"'-'"
op|')'
op|','
string|"'The hostname ends with a -'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_generate_password
dedent|''
name|'def'
name|'test_generate_password'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'password'
op|'='
name|'utils'
op|'.'
name|'generate_password'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
op|'['
name|'c'
name|'for'
name|'c'
name|'in'
name|'password'
name|'if'
name|'c'
name|'in'
string|"'0123456789'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
op|'['
name|'c'
name|'for'
name|'c'
name|'in'
name|'password'
nl|'\n'
name|'if'
name|'c'
name|'in'
string|"'abcdefghijklmnopqrstuvwxyz'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
op|'['
name|'c'
name|'for'
name|'c'
name|'in'
name|'password'
nl|'\n'
name|'if'
name|'c'
name|'in'
string|"'ABCDEFGHIJKLMNOPQRSTUVWXYZ'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_read_file_as_root
dedent|''
name|'def'
name|'test_read_file_as_root'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|fake_execute
indent|' '
name|'def'
name|'fake_execute'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'args'
op|'['
number|'1'
op|']'
op|'=='
string|"'bad'"
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'processutils'
op|'.'
name|'ProcessExecutionError'
op|'('
op|')'
newline|'\n'
dedent|''
name|'return'
string|"'fakecontents'"
op|','
name|'None'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.utils.execute'"
op|','
name|'fake_execute'
op|')'
newline|'\n'
name|'contents'
op|'='
name|'utils'
op|'.'
name|'read_file_as_root'
op|'('
string|"'good'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'contents'
op|','
string|"'fakecontents'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'FileNotFound'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'read_file_as_root'
op|','
string|"'bad'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_temporary_chown
dedent|''
name|'def'
name|'test_temporary_chown'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|fake_execute
indent|' '
name|'def'
name|'fake_execute'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'args'
op|'['
number|'0'
op|']'
op|'=='
string|"'chown'"
op|':'
newline|'\n'
indent|' '
name|'fake_execute'
op|'.'
name|'uid'
op|'='
name|'args'
op|'['
number|'1'
op|']'
newline|'\n'
dedent|''
dedent|''
name|'self'
op|'.'
name|'stub_out'
op|'('
string|"'nova.utils.execute'"
op|','
name|'fake_execute'
op|')'
newline|'\n'
nl|'\n'
name|'with'
name|'tempfile'
op|'.'
name|'NamedTemporaryFile'
op|'('
op|')'
name|'as'
name|'f'
op|':'
newline|'\n'
indent|' '
name|'with'
name|'utils'
op|'.'
name|'temporary_chown'
op|'('
name|'f'
op|'.'
name|'name'
op|','
name|'owner_uid'
op|'='
number|'2'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'fake_execute'
op|'.'
name|'uid'
op|','
number|'2'
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'fake_execute'
op|'.'
name|'uid'
op|','
name|'os'
op|'.'
name|'getuid'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_xhtml_escape
dedent|''
dedent|''
name|'def'
name|'test_xhtml_escape'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'"foo"'"
op|','
name|'utils'
op|'.'
name|'xhtml_escape'
op|'('
string|'\'"foo"\''
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"''foo''"
op|','
name|'utils'
op|'.'
name|'xhtml_escape'
op|'('
string|'"\'foo\'"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'&'"
op|','
name|'utils'
op|'.'
name|'xhtml_escape'
op|'('
string|"'&'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'>'"
op|','
name|'utils'
op|'.'
name|'xhtml_escape'
op|'('
string|"'>'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'<'"
op|','
name|'utils'
op|'.'
name|'xhtml_escape'
op|'('
string|"'<'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'<foo>'"
op|','
name|'utils'
op|'.'
name|'xhtml_escape'
op|'('
string|"'<foo>'"
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_valid_ipv6_cidr
dedent|''
name|'def'
name|'test_is_valid_ipv6_cidr'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'utils'
op|'.'
name|'is_valid_ipv6_cidr'
op|'('
string|'"2600::/64"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'utils'
op|'.'
name|'is_valid_ipv6_cidr'
op|'('
nl|'\n'
string|'"abcd:ef01:2345:6789:abcd:ef01:192.168.254.254/48"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'utils'
op|'.'
name|'is_valid_ipv6_cidr'
op|'('
nl|'\n'
string|'"0000:0000:0000:0000:0000:0000:0000:0001/32"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'utils'
op|'.'
name|'is_valid_ipv6_cidr'
op|'('
nl|'\n'
string|'"0000:0000:0000:0000:0000:0000:0000:0001"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'utils'
op|'.'
name|'is_valid_ipv6_cidr'
op|'('
string|'"foo"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'utils'
op|'.'
name|'is_valid_ipv6_cidr'
op|'('
string|'"127.0.0.1"'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_shortened_ipv6
dedent|''
name|'def'
name|'test_get_shortened_ipv6'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"abcd:ef01:2345:6789:abcd:ef01:c0a8:fefe"'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'get_shortened_ipv6'
op|'('
nl|'\n'
string|'"abcd:ef01:2345:6789:abcd:ef01:192.168.254.254"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"::1"'
op|','
name|'utils'
op|'.'
name|'get_shortened_ipv6'
op|'('
nl|'\n'
string|'"0000:0000:0000:0000:0000:0000:0000:0001"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"caca::caca:0:babe:201:102"'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'get_shortened_ipv6'
op|'('
nl|'\n'
string|'"caca:0000:0000:caca:0000:babe:0201:0102"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'netaddr'
op|'.'
name|'AddrFormatError'
op|','
name|'utils'
op|'.'
name|'get_shortened_ipv6'
op|','
nl|'\n'
string|'"127.0.0.1"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'netaddr'
op|'.'
name|'AddrFormatError'
op|','
name|'utils'
op|'.'
name|'get_shortened_ipv6'
op|','
nl|'\n'
string|'"failure"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_shortened_ipv6_cidr
dedent|''
name|'def'
name|'test_get_shortened_ipv6_cidr'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"2600::/64"'
op|','
name|'utils'
op|'.'
name|'get_shortened_ipv6_cidr'
op|'('
nl|'\n'
string|'"2600:0000:0000:0000:0000:0000:0000:0000/64"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"2600::/64"'
op|','
name|'utils'
op|'.'
name|'get_shortened_ipv6_cidr'
op|'('
nl|'\n'
string|'"2600::1/64"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'netaddr'
op|'.'
name|'AddrFormatError'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'get_shortened_ipv6_cidr'
op|','
nl|'\n'
string|'"127.0.0.1"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'netaddr'
op|'.'
name|'AddrFormatError'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'get_shortened_ipv6_cidr'
op|','
nl|'\n'
string|'"failure"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_safe_ip_format
dedent|''
name|'def'
name|'test_safe_ip_format'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"[::1]"'
op|','
name|'utils'
op|'.'
name|'safe_ip_format'
op|'('
string|'"::1"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"127.0.0.1"'
op|','
name|'utils'
op|'.'
name|'safe_ip_format'
op|'('
string|'"127.0.0.1"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"[::ffff:127.0.0.1]"'
op|','
name|'utils'
op|'.'
name|'safe_ip_format'
op|'('
nl|'\n'
string|'"::ffff:127.0.0.1"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"localhost"'
op|','
name|'utils'
op|'.'
name|'safe_ip_format'
op|'('
string|'"localhost"'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_get_hash_str
dedent|''
name|'def'
name|'test_get_hash_str'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'base_str'
op|'='
string|'b"foo"'
newline|'\n'
name|'base_unicode'
op|'='
string|'u"foo"'
newline|'\n'
name|'value'
op|'='
name|'hashlib'
op|'.'
name|'md5'
op|'('
name|'base_str'
op|')'
op|'.'
name|'hexdigest'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'value'
op|','
name|'utils'
op|'.'
name|'get_hash_str'
op|'('
name|'base_str'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'value'
op|','
name|'utils'
op|'.'
name|'get_hash_str'
op|'('
name|'base_unicode'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_use_rootwrap
dedent|''
name|'def'
name|'test_use_rootwrap'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'disable_rootwrap'
op|'='
name|'False'
op|','
name|'group'
op|'='
string|"'workarounds'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'rootwrap_config'
op|'='
string|"'foo'"
op|')'
newline|'\n'
name|'cmd'
op|'='
name|'utils'
op|'.'
name|'get_root_helper'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'sudo nova-rootwrap foo'"
op|','
name|'cmd'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.RootwrapProcessHelper'"
op|')'
newline|'\n'
DECL|member|test_get_root_helper_proc
name|'def'
name|'test_get_root_helper_proc'
op|'('
name|'self'
op|','
name|'mock_proc_helper'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'use_rootwrap_daemon'
op|'='
name|'False'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'rootwrap_config'
op|'='
string|'"/path/to/conf"'
op|')'
newline|'\n'
name|'utils'
op|'.'
name|'_get_rootwrap_helper'
op|'('
op|')'
newline|'\n'
name|'mock_proc_helper'
op|'.'
name|'assert_called_once_with'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.RootwrapDaemonHelper'"
op|')'
newline|'\n'
DECL|member|test_get_root_helper_daemon
name|'def'
name|'test_get_root_helper_daemon'
op|'('
name|'self'
op|','
name|'mock_daemon_helper'
op|')'
op|':'
newline|'\n'
indent|' '
name|'conf_path'
op|'='
string|"'/path/to/conf'"
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'use_rootwrap_daemon'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
name|'rootwrap_config'
op|'='
name|'conf_path'
op|')'
newline|'\n'
name|'utils'
op|'.'
name|'_get_rootwrap_helper'
op|'('
op|')'
newline|'\n'
name|'mock_daemon_helper'
op|'.'
name|'assert_called_once_with'
op|'('
name|'conf_path'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_use_sudo
dedent|''
name|'def'
name|'test_use_sudo'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'disable_rootwrap'
op|'='
name|'True'
op|','
name|'group'
op|'='
string|"'workarounds'"
op|')'
newline|'\n'
name|'cmd'
op|'='
name|'utils'
op|'.'
name|'get_root_helper'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'sudo'"
op|','
name|'cmd'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_ssh_execute
dedent|''
name|'def'
name|'test_ssh_execute'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'expected_args'
op|'='
op|'('
string|"'ssh'"
op|','
string|"'-o'"
op|','
string|"'BatchMode=yes'"
op|','
nl|'\n'
string|"'remotehost'"
op|','
string|"'ls'"
op|','
string|"'-l'"
op|')'
newline|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.execute'"
op|')'
name|'as'
name|'mock_method'
op|':'
newline|'\n'
indent|' '
name|'utils'
op|'.'
name|'ssh_execute'
op|'('
string|"'remotehost'"
op|','
string|"'ls'"
op|','
string|"'-l'"
op|')'
newline|'\n'
dedent|''
name|'mock_method'
op|'.'
name|'assert_called_once_with'
op|'('
op|'*'
name|'expected_args'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestCachedFile
dedent|''
dedent|''
name|'class'
name|'TestCachedFile'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.path.getmtime'"
op|','
name|'return_value'
op|'='
number|'1'
op|')'
newline|'\n'
DECL|member|test_read_cached_file
name|'def'
name|'test_read_cached_file'
op|'('
name|'self'
op|','
name|'getmtime'
op|')'
op|':'
newline|'\n'
indent|' '
name|'utils'
op|'.'
name|'_FILE_CACHE'
op|'='
op|'{'
nl|'\n'
string|"'/this/is/a/fake'"
op|':'
op|'{'
string|'"data"'
op|':'
number|'1123'
op|','
string|'"mtime"'
op|':'
number|'1'
op|'}'
nl|'\n'
op|'}'
newline|'\n'
name|'fresh'
op|','
name|'data'
op|'='
name|'utils'
op|'.'
name|'read_cached_file'
op|'('
string|'"/this/is/a/fake"'
op|')'
newline|'\n'
name|'fdata'
op|'='
name|'utils'
op|'.'
name|'_FILE_CACHE'
op|'['
string|"'/this/is/a/fake'"
op|']'
op|'['
string|'"data"'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'fdata'
op|','
name|'data'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'os.path.getmtime'"
op|','
name|'return_value'
op|'='
number|'2'
op|')'
newline|'\n'
DECL|member|test_read_modified_cached_file
name|'def'
name|'test_read_modified_cached_file'
op|'('
name|'self'
op|','
name|'getmtime'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
name|'utils'
op|'.'
name|'_FILE_CACHE'
op|'='
op|'{'
nl|'\n'
string|"'/this/is/a/fake'"
op|':'
op|'{'
string|'"data"'
op|':'
number|'1123'
op|','
string|'"mtime"'
op|':'
number|'1'
op|'}'
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'fake_contents'
op|'='
string|'"lorem ipsum"'
newline|'\n'
nl|'\n'
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'six.moves.builtins.open'"
op|','
nl|'\n'
name|'mock'
op|'.'
name|'mock_open'
op|'('
name|'read_data'
op|'='
name|'fake_contents'
op|')'
op|')'
op|':'
newline|'\n'
indent|' '
name|'fresh'
op|','
name|'data'
op|'='
name|'utils'
op|'.'
name|'read_cached_file'
op|'('
string|'"/this/is/a/fake"'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'data'
op|','
name|'fake_contents'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'fresh'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_delete_cached_file
dedent|''
name|'def'
name|'test_delete_cached_file'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'filename'
op|'='
string|"'/this/is/a/fake/deletion/of/cached/file'"
newline|'\n'
name|'utils'
op|'.'
name|'_FILE_CACHE'
op|'='
op|'{'
nl|'\n'
name|'filename'
op|':'
op|'{'
string|'"data"'
op|':'
number|'1123'
op|','
string|'"mtime"'
op|':'
number|'1'
op|'}'
nl|'\n'
op|'}'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'filename'
op|','
name|'utils'
op|'.'
name|'_FILE_CACHE'
op|')'
newline|'\n'
name|'utils'
op|'.'
name|'delete_cached_file'
op|'('
name|'filename'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'filename'
op|','
name|'utils'
op|'.'
name|'_FILE_CACHE'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_delete_cached_file_not_exist
dedent|''
name|'def'
name|'test_delete_cached_file_not_exist'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# We expect that if cached file does not exist no Exception raised.'
nl|'\n'
indent|' '
name|'filename'
op|'='
string|"'/this/is/a/fake/deletion/attempt/of/not/cached/file'"
newline|'\n'
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'filename'
op|','
name|'utils'
op|'.'
name|'_FILE_CACHE'
op|')'
newline|'\n'
name|'utils'
op|'.'
name|'delete_cached_file'
op|'('
name|'filename'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'filename'
op|','
name|'utils'
op|'.'
name|'_FILE_CACHE'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|RootwrapDaemonTesetCase
dedent|''
dedent|''
name|'class'
name|'RootwrapDaemonTesetCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'oslo_rootwrap.client.Client'"
op|')'
newline|'\n'
DECL|member|test_get_client
name|'def'
name|'test_get_client'
op|'('
name|'self'
op|','
name|'mock_client'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'mock_client'
op|'.'
name|'assert_called_once_with'
op|'('
nl|'\n'
op|'['
string|'"sudo"'
op|','
string|'"nova-rootwrap-daemon"'
op|','
name|'mock_conf'
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.LOG.info'"
op|')'
newline|'\n'
DECL|member|test_execute
name|'def'
name|'test_execute'
op|'('
name|'self'
op|','
name|'mock_info'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'='
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'return_value'
op|'='
op|'('
number|'0'
op|','
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'daemon'
op|'.'
name|'execute'
op|'('
string|"'a'"
op|','
number|'1'
op|','
name|'foo'
op|'='
string|"'bar'"
op|','
name|'run_as_root'
op|'='
name|'True'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'.'
name|'assert_called_once_with'
op|'('
op|'['
string|"'a'"
op|','
string|"'1'"
op|']'
op|','
name|'None'
op|')'
newline|'\n'
name|'mock_info'
op|'.'
name|'assert_has_calls'
op|'('
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
nl|'\n'
string|"u'Executing RootwrapDaemonHelper.execute cmd=[%(cmd)r] '"
nl|'\n'
string|"u'kwargs=[%(kwargs)r]'"
op|','
nl|'\n'
op|'{'
string|"'cmd'"
op|':'
string|"u'a 1'"
op|','
string|"'kwargs'"
op|':'
op|'{'
string|"'run_as_root'"
op|':'
name|'True'
op|','
string|"'foo'"
op|':'
string|"'bar'"
op|'}'
op|'}'
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_execute_with_kwargs
dedent|''
name|'def'
name|'test_execute_with_kwargs'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'='
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'return_value'
op|'='
op|'('
number|'0'
op|','
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'daemon'
op|'.'
name|'execute'
op|'('
string|"'a'"
op|','
number|'1'
op|','
name|'foo'
op|'='
string|"'bar'"
op|','
name|'run_as_root'
op|'='
name|'True'
op|','
name|'process_input'
op|'='
name|'True'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'.'
name|'assert_called_once_with'
op|'('
op|'['
string|"'a'"
op|','
string|"'1'"
op|']'
op|','
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_execute_fail
dedent|''
name|'def'
name|'test_execute_fail'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'='
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'return_value'
op|'='
op|'('
op|'-'
number|'2'
op|','
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'processutils'
op|'.'
name|'ProcessExecutionError'
op|','
nl|'\n'
name|'daemon'
op|'.'
name|'execute'
op|','
string|"'b'"
op|','
number|'2'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_execute_pass_with_check_exit_code
dedent|''
name|'def'
name|'test_execute_pass_with_check_exit_code'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'='
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'return_value'
op|'='
op|'('
op|'-'
number|'2'
op|','
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'execute'
op|'('
string|"'b'"
op|','
number|'2'
op|','
name|'check_exit_code'
op|'='
op|'['
op|'-'
number|'2'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_execute_fail_with_retry
dedent|''
name|'def'
name|'test_execute_fail_with_retry'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'='
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'return_value'
op|'='
op|'('
op|'-'
number|'2'
op|','
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'processutils'
op|'.'
name|'ProcessExecutionError'
op|','
nl|'\n'
name|'daemon'
op|'.'
name|'execute'
op|','
string|"'b'"
op|','
number|'2'
op|','
name|'attempts'
op|'='
number|'2'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'.'
name|'assert_has_calls'
op|'('
nl|'\n'
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
op|'['
string|"'b'"
op|','
string|"'2'"
op|']'
op|','
name|'None'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
op|'['
string|"'b'"
op|','
string|"'2'"
op|']'
op|','
name|'None'
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.LOG.log'"
op|')'
newline|'\n'
DECL|member|test_execute_fail_and_logging
name|'def'
name|'test_execute_fail_and_logging'
op|'('
name|'self'
op|','
name|'mock_log'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'='
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'return_value'
op|'='
op|'('
op|'-'
number|'2'
op|','
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'processutils'
op|'.'
name|'ProcessExecutionError'
op|','
nl|'\n'
name|'daemon'
op|'.'
name|'execute'
op|','
string|"'b'"
op|','
number|'2'
op|','
nl|'\n'
name|'attempts'
op|'='
number|'2'
op|','
nl|'\n'
name|'loglevel'
op|'='
name|'logging'
op|'.'
name|'CRITICAL'
op|','
nl|'\n'
name|'log_errors'
op|'='
name|'processutils'
op|'.'
name|'LOG_ALL_ERRORS'
op|')'
newline|'\n'
name|'mock_log'
op|'.'
name|'assert_has_calls'
op|'('
nl|'\n'
op|'['
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'logging'
op|'.'
name|'CRITICAL'
op|','
string|"u'Running cmd (subprocess): %s'"
op|','
nl|'\n'
string|"u'b 2'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'logging'
op|'.'
name|'CRITICAL'
op|','
nl|'\n'
string|'\'CMD "%(sanitized_cmd)s" returned: %(return_code)s \''
nl|'\n'
string|"'in %(end_time)0.3fs'"
op|','
nl|'\n'
op|'{'
string|"'sanitized_cmd'"
op|':'
string|"u'b 2'"
op|','
string|"'return_code'"
op|':'
op|'-'
number|'2'
op|','
nl|'\n'
string|"'end_time'"
op|':'
name|'mock'
op|'.'
name|'ANY'
op|'}'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'logging'
op|'.'
name|'CRITICAL'
op|','
nl|'\n'
string|"u'%(desc)r\\ncommand: %(cmd)r\\nexit code: %(code)r'"
nl|'\n'
string|"u'\\nstdout: %(stdout)r\\nstderr: %(stderr)r'"
op|','
nl|'\n'
op|'{'
string|"'code'"
op|':'
op|'-'
number|'2'
op|','
string|"'cmd'"
op|':'
string|"u'b 2'"
op|','
string|"'stdout'"
op|':'
string|"u'None'"
op|','
nl|'\n'
string|"'stderr'"
op|':'
string|"u'None'"
op|','
string|"'desc'"
op|':'
name|'None'
op|'}'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'logging'
op|'.'
name|'CRITICAL'
op|','
string|"u'%r failed. Retrying.'"
op|','
string|"u'b 2'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'logging'
op|'.'
name|'CRITICAL'
op|','
string|"u'Running cmd (subprocess): %s'"
op|','
nl|'\n'
string|"u'b 2'"
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'logging'
op|'.'
name|'CRITICAL'
op|','
nl|'\n'
string|'\'CMD "%(sanitized_cmd)s" returned: %(return_code)s \''
nl|'\n'
string|"'in %(end_time)0.3fs'"
op|','
nl|'\n'
op|'{'
string|"'sanitized_cmd'"
op|':'
string|"u'b 2'"
op|','
string|"'return_code'"
op|':'
op|'-'
number|'2'
op|','
nl|'\n'
string|"'end_time'"
op|':'
name|'mock'
op|'.'
name|'ANY'
op|'}'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'logging'
op|'.'
name|'CRITICAL'
op|','
nl|'\n'
string|"u'%(desc)r\\ncommand: %(cmd)r\\nexit code: %(code)r'"
nl|'\n'
string|"u'\\nstdout: %(stdout)r\\nstderr: %(stderr)r'"
op|','
nl|'\n'
op|'{'
string|"'code'"
op|':'
op|'-'
number|'2'
op|','
string|"'cmd'"
op|':'
string|"u'b 2'"
op|','
string|"'stdout'"
op|':'
string|"u'None'"
op|','
nl|'\n'
string|"'stderr'"
op|':'
string|"u'None'"
op|','
string|"'desc'"
op|':'
name|'None'
op|'}'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
name|'logging'
op|'.'
name|'CRITICAL'
op|','
string|"u'%r failed. Not Retrying.'"
op|','
nl|'\n'
string|"u'b 2'"
op|')'
op|']'
nl|'\n'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_trycmd
dedent|''
name|'def'
name|'test_trycmd'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'='
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'return_value'
op|'='
op|'('
number|'0'
op|','
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'daemon'
op|'.'
name|'trycmd'
op|'('
string|"'a'"
op|','
number|'1'
op|','
name|'foo'
op|'='
string|"'bar'"
op|','
name|'run_as_root'
op|'='
name|'True'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'.'
name|'assert_called_once_with'
op|'('
op|'['
string|"'a'"
op|','
string|"'1'"
op|']'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_trycmd_with_kwargs
dedent|''
name|'def'
name|'test_trycmd_with_kwargs'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'='
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'execute'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'return_value'
op|'='
op|'('
string|"'out'"
op|','
string|"'err'"
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'daemon'
op|'.'
name|'trycmd'
op|'('
string|"'a'"
op|','
number|'1'
op|','
name|'foo'
op|'='
string|"'bar'"
op|','
name|'run_as_root'
op|'='
name|'True'
op|','
nl|'\n'
name|'loglevel'
op|'='
name|'logging'
op|'.'
name|'WARN'
op|','
nl|'\n'
name|'log_errors'
op|'='
name|'True'
op|','
nl|'\n'
name|'process_input'
op|'='
name|'True'
op|','
nl|'\n'
name|'delay_on_retry'
op|'='
name|'False'
op|','
nl|'\n'
name|'attempts'
op|'='
number|'5'
op|','
nl|'\n'
name|'check_exit_code'
op|'='
op|'['
number|'200'
op|']'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'execute'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'a'"
op|','
number|'1'
op|','
name|'attempts'
op|'='
number|'5'
op|','
nl|'\n'
name|'check_exit_code'
op|'='
op|'['
number|'200'
op|']'
op|','
nl|'\n'
name|'delay_on_retry'
op|'='
name|'False'
op|','
name|'foo'
op|'='
string|"'bar'"
op|','
nl|'\n'
name|'log_errors'
op|'='
name|'True'
op|','
name|'loglevel'
op|'='
number|'30'
op|','
nl|'\n'
name|'process_input'
op|'='
name|'True'
op|','
nl|'\n'
name|'run_as_root'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_trycmd_fail
dedent|''
name|'def'
name|'test_trycmd_fail'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'='
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'return_value'
op|'='
op|'('
op|'-'
number|'2'
op|','
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'expected_err'
op|'='
name|'six'
op|'.'
name|'text_type'
op|'('
string|"'''\\\nUnexpected error while running command.\nCommand: a 1\nExit code: -2'''"
op|')'
newline|'\n'
nl|'\n'
name|'out'
op|','
name|'err'
op|'='
name|'daemon'
op|'.'
name|'trycmd'
op|'('
string|"'a'"
op|','
number|'1'
op|','
name|'foo'
op|'='
string|"'bar'"
op|','
name|'run_as_root'
op|'='
name|'True'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'.'
name|'assert_called_once_with'
op|'('
op|'['
string|"'a'"
op|','
string|"'1'"
op|']'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'expected_err'
op|','
name|'err'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_trycmd_fail_with_rety
dedent|''
name|'def'
name|'test_trycmd_fail_with_rety'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'mock_conf'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'='
name|'utils'
op|'.'
name|'RootwrapDaemonHelper'
op|'('
name|'mock_conf'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'='
name|'mock'
op|'.'
name|'Mock'
op|'('
name|'return_value'
op|'='
op|'('
op|'-'
number|'2'
op|','
name|'None'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'expected_err'
op|'='
name|'six'
op|'.'
name|'text_type'
op|'('
string|"'''\\\nUnexpected error while running command.\nCommand: a 1\nExit code: -2'''"
op|')'
newline|'\n'
nl|'\n'
name|'out'
op|','
name|'err'
op|'='
name|'daemon'
op|'.'
name|'trycmd'
op|'('
string|"'a'"
op|','
number|'1'
op|','
name|'foo'
op|'='
string|"'bar'"
op|','
name|'run_as_root'
op|'='
name|'True'
op|','
nl|'\n'
name|'attempts'
op|'='
number|'3'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'expected_err'
op|','
name|'err'
op|')'
newline|'\n'
name|'daemon'
op|'.'
name|'client'
op|'.'
name|'execute'
op|'.'
name|'assert_has_calls'
op|'('
nl|'\n'
op|'['
name|'mock'
op|'.'
name|'call'
op|'('
op|'['
string|"'a'"
op|','
string|"'1'"
op|']'
op|','
name|'None'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
op|'['
string|"'a'"
op|','
string|"'1'"
op|']'
op|','
name|'None'
op|')'
op|','
nl|'\n'
name|'mock'
op|'.'
name|'call'
op|'('
op|'['
string|"'a'"
op|','
string|"'1'"
op|']'
op|','
name|'None'
op|')'
op|']'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VPNPingTestCase
dedent|''
dedent|''
name|'class'
name|'VPNPingTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Unit tests for utils.vpn_ping()."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'VPNPingTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'port'
op|'='
string|"'fake'"
newline|'\n'
name|'self'
op|'.'
name|'address'
op|'='
string|"'fake'"
newline|'\n'
name|'self'
op|'.'
name|'session_id'
op|'='
number|'0x1234'
newline|'\n'
name|'self'
op|'.'
name|'fmt'
op|'='
string|"'!BQxxxxxQxxxx'"
newline|'\n'
nl|'\n'
DECL|member|fake_reply_packet
dedent|''
name|'def'
name|'fake_reply_packet'
op|'('
name|'self'
op|','
name|'pkt_id'
op|'='
number|'0x40'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'struct'
op|'.'
name|'pack'
op|'('
name|'self'
op|'.'
name|'fmt'
op|','
name|'pkt_id'
op|','
number|'0x0'
op|','
name|'self'
op|'.'
name|'session_id'
op|')'
newline|'\n'
nl|'\n'
DECL|member|setup_socket
dedent|''
name|'def'
name|'setup_socket'
op|'('
name|'self'
op|','
name|'mock_socket'
op|','
name|'return_value'
op|','
name|'side_effect'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'socket_obj'
op|'='
name|'mock'
op|'.'
name|'MagicMock'
op|'('
op|')'
newline|'\n'
name|'if'
name|'side_effect'
name|'is'
name|'not'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'socket_obj'
op|'.'
name|'recv'
op|'.'
name|'side_effect'
op|'='
name|'side_effect'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'socket_obj'
op|'.'
name|'recv'
op|'.'
name|'return_value'
op|'='
name|'return_value'
newline|'\n'
dedent|''
name|'mock_socket'
op|'.'
name|'return_value'
op|'='
name|'socket_obj'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'socket'
op|','
string|"'socket'"
op|')'
newline|'\n'
DECL|member|test_vpn_ping_timeout
name|'def'
name|'test_vpn_ping_timeout'
op|'('
name|'self'
op|','
name|'mock_socket'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Server doesn\'t reply within timeout."""'
newline|'\n'
name|'self'
op|'.'
name|'setup_socket'
op|'('
name|'mock_socket'
op|','
name|'None'
op|','
name|'socket'
op|'.'
name|'timeout'
op|')'
newline|'\n'
name|'rc'
op|'='
name|'utils'
op|'.'
name|'vpn_ping'
op|'('
name|'self'
op|'.'
name|'address'
op|','
name|'self'
op|'.'
name|'port'
op|','
nl|'\n'
name|'session_id'
op|'='
name|'self'
op|'.'
name|'session_id'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'rc'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'socket'
op|','
string|"'socket'"
op|')'
newline|'\n'
DECL|member|test_vpn_ping_bad_len
name|'def'
name|'test_vpn_ping_bad_len'
op|'('
name|'self'
op|','
name|'mock_socket'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Test a short/invalid server reply."""'
newline|'\n'
name|'self'
op|'.'
name|'setup_socket'
op|'('
name|'mock_socket'
op|','
string|"'fake_reply'"
op|')'
newline|'\n'
name|'rc'
op|'='
name|'utils'
op|'.'
name|'vpn_ping'
op|'('
name|'self'
op|'.'
name|'address'
op|','
name|'self'
op|'.'
name|'port'
op|','
nl|'\n'
name|'session_id'
op|'='
name|'self'
op|'.'
name|'session_id'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'rc'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'socket'
op|','
string|"'socket'"
op|')'
newline|'\n'
DECL|member|test_vpn_ping_bad_id
name|'def'
name|'test_vpn_ping_bad_id'
op|'('
name|'self'
op|','
name|'mock_socket'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Server sends an unknown packet ID."""'
newline|'\n'
name|'self'
op|'.'
name|'setup_socket'
op|'('
name|'mock_socket'
op|','
name|'self'
op|'.'
name|'fake_reply_packet'
op|'('
name|'pkt_id'
op|'='
number|'0x41'
op|')'
op|')'
newline|'\n'
name|'rc'
op|'='
name|'utils'
op|'.'
name|'vpn_ping'
op|'('
name|'self'
op|'.'
name|'address'
op|','
name|'self'
op|'.'
name|'port'
op|','
nl|'\n'
name|'session_id'
op|'='
name|'self'
op|'.'
name|'session_id'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'rc'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'socket'
op|','
string|"'socket'"
op|')'
newline|'\n'
DECL|member|test_vpn_ping_ok
name|'def'
name|'test_vpn_ping_ok'
op|'('
name|'self'
op|','
name|'mock_socket'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'setup_socket'
op|'('
name|'mock_socket'
op|','
name|'self'
op|'.'
name|'fake_reply_packet'
op|'('
op|')'
op|')'
newline|'\n'
name|'rc'
op|'='
name|'utils'
op|'.'
name|'vpn_ping'
op|'('
name|'self'
op|'.'
name|'address'
op|','
name|'self'
op|'.'
name|'port'
op|','
nl|'\n'
name|'session_id'
op|'='
name|'self'
op|'.'
name|'session_id'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'rc'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|MonkeyPatchTestCase
dedent|''
dedent|''
name|'class'
name|'MonkeyPatchTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Unit test for utils.monkey_patch()."""'
newline|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'MonkeyPatchTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'example_package'
op|'='
string|"'nova.tests.unit.monkey_patch_example.'"
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
nl|'\n'
name|'monkey_patch'
op|'='
name|'True'
op|','
nl|'\n'
name|'monkey_patch_modules'
op|'='
op|'['
name|'self'
op|'.'
name|'example_package'
op|'+'
string|"'example_a'"
op|'+'
string|"':'"
nl|'\n'
op|'+'
name|'self'
op|'.'
name|'example_package'
op|'+'
string|"'example_decorator'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_monkey_patch
dedent|''
name|'def'
name|'test_monkey_patch'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'utils'
op|'.'
name|'monkey_patch'
op|'('
op|')'
newline|'\n'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'monkey_patch_example'
op|'.'
name|'CALLED_FUNCTION'
op|'='
op|'['
op|']'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'monkey_patch_example'
name|'import'
name|'example_a'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'monkey_patch_example'
name|'import'
name|'example_b'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'Example function'"
op|','
name|'example_a'
op|'.'
name|'example_function_a'
op|'('
op|')'
op|')'
newline|'\n'
name|'exampleA'
op|'='
name|'example_a'
op|'.'
name|'ExampleClassA'
op|'('
op|')'
newline|'\n'
name|'exampleA'
op|'.'
name|'example_method'
op|'('
op|')'
newline|'\n'
name|'ret_a'
op|'='
name|'exampleA'
op|'.'
name|'example_method_add'
op|'('
number|'3'
op|','
number|'5'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'ret_a'
op|','
number|'8'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'Example function'"
op|','
name|'example_b'
op|'.'
name|'example_function_b'
op|'('
op|')'
op|')'
newline|'\n'
name|'exampleB'
op|'='
name|'example_b'
op|'.'
name|'ExampleClassB'
op|'('
op|')'
newline|'\n'
name|'exampleB'
op|'.'
name|'example_method'
op|'('
op|')'
newline|'\n'
name|'ret_b'
op|'='
name|'exampleB'
op|'.'
name|'example_method_add'
op|'('
number|'3'
op|','
number|'5'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'ret_b'
op|','
number|'8'
op|')'
newline|'\n'
name|'package_a'
op|'='
name|'self'
op|'.'
name|'example_package'
op|'+'
string|"'example_a.'"
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'package_a'
op|'+'
string|"'example_function_a'"
op|','
nl|'\n'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'monkey_patch_example'
op|'.'
name|'CALLED_FUNCTION'
op|')'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'package_a'
op|'+'
string|"'ExampleClassA.example_method'"
op|','
nl|'\n'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'monkey_patch_example'
op|'.'
name|'CALLED_FUNCTION'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIn'
op|'('
name|'package_a'
op|'+'
string|"'ExampleClassA.example_method_add'"
op|','
nl|'\n'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'monkey_patch_example'
op|'.'
name|'CALLED_FUNCTION'
op|')'
newline|'\n'
name|'package_b'
op|'='
name|'self'
op|'.'
name|'example_package'
op|'+'
string|"'example_b.'"
newline|'\n'
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'package_b'
op|'+'
string|"'example_function_b'"
op|','
nl|'\n'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'monkey_patch_example'
op|'.'
name|'CALLED_FUNCTION'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'package_b'
op|'+'
string|"'ExampleClassB.example_method'"
op|','
nl|'\n'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'monkey_patch_example'
op|'.'
name|'CALLED_FUNCTION'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'package_b'
op|'+'
string|"'ExampleClassB.example_method_add'"
op|','
nl|'\n'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'monkey_patch_example'
op|'.'
name|'CALLED_FUNCTION'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|MonkeyPatchDefaultTestCase
dedent|''
dedent|''
name|'class'
name|'MonkeyPatchDefaultTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Unit test for default monkey_patch_modules value."""'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'MonkeyPatchDefaultTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'flags'
op|'('
nl|'\n'
name|'monkey_patch'
op|'='
name|'True'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_monkey_patch_default_mod
dedent|''
name|'def'
name|'test_monkey_patch_default_mod'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# monkey_patch_modules is defined to be'
nl|'\n'
comment|'# <module_to_patch>:<decorator_to_patch_with>'
nl|'\n'
comment|'# Here we check that both parts of the default values are'
nl|'\n'
comment|'# valid'
nl|'\n'
indent|' '
name|'for'
name|'module'
name|'in'
name|'CONF'
op|'.'
name|'monkey_patch_modules'
op|':'
newline|'\n'
indent|' '
name|'m'
op|'='
name|'module'
op|'.'
name|'split'
op|'('
string|"':'"
op|','
number|'1'
op|')'
newline|'\n'
comment|'# Check we can import the module to be patched'
nl|'\n'
name|'importlib'
op|'.'
name|'import_module'
op|'('
name|'m'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
comment|'# check the decorator is valid'
nl|'\n'
name|'decorator_name'
op|'='
name|'m'
op|'['
number|'1'
op|']'
op|'.'
name|'rsplit'
op|'('
string|"'.'"
op|','
number|'1'
op|')'
newline|'\n'
name|'decorator_module'
op|'='
name|'importlib'
op|'.'
name|'import_module'
op|'('
name|'decorator_name'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'getattr'
op|'('
name|'decorator_module'
op|','
name|'decorator_name'
op|'['
number|'1'
op|']'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|AuditPeriodTest
dedent|''
dedent|''
dedent|''
name|'class'
name|'AuditPeriodTest'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'AuditPeriodTest'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
comment|'# a fairly random time to test with'
nl|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'utils_fixture'
op|'.'
name|'TimeFixture'
op|'('
nl|'\n'
name|'datetime'
op|'.'
name|'datetime'
op|'('
name|'second'
op|'='
number|'23'
op|','
nl|'\n'
name|'minute'
op|'='
number|'12'
op|','
nl|'\n'
name|'hour'
op|'='
number|'8'
op|','
nl|'\n'
name|'day'
op|'='
number|'5'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hour
dedent|''
name|'def'
name|'test_hour'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'hour'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'hour'
op|'='
number|'7'
op|','
nl|'\n'
name|'day'
op|'='
number|'5'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'hour'
op|'='
number|'8'
op|','
nl|'\n'
name|'day'
op|'='
number|'5'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hour_with_offset_before_current
dedent|''
name|'def'
name|'test_hour_with_offset_before_current'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'hour@10'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'minute'
op|'='
number|'10'
op|','
nl|'\n'
name|'hour'
op|'='
number|'7'
op|','
nl|'\n'
name|'day'
op|'='
number|'5'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'minute'
op|'='
number|'10'
op|','
nl|'\n'
name|'hour'
op|'='
number|'8'
op|','
nl|'\n'
name|'day'
op|'='
number|'5'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_hour_with_offset_after_current
dedent|''
name|'def'
name|'test_hour_with_offset_after_current'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'hour@30'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'minute'
op|'='
number|'30'
op|','
nl|'\n'
name|'hour'
op|'='
number|'6'
op|','
nl|'\n'
name|'day'
op|'='
number|'5'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'minute'
op|'='
number|'30'
op|','
nl|'\n'
name|'hour'
op|'='
number|'7'
op|','
nl|'\n'
name|'day'
op|'='
number|'5'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_day
dedent|''
name|'def'
name|'test_day'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'day'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'4'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'5'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_day_with_offset_before_current
dedent|''
name|'def'
name|'test_day_with_offset_before_current'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'day@6'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'hour'
op|'='
number|'6'
op|','
nl|'\n'
name|'day'
op|'='
number|'4'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'hour'
op|'='
number|'6'
op|','
nl|'\n'
name|'day'
op|'='
number|'5'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_day_with_offset_after_current
dedent|''
name|'def'
name|'test_day_with_offset_after_current'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'day@10'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'hour'
op|'='
number|'10'
op|','
nl|'\n'
name|'day'
op|'='
number|'3'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'hour'
op|'='
number|'10'
op|','
nl|'\n'
name|'day'
op|'='
number|'4'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_month
dedent|''
name|'def'
name|'test_month'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'month'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'1'
op|','
nl|'\n'
name|'month'
op|'='
number|'2'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'1'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_month_with_offset_before_current
dedent|''
name|'def'
name|'test_month_with_offset_before_current'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'month@2'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'2'
op|','
nl|'\n'
name|'month'
op|'='
number|'2'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'2'
op|','
nl|'\n'
name|'month'
op|'='
number|'3'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_month_with_offset_after_current
dedent|''
name|'def'
name|'test_month_with_offset_after_current'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'month@15'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'15'
op|','
nl|'\n'
name|'month'
op|'='
number|'1'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'15'
op|','
nl|'\n'
name|'month'
op|'='
number|'2'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_year
dedent|''
name|'def'
name|'test_year'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'year'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'1'
op|','
nl|'\n'
name|'month'
op|'='
number|'1'
op|','
nl|'\n'
name|'year'
op|'='
number|'2011'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'1'
op|','
nl|'\n'
name|'month'
op|'='
number|'1'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_year_with_offset_before_current
dedent|''
name|'def'
name|'test_year_with_offset_before_current'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'year@2'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'1'
op|','
nl|'\n'
name|'month'
op|'='
number|'2'
op|','
nl|'\n'
name|'year'
op|'='
number|'2011'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'1'
op|','
nl|'\n'
name|'month'
op|'='
number|'2'
op|','
nl|'\n'
name|'year'
op|'='
number|'2012'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_year_with_offset_after_current
dedent|''
name|'def'
name|'test_year_with_offset_after_current'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'begin'
op|','
name|'end'
op|'='
name|'utils'
op|'.'
name|'last_completed_audit_period'
op|'('
name|'unit'
op|'='
string|"'year@6'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'begin'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'1'
op|','
nl|'\n'
name|'month'
op|'='
number|'6'
op|','
nl|'\n'
name|'year'
op|'='
number|'2010'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'end'
op|','
name|'datetime'
op|'.'
name|'datetime'
op|'('
nl|'\n'
name|'day'
op|'='
number|'1'
op|','
nl|'\n'
name|'month'
op|'='
number|'6'
op|','
nl|'\n'
name|'year'
op|'='
number|'2011'
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|MkfsTestCase
dedent|''
dedent|''
name|'class'
name|'MkfsTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.execute'"
op|')'
newline|'\n'
DECL|member|test_mkfs_ext4
name|'def'
name|'test_mkfs_ext4'
op|'('
name|'self'
op|','
name|'mock_execute'
op|')'
op|':'
newline|'\n'
indent|' '
name|'utils'
op|'.'
name|'mkfs'
op|'('
string|"'ext4'"
op|','
string|"'/my/block/dev'"
op|')'
newline|'\n'
name|'mock_execute'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'mkfs'"
op|','
string|"'-t'"
op|','
string|"'ext4'"
op|','
string|"'-F'"
op|','
nl|'\n'
string|"'/my/block/dev'"
op|','
name|'run_as_root'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.execute'"
op|')'
newline|'\n'
DECL|member|test_mkfs_msdos
name|'def'
name|'test_mkfs_msdos'
op|'('
name|'self'
op|','
name|'mock_execute'
op|')'
op|':'
newline|'\n'
indent|' '
name|'utils'
op|'.'
name|'mkfs'
op|'('
string|"'msdos'"
op|','
string|"'/my/msdos/block/dev'"
op|')'
newline|'\n'
name|'mock_execute'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'mkfs'"
op|','
string|"'-t'"
op|','
string|"'msdos'"
op|','
nl|'\n'
string|"'/my/msdos/block/dev'"
op|','
name|'run_as_root'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.execute'"
op|')'
newline|'\n'
DECL|member|test_mkfs_swap
name|'def'
name|'test_mkfs_swap'
op|'('
name|'self'
op|','
name|'mock_execute'
op|')'
op|':'
newline|'\n'
indent|' '
name|'utils'
op|'.'
name|'mkfs'
op|'('
string|"'swap'"
op|','
string|"'/my/swap/block/dev'"
op|')'
newline|'\n'
name|'mock_execute'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'mkswap'"
op|','
string|"'/my/swap/block/dev'"
op|','
nl|'\n'
name|'run_as_root'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.execute'"
op|')'
newline|'\n'
DECL|member|test_mkfs_ext4_withlabel
name|'def'
name|'test_mkfs_ext4_withlabel'
op|'('
name|'self'
op|','
name|'mock_execute'
op|')'
op|':'
newline|'\n'
indent|' '
name|'utils'
op|'.'
name|'mkfs'
op|'('
string|"'ext4'"
op|','
string|"'/my/block/dev'"
op|','
string|"'ext4-vol'"
op|')'
newline|'\n'
name|'mock_execute'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'mkfs'"
op|','
string|"'-t'"
op|','
string|"'ext4'"
op|','
string|"'-F'"
op|','
nl|'\n'
string|"'-L'"
op|','
string|"'ext4-vol'"
op|','
string|"'/my/block/dev'"
op|','
name|'run_as_root'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.execute'"
op|')'
newline|'\n'
DECL|member|test_mkfs_msdos_withlabel
name|'def'
name|'test_mkfs_msdos_withlabel'
op|'('
name|'self'
op|','
name|'mock_execute'
op|')'
op|':'
newline|'\n'
indent|' '
name|'utils'
op|'.'
name|'mkfs'
op|'('
string|"'msdos'"
op|','
string|"'/my/msdos/block/dev'"
op|','
string|"'msdos-vol'"
op|')'
newline|'\n'
name|'mock_execute'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'mkfs'"
op|','
string|"'-t'"
op|','
string|"'msdos'"
op|','
nl|'\n'
string|"'-n'"
op|','
string|"'msdos-vol'"
op|','
string|"'/my/msdos/block/dev'"
op|','
name|'run_as_root'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.utils.execute'"
op|')'
newline|'\n'
DECL|member|test_mkfs_swap_withlabel
name|'def'
name|'test_mkfs_swap_withlabel'
op|'('
name|'self'
op|','
name|'mock_execute'
op|')'
op|':'
newline|'\n'
indent|' '
name|'utils'
op|'.'
name|'mkfs'
op|'('
string|"'swap'"
op|','
string|"'/my/swap/block/dev'"
op|','
string|"'swap-vol'"
op|')'
newline|'\n'
name|'mock_execute'
op|'.'
name|'assert_called_once_with'
op|'('
string|"'mkswap'"
op|','
string|"'-L'"
op|','
string|"'swap-vol'"
op|','
nl|'\n'
string|"'/my/swap/block/dev'"
op|','
name|'run_as_root'
op|'='
name|'False'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|LastBytesTestCase
dedent|''
dedent|''
name|'class'
name|'LastBytesTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Test the last_bytes() utility method."""'
newline|'\n'
nl|'\n'
DECL|member|setUp
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'LastBytesTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'f'
op|'='
name|'six'
op|'.'
name|'BytesIO'
op|'('
string|"b'1234567890'"
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_truncated
dedent|''
name|'def'
name|'test_truncated'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'f'
op|'.'
name|'seek'
op|'('
number|'0'
op|','
name|'os'
op|'.'
name|'SEEK_SET'
op|')'
newline|'\n'
name|'out'
op|','
name|'remaining'
op|'='
name|'utils'
op|'.'
name|'last_bytes'
op|'('
name|'self'
op|'.'
name|'f'
op|','
number|'5'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'out'
op|','
string|"b'67890'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'remaining'
op|'>'
number|'0'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_read_all
dedent|''
name|'def'
name|'test_read_all'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'f'
op|'.'
name|'seek'
op|'('
number|'0'
op|','
name|'os'
op|'.'
name|'SEEK_SET'
op|')'
newline|'\n'
name|'out'
op|','
name|'remaining'
op|'='
name|'utils'
op|'.'
name|'last_bytes'
op|'('
name|'self'
op|'.'
name|'f'
op|','
number|'1000'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'out'
op|','
string|"b'1234567890'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'remaining'
op|'>'
number|'0'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_seek_too_far_real_file
dedent|''
name|'def'
name|'test_seek_too_far_real_file'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|"# StringIO doesn't raise IOError if you see past the start of the file."
nl|'\n'
indent|' '
name|'with'
name|'tempfile'
op|'.'
name|'TemporaryFile'
op|'('
op|')'
name|'as'
name|'flo'
op|':'
newline|'\n'
indent|' '
name|'content'
op|'='
string|"b'1234567890'"
newline|'\n'
name|'flo'
op|'.'
name|'write'
op|'('
name|'content'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'('
name|'content'
op|','
number|'0'
op|')'
op|','
name|'utils'
op|'.'
name|'last_bytes'
op|'('
name|'flo'
op|','
number|'1000'
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|MetadataToDictTestCase
dedent|''
dedent|''
dedent|''
name|'class'
name|'MetadataToDictTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_metadata_to_dict
indent|' '
name|'def'
name|'test_metadata_to_dict'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'utils'
op|'.'
name|'metadata_to_dict'
op|'('
nl|'\n'
op|'['
op|'{'
string|"'key'"
op|':'
string|"'foo1'"
op|','
string|"'value'"
op|':'
string|"'bar'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'key'"
op|':'
string|"'foo2'"
op|','
string|"'value'"
op|':'
string|"'baz'"
op|'}'
op|']'
op|')'
op|','
nl|'\n'
op|'{'
string|"'foo1'"
op|':'
string|"'bar'"
op|','
string|"'foo2'"
op|':'
string|"'baz'"
op|'}'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_metadata_to_dict_with_include_deleted
dedent|''
name|'def'
name|'test_metadata_to_dict_with_include_deleted'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'metadata'
op|'='
op|'['
op|'{'
string|"'key'"
op|':'
string|"'foo1'"
op|','
string|"'value'"
op|':'
string|"'bar'"
op|','
string|"'deleted'"
op|':'
number|'1442875429'
op|','
nl|'\n'
string|"'other'"
op|':'
string|"'stuff'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'key'"
op|':'
string|"'foo2'"
op|','
string|"'value'"
op|':'
string|"'baz'"
op|','
string|"'deleted'"
op|':'
number|'0'
op|','
nl|'\n'
string|"'other'"
op|':'
string|"'stuff2'"
op|'}'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
string|"'foo1'"
op|':'
string|"'bar'"
op|','
string|"'foo2'"
op|':'
string|"'baz'"
op|'}'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'metadata_to_dict'
op|'('
name|'metadata'
op|','
nl|'\n'
name|'include_deleted'
op|'='
name|'True'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
string|"'foo2'"
op|':'
string|"'baz'"
op|'}'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'metadata_to_dict'
op|'('
name|'metadata'
op|','
nl|'\n'
name|'include_deleted'
op|'='
name|'False'
op|')'
op|')'
newline|'\n'
comment|'# verify correct default behavior'
nl|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'utils'
op|'.'
name|'metadata_to_dict'
op|'('
name|'metadata'
op|')'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'metadata_to_dict'
op|'('
name|'metadata'
op|','
nl|'\n'
name|'include_deleted'
op|'='
name|'False'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_metadata_to_dict_empty
dedent|''
name|'def'
name|'test_metadata_to_dict_empty'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
op|'}'
op|','
name|'utils'
op|'.'
name|'metadata_to_dict'
op|'('
op|'['
op|']'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
op|'}'
op|','
name|'utils'
op|'.'
name|'metadata_to_dict'
op|'('
op|'['
op|']'
op|','
name|'include_deleted'
op|'='
name|'True'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
op|'}'
op|','
name|'utils'
op|'.'
name|'metadata_to_dict'
op|'('
op|'['
op|']'
op|','
name|'include_deleted'
op|'='
name|'False'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dict_to_metadata
dedent|''
name|'def'
name|'test_dict_to_metadata'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
DECL|function|sort_key
indent|' '
name|'def'
name|'sort_key'
op|'('
name|'adict'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'sorted'
op|'('
name|'adict'
op|'.'
name|'items'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'metadata'
op|'='
name|'utils'
op|'.'
name|'dict_to_metadata'
op|'('
name|'dict'
op|'('
name|'foo1'
op|'='
string|"'bar1'"
op|','
name|'foo2'
op|'='
string|"'bar2'"
op|')'
op|')'
newline|'\n'
name|'expected'
op|'='
op|'['
op|'{'
string|"'key'"
op|':'
string|"'foo1'"
op|','
string|"'value'"
op|':'
string|"'bar1'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'key'"
op|':'
string|"'foo2'"
op|','
string|"'value'"
op|':'
string|"'bar2'"
op|'}'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'sorted'
op|'('
name|'metadata'
op|','
name|'key'
op|'='
name|'sort_key'
op|')'
op|','
nl|'\n'
name|'sorted'
op|'('
name|'expected'
op|','
name|'key'
op|'='
name|'sort_key'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dict_to_metadata_empty
dedent|''
name|'def'
name|'test_dict_to_metadata_empty'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'utils'
op|'.'
name|'dict_to_metadata'
op|'('
op|'{'
op|'}'
op|')'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ExpectedArgsTestCase
dedent|''
dedent|''
name|'class'
name|'ExpectedArgsTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_passes
indent|' '
name|'def'
name|'test_passes'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'utils'
op|'.'
name|'expects_func_args'
op|'('
string|"'foo'"
op|','
string|"'baz'"
op|')'
newline|'\n'
DECL|function|dec
name|'def'
name|'dec'
op|'('
name|'f'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'f'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'dec'
newline|'\n'
DECL|function|func
name|'def'
name|'func'
op|'('
name|'foo'
op|','
name|'bar'
op|','
name|'baz'
op|'='
string|'"lol"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
comment|'# Call to ensure nothing errors'
nl|'\n'
dedent|''
name|'func'
op|'('
name|'None'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_raises
dedent|''
name|'def'
name|'test_raises'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'utils'
op|'.'
name|'expects_func_args'
op|'('
string|"'foo'"
op|','
string|"'baz'"
op|')'
newline|'\n'
DECL|function|dec
name|'def'
name|'dec'
op|'('
name|'f'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'f'
newline|'\n'
nl|'\n'
DECL|function|func
dedent|''
name|'def'
name|'func'
op|'('
name|'bar'
op|','
name|'baz'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'TypeError'
op|','
name|'dec'
op|','
name|'func'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_var_no_of_args
dedent|''
name|'def'
name|'test_var_no_of_args'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'utils'
op|'.'
name|'expects_func_args'
op|'('
string|"'foo'"
op|')'
newline|'\n'
DECL|function|dec
name|'def'
name|'dec'
op|'('
name|'f'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'f'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'dec'
newline|'\n'
DECL|function|func
name|'def'
name|'func'
op|'('
name|'bar'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
comment|'# Call to ensure nothing errors'
nl|'\n'
dedent|''
name|'func'
op|'('
name|'None'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_more_layers
dedent|''
name|'def'
name|'test_more_layers'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
op|'@'
name|'utils'
op|'.'
name|'expects_func_args'
op|'('
string|"'foo'"
op|','
string|"'baz'"
op|')'
newline|'\n'
DECL|function|dec
name|'def'
name|'dec'
op|'('
name|'f'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'f'
newline|'\n'
nl|'\n'
DECL|function|dec_2
dedent|''
name|'def'
name|'dec_2'
op|'('
name|'f'
op|')'
op|':'
newline|'\n'
DECL|function|inner_f
indent|' '
name|'def'
name|'inner_f'
op|'('
op|'*'
name|'a'
op|','
op|'**'
name|'k'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'f'
op|'('
op|')'
newline|'\n'
dedent|''
name|'return'
name|'inner_f'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'dec_2'
newline|'\n'
DECL|function|func
name|'def'
name|'func'
op|'('
name|'bar'
op|','
name|'baz'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'TypeError'
op|','
name|'dec'
op|','
name|'func'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|StringLengthTestCase
dedent|''
dedent|''
name|'class'
name|'StringLengthTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_check_string_length
indent|' '
name|'def'
name|'test_check_string_length'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'utils'
op|'.'
name|'check_string_length'
op|'('
nl|'\n'
string|"'test'"
op|','
string|"'name'"
op|','
name|'max_length'
op|'='
number|'255'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'check_string_length'
op|','
nl|'\n'
number|'11'
op|','
string|"'name'"
op|','
name|'max_length'
op|'='
number|'255'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'check_string_length'
op|','
nl|'\n'
string|"''"
op|','
string|"'name'"
op|','
name|'min_length'
op|'='
number|'1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'check_string_length'
op|','
nl|'\n'
string|"'a'"
op|'*'
number|'256'
op|','
string|"'name'"
op|','
name|'max_length'
op|'='
number|'255'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_check_string_length_noname
dedent|''
name|'def'
name|'test_check_string_length_noname'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'utils'
op|'.'
name|'check_string_length'
op|'('
nl|'\n'
string|"'test'"
op|','
name|'max_length'
op|'='
number|'255'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'check_string_length'
op|','
nl|'\n'
number|'11'
op|','
name|'max_length'
op|'='
number|'255'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'check_string_length'
op|','
nl|'\n'
string|"''"
op|','
name|'min_length'
op|'='
number|'1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'check_string_length'
op|','
nl|'\n'
string|"'a'"
op|'*'
number|'256'
op|','
name|'max_length'
op|'='
number|'255'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ValidateIntegerTestCase
dedent|''
dedent|''
name|'class'
name|'ValidateIntegerTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_valid_inputs
indent|' '
name|'def'
name|'test_valid_inputs'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|'('
number|'42'
op|','
string|'"answer"'
op|')'
op|','
number|'42'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|'('
string|'"42"'
op|','
string|'"answer"'
op|')'
op|','
number|'42'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|'('
nl|'\n'
string|'"7"'
op|','
string|'"lucky"'
op|','
name|'min_value'
op|'='
number|'7'
op|','
name|'max_value'
op|'='
number|'8'
op|')'
op|','
number|'7'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|'('
nl|'\n'
number|'7'
op|','
string|'"lucky"'
op|','
name|'min_value'
op|'='
number|'6'
op|','
name|'max_value'
op|'='
number|'7'
op|')'
op|','
number|'7'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|'('
nl|'\n'
number|'300'
op|','
string|'"Spartaaa!!!"'
op|','
name|'min_value'
op|'='
number|'300'
op|')'
op|','
number|'300'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|'('
nl|'\n'
string|'"300"'
op|','
string|'"Spartaaa!!!"'
op|','
name|'max_value'
op|'='
number|'300'
op|')'
op|','
number|'300'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_invalid_inputs
dedent|''
name|'def'
name|'test_invalid_inputs'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|','
nl|'\n'
string|'"im-not-an-int"'
op|','
string|'"not-an-int"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|','
nl|'\n'
number|'3.14'
op|','
string|'"Pie"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|','
nl|'\n'
string|'"299"'
op|','
string|'"Sparta no-show"'
op|','
nl|'\n'
name|'min_value'
op|'='
number|'300'
op|','
name|'max_value'
op|'='
number|'300'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|','
nl|'\n'
number|'55'
op|','
string|'"doing 55 in a 54"'
op|','
nl|'\n'
name|'max_value'
op|'='
number|'54'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertRaises'
op|'('
name|'exception'
op|'.'
name|'InvalidInput'
op|','
nl|'\n'
name|'utils'
op|'.'
name|'validate_integer'
op|','
nl|'\n'
name|'six'
op|'.'
name|'unichr'
op|'('
number|'129'
op|')'
op|','
string|'"UnicodeError"'
op|','
nl|'\n'
name|'max_value'
op|'='
number|'1000'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ValidateNeutronConfiguration
dedent|''
dedent|''
name|'class'
name|'ValidateNeutronConfiguration'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_nova_network
indent|' '
name|'def'
name|'test_nova_network'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'utils'
op|'.'
name|'is_neutron'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_neutron
dedent|''
name|'def'
name|'test_neutron'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'flags'
op|'('
name|'use_neutron'
op|'='
name|'True'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'utils'
op|'.'
name|'is_neutron'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|AutoDiskConfigUtilTestCase
dedent|''
dedent|''
name|'class'
name|'AutoDiskConfigUtilTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_is_auto_disk_config_disabled
indent|' '
name|'def'
name|'test_is_auto_disk_config_disabled'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'utils'
op|'.'
name|'is_auto_disk_config_disabled'
op|'('
string|'"Disabled "'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_auto_disk_config_disabled_none
dedent|''
name|'def'
name|'test_is_auto_disk_config_disabled_none'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'utils'
op|'.'
name|'is_auto_disk_config_disabled'
op|'('
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_is_auto_disk_config_disabled_false
dedent|''
name|'def'
name|'test_is_auto_disk_config_disabled_false'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'utils'
op|'.'
name|'is_auto_disk_config_disabled'
op|'('
string|'"false"'
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|GetSystemMetadataFromImageTestCase
dedent|''
dedent|''
name|'class'
name|'GetSystemMetadataFromImageTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|get_image
indent|' '
name|'def'
name|'get_image'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image_meta'
op|'='
op|'{'
nl|'\n'
string|'"id"'
op|':'
string|'"fake-image"'
op|','
nl|'\n'
string|'"name"'
op|':'
string|'"fake-name"'
op|','
nl|'\n'
string|'"min_ram"'
op|':'
number|'1'
op|','
nl|'\n'
string|'"min_disk"'
op|':'
number|'1'
op|','
nl|'\n'
string|'"disk_format"'
op|':'
string|'"raw"'
op|','
nl|'\n'
string|'"container_format"'
op|':'
string|'"bare"'
op|','
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'return'
name|'image_meta'
newline|'\n'
nl|'\n'
DECL|member|get_flavor
dedent|''
name|'def'
name|'get_flavor'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'flavor'
op|'='
op|'{'
nl|'\n'
string|'"id"'
op|':'
string|'"fake.flavor"'
op|','
nl|'\n'
string|'"root_gb"'
op|':'
number|'10'
op|','
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'return'
name|'flavor'
newline|'\n'
nl|'\n'
DECL|member|test_base_image_properties
dedent|''
name|'def'
name|'test_base_image_properties'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'self'
op|'.'
name|'get_image'
op|'('
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify that we inherit all the needed keys'
nl|'\n'
name|'sys_meta'
op|'='
name|'utils'
op|'.'
name|'get_system_metadata_from_image'
op|'('
name|'image'
op|')'
newline|'\n'
name|'for'
name|'key'
name|'in'
name|'utils'
op|'.'
name|'SM_INHERITABLE_KEYS'
op|':'
newline|'\n'
indent|' '
name|'sys_key'
op|'='
string|'"%s%s"'
op|'%'
op|'('
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|','
name|'key'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image'
op|'['
name|'key'
op|']'
op|','
name|'sys_meta'
op|'.'
name|'get'
op|'('
name|'sys_key'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify that everything else is ignored'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'len'
op|'('
name|'sys_meta'
op|')'
op|','
name|'len'
op|'('
name|'utils'
op|'.'
name|'SM_INHERITABLE_KEYS'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_inherit_image_properties
dedent|''
name|'def'
name|'test_inherit_image_properties'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'self'
op|'.'
name|'get_image'
op|'('
op|')'
newline|'\n'
name|'image'
op|'['
string|'"properties"'
op|']'
op|'='
op|'{'
string|'"foo1"'
op|':'
string|'"bar"'
op|','
string|'"foo2"'
op|':'
string|'"baz"'
op|'}'
newline|'\n'
nl|'\n'
name|'sys_meta'
op|'='
name|'utils'
op|'.'
name|'get_system_metadata_from_image'
op|'('
name|'image'
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify that we inherit all the image properties'
nl|'\n'
name|'for'
name|'key'
op|','
name|'expected'
name|'in'
name|'six'
op|'.'
name|'iteritems'
op|'('
name|'image'
op|'['
string|'"properties"'
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'sys_key'
op|'='
string|'"%s%s"'
op|'%'
op|'('
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|','
name|'key'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'sys_meta'
op|'['
name|'sys_key'
op|']'
op|','
name|'expected'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_skip_image_properties
dedent|''
dedent|''
name|'def'
name|'test_skip_image_properties'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'self'
op|'.'
name|'get_image'
op|'('
op|')'
newline|'\n'
name|'image'
op|'['
string|'"properties"'
op|']'
op|'='
op|'{'
nl|'\n'
string|'"foo1"'
op|':'
string|'"bar"'
op|','
string|'"foo2"'
op|':'
string|'"baz"'
op|','
nl|'\n'
string|'"mappings"'
op|':'
string|'"wizz"'
op|','
string|'"img_block_device_mapping"'
op|':'
string|'"eek"'
op|','
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'sys_meta'
op|'='
name|'utils'
op|'.'
name|'get_system_metadata_from_image'
op|'('
name|'image'
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify that we inherit all the image properties'
nl|'\n'
name|'for'
name|'key'
op|','
name|'expected'
name|'in'
name|'six'
op|'.'
name|'iteritems'
op|'('
name|'image'
op|'['
string|'"properties"'
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'sys_key'
op|'='
string|'"%s%s"'
op|'%'
op|'('
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|','
name|'key'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'key'
name|'in'
name|'utils'
op|'.'
name|'SM_SKIP_KEYS'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'sys_key'
op|','
name|'sys_meta'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'sys_meta'
op|'['
name|'sys_key'
op|']'
op|','
name|'expected'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_vhd_min_disk_image
dedent|''
dedent|''
dedent|''
name|'def'
name|'test_vhd_min_disk_image'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'self'
op|'.'
name|'get_image'
op|'('
op|')'
newline|'\n'
name|'flavor'
op|'='
name|'self'
op|'.'
name|'get_flavor'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'image'
op|'['
string|'"disk_format"'
op|']'
op|'='
string|'"vhd"'
newline|'\n'
nl|'\n'
name|'sys_meta'
op|'='
name|'utils'
op|'.'
name|'get_system_metadata_from_image'
op|'('
name|'image'
op|','
name|'flavor'
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify that the min_disk property is taken from'
nl|'\n'
comment|"# flavor's root_gb when using vhd disk format"
nl|'\n'
name|'sys_key'
op|'='
string|'"%s%s"'
op|'%'
op|'('
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|','
string|'"min_disk"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'sys_meta'
op|'['
name|'sys_key'
op|']'
op|','
name|'flavor'
op|'['
string|'"root_gb"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dont_inherit_empty_values
dedent|''
name|'def'
name|'test_dont_inherit_empty_values'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'='
name|'self'
op|'.'
name|'get_image'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'for'
name|'key'
name|'in'
name|'utils'
op|'.'
name|'SM_INHERITABLE_KEYS'
op|':'
newline|'\n'
indent|' '
name|'image'
op|'['
name|'key'
op|']'
op|'='
name|'None'
newline|'\n'
nl|'\n'
dedent|''
name|'sys_meta'
op|'='
name|'utils'
op|'.'
name|'get_system_metadata_from_image'
op|'('
name|'image'
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify that the empty properties have not been inherited'
nl|'\n'
name|'for'
name|'key'
name|'in'
name|'utils'
op|'.'
name|'SM_INHERITABLE_KEYS'
op|':'
newline|'\n'
indent|' '
name|'sys_key'
op|'='
string|'"%s%s"'
op|'%'
op|'('
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|','
name|'key'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'sys_key'
op|','
name|'sys_meta'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|GetImageFromSystemMetadataTestCase
dedent|''
dedent|''
dedent|''
name|'class'
name|'GetImageFromSystemMetadataTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|get_system_metadata
indent|' '
name|'def'
name|'get_system_metadata'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'sys_meta'
op|'='
op|'{'
nl|'\n'
string|'"image_min_ram"'
op|':'
number|'1'
op|','
nl|'\n'
string|'"image_min_disk"'
op|':'
number|'1'
op|','
nl|'\n'
string|'"image_disk_format"'
op|':'
string|'"raw"'
op|','
nl|'\n'
string|'"image_container_format"'
op|':'
string|'"bare"'
op|','
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
name|'return'
name|'sys_meta'
newline|'\n'
nl|'\n'
DECL|member|test_image_from_system_metadata
dedent|''
name|'def'
name|'test_image_from_system_metadata'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'sys_meta'
op|'='
name|'self'
op|'.'
name|'get_system_metadata'
op|'('
op|')'
newline|'\n'
name|'sys_meta'
op|'['
string|'"%soo1"'
op|'%'
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|']'
op|'='
string|'"bar"'
newline|'\n'
name|'sys_meta'
op|'['
string|'"%soo2"'
op|'%'
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|']'
op|'='
string|'"baz"'
newline|'\n'
name|'sys_meta'
op|'['
string|'"%simg_block_device_mapping"'
op|'%'
nl|'\n'
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|']'
op|'='
string|'"eek"'
newline|'\n'
nl|'\n'
name|'image'
op|'='
name|'utils'
op|'.'
name|'get_image_from_system_metadata'
op|'('
name|'sys_meta'
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify that we inherit all the needed keys'
nl|'\n'
name|'for'
name|'key'
name|'in'
name|'utils'
op|'.'
name|'SM_INHERITABLE_KEYS'
op|':'
newline|'\n'
indent|' '
name|'sys_key'
op|'='
string|'"%s%s"'
op|'%'
op|'('
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|','
name|'key'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image'
op|'['
name|'key'
op|']'
op|','
name|'sys_meta'
op|'.'
name|'get'
op|'('
name|'sys_key'
op|')'
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify that we inherit the rest of metadata as properties'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertIn'
op|'('
string|'"properties"'
op|','
name|'image'
op|')'
newline|'\n'
nl|'\n'
name|'for'
name|'key'
op|','
name|'value'
name|'in'
name|'six'
op|'.'
name|'iteritems'
op|'('
name|'image'
op|'['
string|'"properties"'
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'sys_key'
op|'='
string|'"%s%s"'
op|'%'
op|'('
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|','
name|'key'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'image'
op|'['
string|'"properties"'
op|']'
op|'['
name|'key'
op|']'
op|','
name|'sys_meta'
op|'['
name|'sys_key'
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'assertNotIn'
op|'('
string|'"img_block_device_mapping"'
op|','
name|'image'
op|'['
string|'"properties"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_dont_inherit_empty_values
dedent|''
name|'def'
name|'test_dont_inherit_empty_values'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'sys_meta'
op|'='
name|'self'
op|'.'
name|'get_system_metadata'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'for'
name|'key'
name|'in'
name|'utils'
op|'.'
name|'SM_INHERITABLE_KEYS'
op|':'
newline|'\n'
indent|' '
name|'sys_key'
op|'='
string|'"%s%s"'
op|'%'
op|'('
name|'utils'
op|'.'
name|'SM_IMAGE_PROP_PREFIX'
op|','
name|'key'
op|')'
newline|'\n'
name|'sys_meta'
op|'['
name|'sys_key'
op|']'
op|'='
name|'None'
newline|'\n'
nl|'\n'
dedent|''
name|'image'
op|'='
name|'utils'
op|'.'
name|'get_image_from_system_metadata'
op|'('
name|'sys_meta'
op|')'
newline|'\n'
nl|'\n'
comment|'# Verify that the empty properties have not been inherited'
nl|'\n'
name|'for'
name|'key'
name|'in'
name|'utils'
op|'.'
name|'SM_INHERITABLE_KEYS'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertNotIn'
op|'('
name|'key'
op|','
name|'image'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|GetImageMetadataFromVolumeTestCase
dedent|''
dedent|''
dedent|''
name|'class'
name|'GetImageMetadataFromVolumeTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_inherit_image_properties
indent|' '
name|'def'
name|'test_inherit_image_properties'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'properties'
op|'='
op|'{'
string|'"fake_prop"'
op|':'
string|'"fake_value"'
op|'}'
newline|'\n'
name|'volume'
op|'='
op|'{'
string|'"volume_image_metadata"'
op|':'
name|'properties'
op|'}'
newline|'\n'
name|'image_meta'
op|'='
name|'utils'
op|'.'
name|'get_image_metadata_from_volume'
op|'('
name|'volume'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'properties'
op|','
name|'image_meta'
op|'['
string|'"properties"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_image_size
dedent|''
name|'def'
name|'test_image_size'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'volume'
op|'='
op|'{'
string|'"size"'
op|':'
number|'10'
op|'}'
newline|'\n'
name|'image_meta'
op|'='
name|'utils'
op|'.'
name|'get_image_metadata_from_volume'
op|'('
name|'volume'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'10'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|','
name|'image_meta'
op|'['
string|'"size"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_image_status
dedent|''
name|'def'
name|'test_image_status'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'volume'
op|'='
op|'{'
op|'}'
newline|'\n'
name|'image_meta'
op|'='
name|'utils'
op|'.'
name|'get_image_metadata_from_volume'
op|'('
name|'volume'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'"active"'
op|','
name|'image_meta'
op|'['
string|'"status"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_values_conversion
dedent|''
name|'def'
name|'test_values_conversion'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'properties'
op|'='
op|'{'
string|'"min_ram"'
op|':'
string|'"5"'
op|','
string|'"min_disk"'
op|':'
string|'"7"'
op|'}'
newline|'\n'
name|'volume'
op|'='
op|'{'
string|'"volume_image_metadata"'
op|':'
name|'properties'
op|'}'
newline|'\n'
name|'image_meta'
op|'='
name|'utils'
op|'.'
name|'get_image_metadata_from_volume'
op|'('
name|'volume'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'5'
op|','
name|'image_meta'
op|'['
string|'"min_ram"'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'7'
op|','
name|'image_meta'
op|'['
string|'"min_disk"'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_suppress_not_image_properties
dedent|''
name|'def'
name|'test_suppress_not_image_properties'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'properties'
op|'='
op|'{'
string|'"min_ram"'
op|':'
string|'"256"'
op|','
string|'"min_disk"'
op|':'
string|'"128"'
op|','
nl|'\n'
string|'"image_id"'
op|':'
string|'"fake_id"'
op|','
string|'"image_name"'
op|':'
string|'"fake_name"'
op|','
nl|'\n'
string|'"container_format"'
op|':'
string|'"ami"'
op|','
string|'"disk_format"'
op|':'
string|'"ami"'
op|','
nl|'\n'
string|'"size"'
op|':'
string|'"1234"'
op|','
string|'"checksum"'
op|':'
string|'"fake_checksum"'
op|'}'
newline|'\n'
name|'volume'
op|'='
op|'{'
string|'"volume_image_metadata"'
op|':'
name|'properties'
op|'}'
newline|'\n'
name|'image_meta'
op|'='
name|'utils'
op|'.'
name|'get_image_metadata_from_volume'
op|'('
name|'volume'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
op|'{'
op|'}'
op|','
name|'image_meta'
op|'['
string|'"properties"'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'0'
op|','
name|'image_meta'
op|'['
string|'"size"'
op|']'
op|')'
newline|'\n'
comment|"# volume's properties should not be touched"
nl|'\n'
name|'self'
op|'.'
name|'assertNotEqual'
op|'('
op|'{'
op|'}'
op|','
name|'properties'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ResourceFilterTestCase
dedent|''
dedent|''
name|'class'
name|'ResourceFilterTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|_assert_filtering
indent|' '
name|'def'
name|'_assert_filtering'
op|'('
name|'self'
op|','
name|'res_list'
op|','
name|'filts'
op|','
name|'expected_tags'
op|')'
op|':'
newline|'\n'
indent|' '
name|'actual_tags'
op|'='
name|'utils'
op|'.'
name|'filter_and_format_resource_metadata'
op|'('
string|"'instance'"
op|','
nl|'\n'
name|'res_list'
op|','
name|'filts'
op|','
string|"'metadata'"
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertJsonEqual'
op|'('
name|'expected_tags'
op|','
name|'actual_tags'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_filter_and_format_resource_metadata
dedent|''
name|'def'
name|'test_filter_and_format_resource_metadata'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Create some tags'
nl|'\n'
comment|'# One overlapping pair, and one different key value pair'
nl|'\n'
comment|'# i1 : foo=bar, bax=wibble'
nl|'\n'
comment|'# i2 : foo=bar, baz=quux'
nl|'\n'
nl|'\n'
comment|'# resources'
nl|'\n'
indent|' '
name|'i1'
op|'='
op|'{'
nl|'\n'
string|"'uuid'"
op|':'
string|"'1'"
op|','
nl|'\n'
string|"'metadata'"
op|':'
op|'{'
string|"'foo'"
op|':'
string|"'bar'"
op|','
string|"'bax'"
op|':'
string|"'wibble'"
op|'}'
op|','
nl|'\n'
op|'}'
newline|'\n'
name|'i2'
op|'='
op|'{'
nl|'\n'
string|"'uuid'"
op|':'
string|"'2'"
op|','
nl|'\n'
string|"'metadata'"
op|':'
op|'{'
string|"'foo'"
op|':'
string|"'bar'"
op|','
string|"'baz'"
op|':'
string|"'quux'"
op|'}'
op|','
nl|'\n'
op|'}'
newline|'\n'
nl|'\n'
comment|'# Resources list'
nl|'\n'
name|'rl'
op|'='
op|'['
name|'i1'
op|','
name|'i2'
op|']'
newline|'\n'
nl|'\n'
comment|'# tags'
nl|'\n'
name|'i11'
op|'='
op|'{'
string|"'instance_id'"
op|':'
string|"'1'"
op|','
string|"'key'"
op|':'
string|"'foo'"
op|','
string|"'value'"
op|':'
string|"'bar'"
op|'}'
newline|'\n'
name|'i12'
op|'='
op|'{'
string|"'instance_id'"
op|':'
string|"'1'"
op|','
string|"'key'"
op|':'
string|"'bax'"
op|','
string|"'value'"
op|':'
string|"'wibble'"
op|'}'
newline|'\n'
name|'i21'
op|'='
op|'{'
string|"'instance_id'"
op|':'
string|"'2'"
op|','
string|"'key'"
op|':'
string|"'foo'"
op|','
string|"'value'"
op|':'
string|"'bar'"
op|'}'
newline|'\n'
name|'i22'
op|'='
op|'{'
string|"'instance_id'"
op|':'
string|"'2'"
op|','
string|"'key'"
op|':'
string|"'baz'"
op|','
string|"'value'"
op|':'
string|"'quux'"
op|'}'
newline|'\n'
nl|'\n'
comment|'# No filter'
nl|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'['
op|']'
op|','
op|'['
name|'i11'
op|','
name|'i12'
op|','
name|'i21'
op|','
name|'i22'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'{'
op|'}'
op|','
op|'['
name|'i11'
op|','
name|'i12'
op|','
name|'i21'
op|','
name|'i22'
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Key search'
nl|'\n'
nl|'\n'
comment|"# Both should have tags with key 'foo' and value 'bar'"
nl|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'{'
string|"'key'"
op|':'
string|"'foo'"
op|','
string|"'value'"
op|':'
string|"'bar'"
op|'}'
op|','
op|'['
name|'i11'
op|','
name|'i21'
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|"# Both should have tags with key 'foo'"
nl|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'{'
string|"'key'"
op|':'
string|"'foo'"
op|'}'
op|','
op|'['
name|'i11'
op|','
name|'i21'
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|"# Only i2 should have tags with key 'baz' and value 'quux'"
nl|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'{'
string|"'key'"
op|':'
string|"'baz'"
op|','
string|"'value'"
op|':'
string|"'quux'"
op|'}'
op|','
op|'['
name|'i22'
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|"# Only i2 should have tags with value 'quux'"
nl|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'{'
string|"'value'"
op|':'
string|"'quux'"
op|'}'
op|','
op|'['
name|'i22'
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Empty list should be returned when no tags match'
nl|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'{'
string|"'key'"
op|':'
string|"'split'"
op|','
string|"'value'"
op|':'
string|"'banana'"
op|'}'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Multiple values'
nl|'\n'
nl|'\n'
comment|"# Only i2 should have tags with key 'baz' and values in the set"
nl|'\n'
comment|"# ['quux', 'wibble']"
nl|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'{'
string|"'key'"
op|':'
string|"'baz'"
op|','
string|"'value'"
op|':'
op|'['
string|"'quux'"
op|','
string|"'wibble'"
op|']'
op|'}'
op|','
nl|'\n'
op|'['
name|'i22'
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# But when specified as two different filters, no tags should be'
nl|'\n'
comment|'# returned. This is because, the filter will mean "return tags which'
nl|'\n'
comment|'# have (key=baz AND value=quux) AND (key=baz AND value=wibble)'
nl|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'['
op|'{'
string|"'key'"
op|':'
string|"'baz'"
op|','
string|"'value'"
op|':'
string|"'quux'"
op|'}'
op|','
nl|'\n'
op|'{'
string|"'key'"
op|':'
string|"'baz'"
op|','
string|"'value'"
op|':'
string|"'wibble'"
op|'}'
op|']'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Test for regex'
nl|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'{'
string|"'value'"
op|':'
string|"'\\\\Aqu..*\\\\Z(?s)'"
op|'}'
op|','
op|'['
name|'i22'
op|']'
op|')'
newline|'\n'
nl|'\n'
comment|'# Make sure bug #1365887 is fixed'
nl|'\n'
name|'i1'
op|'['
string|"'metadata'"
op|']'
op|'['
string|"'key3'"
op|']'
op|'='
string|"'a'"
newline|'\n'
name|'self'
op|'.'
name|'_assert_filtering'
op|'('
name|'rl'
op|','
op|'{'
string|"'value'"
op|':'
string|"'banana'"
op|'}'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|SafeTruncateTestCase
dedent|''
dedent|''
name|'class'
name|'SafeTruncateTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_exception_to_dict_with_long_message_3_bytes
indent|' '
name|'def'
name|'test_exception_to_dict_with_long_message_3_bytes'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Generate Chinese byte string whose length is 300. This Chinese UTF-8'
nl|'\n'
comment|'# character occupies 3 bytes. After truncating, the byte string length'
nl|'\n'
comment|'# should be 255.'
nl|'\n'
indent|' '
name|'msg'
op|'='
string|"u'\\u8d75'"
op|'*'
number|'100'
newline|'\n'
name|'truncated_msg'
op|'='
name|'utils'
op|'.'
name|'safe_truncate'
op|'('
name|'msg'
op|','
number|'255'
op|')'
newline|'\n'
name|'byte_message'
op|'='
name|'encodeutils'
op|'.'
name|'safe_encode'
op|'('
name|'truncated_msg'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'255'
op|','
name|'len'
op|'('
name|'byte_message'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_exception_to_dict_with_long_message_2_bytes
dedent|''
name|'def'
name|'test_exception_to_dict_with_long_message_2_bytes'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
comment|'# Generate Russian byte string whose length is 300. This Russian UTF-8'
nl|'\n'
comment|'# character occupies 2 bytes. After truncating, the byte string length'
nl|'\n'
comment|'# should be 254.'
nl|'\n'
indent|' '
name|'msg'
op|'='
name|'encodeutils'
op|'.'
name|'safe_decode'
op|'('
string|"'\\xd0\\x92'"
op|'*'
number|'150'
op|')'
newline|'\n'
name|'truncated_msg'
op|'='
name|'utils'
op|'.'
name|'safe_truncate'
op|'('
name|'msg'
op|','
number|'255'
op|')'
newline|'\n'
name|'byte_message'
op|'='
name|'encodeutils'
op|'.'
name|'safe_encode'
op|'('
name|'truncated_msg'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'254'
op|','
name|'len'
op|'('
name|'byte_message'
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|SpawnNTestCase
dedent|''
dedent|''
name|'class'
name|'SpawnNTestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'SpawnNTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'useFixture'
op|'('
name|'context_fixture'
op|'.'
name|'ClearRequestContext'
op|'('
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'spawn_name'
op|'='
string|"'spawn_n'"
newline|'\n'
nl|'\n'
DECL|member|test_spawn_n_no_context
dedent|''
name|'def'
name|'test_spawn_n_no_context'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'common_context'
op|'.'
name|'get_current'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|function|_fake_spawn
name|'def'
name|'_fake_spawn'
op|'('
name|'func'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
comment|'# call the method to ensure no error is raised'
nl|'\n'
indent|' '
name|'func'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'test'"
op|','
name|'args'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake
dedent|''
name|'def'
name|'fake'
op|'('
name|'arg'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'eventlet'
op|','
name|'self'
op|'.'
name|'spawn_name'
op|','
name|'_fake_spawn'
op|')'
op|':'
newline|'\n'
indent|' '
name|'getattr'
op|'('
name|'utils'
op|','
name|'self'
op|'.'
name|'spawn_name'
op|')'
op|'('
name|'fake'
op|','
string|"'test'"
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'common_context'
op|'.'
name|'get_current'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_spawn_n_context
dedent|''
name|'def'
name|'test_spawn_n_context'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'common_context'
op|'.'
name|'get_current'
op|'('
op|')'
op|')'
newline|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|')'
newline|'\n'
nl|'\n'
DECL|function|_fake_spawn
name|'def'
name|'_fake_spawn'
op|'('
name|'func'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
comment|'# call the method to ensure no error is raised'
nl|'\n'
indent|' '
name|'func'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'ctxt'
op|','
name|'args'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'test'"
op|','
name|'kwargs'
op|'['
string|"'kwarg1'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake
dedent|''
name|'def'
name|'fake'
op|'('
name|'context'
op|','
name|'kwarg1'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'eventlet'
op|','
name|'self'
op|'.'
name|'spawn_name'
op|','
name|'_fake_spawn'
op|')'
op|':'
newline|'\n'
indent|' '
name|'getattr'
op|'('
name|'utils'
op|','
name|'self'
op|'.'
name|'spawn_name'
op|')'
op|'('
name|'fake'
op|','
name|'ctxt'
op|','
name|'kwarg1'
op|'='
string|"'test'"
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'ctxt'
op|','
name|'common_context'
op|'.'
name|'get_current'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_spawn_n_context_different_from_passed
dedent|''
name|'def'
name|'test_spawn_n_context_different_from_passed'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsNone'
op|'('
name|'common_context'
op|'.'
name|'get_current'
op|'('
op|')'
op|')'
newline|'\n'
name|'ctxt'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|')'
newline|'\n'
name|'ctxt_passed'
op|'='
name|'context'
op|'.'
name|'RequestContext'
op|'('
string|"'user'"
op|','
string|"'project'"
op|','
nl|'\n'
name|'overwrite'
op|'='
name|'False'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'ctxt'
op|','
name|'common_context'
op|'.'
name|'get_current'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|function|_fake_spawn
name|'def'
name|'_fake_spawn'
op|'('
name|'func'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
comment|'# call the method to ensure no error is raised'
nl|'\n'
indent|' '
name|'func'
op|'('
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'ctxt_passed'
op|','
name|'args'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|"'test'"
op|','
name|'kwargs'
op|'['
string|"'kwarg1'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|function|fake
dedent|''
name|'def'
name|'fake'
op|'('
name|'context'
op|','
name|'kwarg1'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
dedent|''
name|'with'
name|'mock'
op|'.'
name|'patch'
op|'.'
name|'object'
op|'('
name|'eventlet'
op|','
name|'self'
op|'.'
name|'spawn_name'
op|','
name|'_fake_spawn'
op|')'
op|':'
newline|'\n'
indent|' '
name|'getattr'
op|'('
name|'utils'
op|','
name|'self'
op|'.'
name|'spawn_name'
op|')'
op|'('
name|'fake'
op|','
name|'ctxt_passed'
op|','
name|'kwarg1'
op|'='
string|"'test'"
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'ctxt'
op|','
name|'common_context'
op|'.'
name|'get_current'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|SpawnTestCase
dedent|''
dedent|''
name|'class'
name|'SpawnTestCase'
op|'('
name|'SpawnNTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'SpawnTestCase'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'spawn_name'
op|'='
string|"'spawn'"
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|UT8TestCase
dedent|''
dedent|''
name|'class'
name|'UT8TestCase'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
DECL|member|test_none_value
indent|' '
name|'def'
name|'test_none_value'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'None'
op|')'
op|','
name|'type'
op|'('
name|'None'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_bytes_value
dedent|''
name|'def'
name|'test_bytes_value'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'some_value'
op|'='
string|'b"fake data"'
newline|'\n'
name|'return_value'
op|'='
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'some_value'
op|')'
newline|'\n'
comment|"# check that type of returned value doesn't changed"
nl|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'return_value'
op|','
name|'type'
op|'('
name|'some_value'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'some_value'
op|','
name|'return_value'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_not_text_type
dedent|''
name|'def'
name|'test_not_text_type'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return_value'
op|'='
name|'utils'
op|'.'
name|'utf8'
op|'('
number|'1'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
string|'b"1"'
op|','
name|'return_value'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertIsInstance'
op|'('
name|'return_value'
op|','
name|'six'
op|'.'
name|'binary_type'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_text_type_with_encoding
dedent|''
name|'def'
name|'test_text_type_with_encoding'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'some_value'
op|'='
string|"'test\\u2026config'"
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
name|'some_value'
op|','
name|'utils'
op|'.'
name|'utf8'
op|'('
name|'some_value'
op|')'
op|'.'
name|'decode'
op|'('
string|'"utf-8"'
op|')'
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 12.225496 | 88 | 0.593371 | 18,178 | 121,986 | 3.886951 | 0.038948 | 0.153106 | 0.092418 | 0.064028 | 0.904595 | 0.864415 | 0.828071 | 0.791146 | 0.763548 | 0.727019 | 0 | 0.010649 | 0.10011 | 121,986 | 9,977 | 89 | 12.226721 | 0.63301 | 0 | 0 | 0.944573 | 0 | 0.000601 | 0.484793 | 0.067745 | 0 | 0 | 0.000139 | 0 | 0.022452 | 0 | null | null | 0.002305 | 0.003308 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
67326ea34fbd383db1c9375dbd6d05a3282a03b4 | 4,806 | py | Python | MLSD/Transformers/Bag_Transformers.py | HaoranXue/Machine_Learning_For_Structured_Data | 376fb2b78ba5dea4d6214931f6a60e3b4477c883 | [
"MIT"
] | 4 | 2017-09-03T23:09:02.000Z | 2017-09-15T13:01:38.000Z | MLSD/Transformers/Bag_Transformers.py | HaoranXue/SDM | 376fb2b78ba5dea4d6214931f6a60e3b4477c883 | [
"MIT"
] | null | null | null | MLSD/Transformers/Bag_Transformers.py | HaoranXue/SDM | 376fb2b78ba5dea4d6214931f6a60e3b4477c883 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
from scipy.stats import skew, kurtosis
from sklearn.base import TransformerMixin
class BasicBag(TransformerMixin):
def __init__(self, mesh=False, Dreduction=None):
self.mesh = mesh
self.Dreduction = Dreduction
def fit(self, X, y=None, *args, **kwargs):
if self.mesh == False:
self.features = pd.DataFrame(
[
X.min(), X.max(), X.mean(), X.std(), X.apply(skew),
X.apply(kurtosis), X.apply(np.median)
],
index=X.index)
elif self.mesh == True:
def first_order_d(X):
return X[1:] - X[:-1]
def second_order_d(X):
first_order = first_order_d(X)
return first_order[1:] - first_order[:-1]
def fo_mean(X):
return np.mean(first_order_d(X))
def fo_std(X):
return np.std(first_order_d(X))
def fo_min(X):
return np.min(first_order_d(X))
def fo_max(X):
return np.max(first_order_d(X))
def fo_median(X):
return np.median(first_order_d(X))
def fo_skew(X):
return skew(first_order_d(X))
def fo_kurt(X):
return kurtosis(first_order_d(X))
def so_mean(X):
return np.mean(second_order_d(X))
def so_std(X):
return np.std(second_order_d(X))
def so_min(X):
return np.min(second_order_d(X))
def so_max(X):
return np.max(second_order_d(X))
def so_median(X):
return np.median(second_order_d(X))
def so_skew(X):
return skew(second_order_d(X))
def so_kurt(X):
return kurtosis(second_order_d(X))
self.features = pd.DataFrame(
[
X.min(), X.max(), X.mean(), X.std(), X.apply(skew),
X.apply(kurtosis), X.apply(np.median), X.apply(fo_mean),
X.apply(fo_std), X.apply(fo_min), X.apply(fo_median),
X.apply(fo_max), X.apply(fo_skew), X.apply(fo_kurt),
X.apply(so_mean), X.apply(so_std), X.apply(so_min),
X.apply(so_median), X.apply(so_max), X.apply(so_skew),
X.apply(so_kurt)
],
index=X.index)
    def transform(self, X, y=None, *args, **kwargs):
        """Return a per-column feature table of summary statistics.

        For each column of ``X`` this computes min, max, mean, std, skew,
        kurtosis and median; when ``self.mesh`` is True it additionally
        computes the same seven statistics over the first- and second-order
        differences of each column.

        ``y`` and the extra positional/keyword arguments are accepted for
        sklearn-style compatibility but ignored.
        """
        # NOTE(review): `== False` / `== True` comparisons would normally be
        # `not self.mesh` / `self.mesh`; left as-is to preserve behavior for
        # non-bool mesh values.
        if self.mesh == False:
            self.features = pd.DataFrame(
                [
                    X.min(), X.max(), X.mean(), X.std(), X.apply(skew),
                    X.apply(kurtosis), X.apply(np.median)
                ],
                # NOTE(review): the rows of this frame are the statistics, so
                # labelling them with X.index is only shape-compatible when
                # len(X.index) equals the number of statistics; X.columns or
                # explicit statistic names look more likely to be intended —
                # confirm against callers.
                index=X.index)
        elif self.mesh == True:
            # Helpers below receive the per-column Series handed over by
            # DataFrame.apply.
            def first_order_d(X):
                # NOTE(review): for a pandas Series, X[1:] - X[:-1] subtracts
                # with *index alignment* (zeros on the overlapping labels, NaN
                # at the edges) rather than computing consecutive differences;
                # np.diff(X) was probably intended — verify before relying on
                # these features.
                return X[1:] - X[:-1]
            def second_order_d(X):
                first_order = first_order_d(X)
                return first_order[1:] - first_order[:-1]
            # Statistics of the first-order differences.
            def fo_mean(X):
                return np.mean(first_order_d(X))
            def fo_std(X):
                return np.std(first_order_d(X))
            def fo_min(X):
                return np.min(first_order_d(X))
            def fo_max(X):
                return np.max(first_order_d(X))
            def fo_median(X):
                return np.median(first_order_d(X))
            def fo_skew(X):
                return skew(first_order_d(X))
            def fo_kurt(X):
                return kurtosis(first_order_d(X))
            # Statistics of the second-order differences.
            def so_mean(X):
                return np.mean(second_order_d(X))
            def so_std(X):
                return np.std(second_order_d(X))
            def so_min(X):
                return np.min(second_order_d(X))
            def so_max(X):
                return np.max(second_order_d(X))
            def so_median(X):
                return np.median(second_order_d(X))
            def so_skew(X):
                return skew(second_order_d(X))
            def so_kurt(X):
                return kurtosis(second_order_d(X))
            self.features = pd.DataFrame(
                [
                    X.min(), X.max(), X.mean(), X.std(), X.apply(skew),
                    X.apply(kurtosis), X.apply(np.median), X.apply(fo_mean),
                    X.apply(fo_std), X.apply(fo_min), X.apply(fo_median),
                    X.apply(fo_max), X.apply(fo_skew), X.apply(fo_kurt),
                    X.apply(so_mean), X.apply(so_std), X.apply(so_min),
                    X.apply(so_median), X.apply(so_max), X.apply(so_skew),
                    X.apply(so_kurt)
                ],
                index=X.index)
        return self.features
| 30.0375 | 76 | 0.47836 | 638 | 4,806 | 3.393417 | 0.07837 | 0.110855 | 0.109931 | 0.120092 | 0.891455 | 0.891455 | 0.891455 | 0.891455 | 0.891455 | 0.891455 | 0 | 0.002773 | 0.399709 | 4,806 | 159 | 77 | 30.226415 | 0.74766 | 0 | 0 | 0.869565 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.304348 | false | 0 | 0.034783 | 0.26087 | 0.634783 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 9 |
67b4f5e8bf779e217af1a244835b9682fdf7ddad | 64 | py | Python | macchanger.py | ishangala16/Viewbot | 925cb7ee9bca210898b059adae8d3ae6194b5df8 | [
"MIT"
] | null | null | null | macchanger.py | ishangala16/Viewbot | 925cb7ee9bca210898b059adae8d3ae6194b5df8 | [
"MIT"
] | null | null | null | macchanger.py | ishangala16/Viewbot | 925cb7ee9bca210898b059adae8d3ae6194b5df8 | [
"MIT"
] | 1 | 2022-01-03T07:34:24.000Z | 2022-01-03T07:34:24.000Z | import os
os.system('spoof-mac.py set 00:00:00:00:00:00 wi-fi') | 32 | 53 | 0.703125 | 16 | 64 | 2.8125 | 0.625 | 0.444444 | 0.533333 | 0.533333 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0.206897 | 0.09375 | 64 | 2 | 53 | 32 | 0.568966 | 0 | 0 | 0 | 0 | 0 | 0.625 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 9 |
67f9e48b6a906fcedc3c327ed2a6d3e5bc7d330d | 104 | py | Python | src/hommmer/metrics/effect_share.py | hammer-mt/hommmer | a02cb87841395f30911242a019f28f6ac15f27ec | [
"MIT"
] | 4 | 2021-11-09T21:27:30.000Z | 2021-11-23T00:38:20.000Z | src/hommmer/metrics/effect_share.py | hammer-mt/hommmer | a02cb87841395f30911242a019f28f6ac15f27ec | [
"MIT"
] | null | null | null | src/hommmer/metrics/effect_share.py | hammer-mt/hommmer | a02cb87841395f30911242a019f28f6ac15f27ec | [
"MIT"
] | null | null | null | def effect_share(contribution_df):
return (contribution_df.sum()/contribution_df.sum().sum()).values | 52 | 69 | 0.778846 | 14 | 104 | 5.5 | 0.571429 | 0.545455 | 0.441558 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.067308 | 104 | 2 | 69 | 52 | 0.793814 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
e1fe31358bec23bb3a0d0579b90c95ea95693273 | 109,042 | py | Python | visprotocol/protocol/mc_protocol.py | ClandininLab/vis-protocol | d4438dccea3987b8f21648d439fe1c1349940024 | [
"MIT"
] | null | null | null | visprotocol/protocol/mc_protocol.py | ClandininLab/vis-protocol | d4438dccea3987b8f21648d439fe1c1349940024 | [
"MIT"
] | null | null | null | visprotocol/protocol/mc_protocol.py | ClandininLab/vis-protocol | d4438dccea3987b8f21648d439fe1c1349940024 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 21 10:20:02 2018
@author: minseung and mhturner
"""
from matplotlib.pyplot import pause
import numpy as np
import os
import flyrpc.multicall
import inspect
from time import sleep
import visprotocol
from visprotocol.protocol import clandinin_protocol
class BaseProtocol(clandinin_protocol.BaseProtocol):
    """Shared stimulus-parameter builders for the MC protocols below.

    Subclasses use the get*Parameters helpers to turn entries of
    self.protocol_parameters / self.run_parameters into flystim stimulus
    parameter dictionaries.
    """

    def __init__(self, cfg):
        super().__init__(cfg)  # call the parent class init method
def getMovingPatchParameters(self, center=None, angle=None, speed=None, width=None, height=None, color=None, distance_to_travel=None):
if center is None: center = self.adjustCenter(self.protocol_parameters['center'])
if angle is None: angle = self.protocol_parameters['angle']
if speed is None: speed = self.protocol_parameters['speed']
if width is None: width = self.protocol_parameters['width']
if height is None: height = self.protocol_parameters['height']
if color is None: color = self.protocol_parameters['color']
centerX = center[0]
centerY = center[1]
stim_time = self.run_parameters['stim_time']
if distance_to_travel is None: # distance_to_travel is set by speed and stim_time
distance_to_travel = speed * stim_time
# trajectory just has two points, at time=0 and time=stim_time
startX = (0, centerX - np.cos(np.radians(angle)) * distance_to_travel/2)
endX = (stim_time, centerX + np.cos(np.radians(angle)) * distance_to_travel/2)
startY = (0, centerY - np.sin(np.radians(angle)) * distance_to_travel/2)
endY = (stim_time, centerY + np.sin(np.radians(angle)) * distance_to_travel/2)
x = [startX, endX]
y = [startY, endY]
else: # distance_to_travel is specified, so only go that distance at the defined speed. Hang pre- and post- for any extra stim time
travel_time = distance_to_travel / speed
if travel_time > stim_time:
print('Warning: stim_time is too short to show whole trajectory at this speed!')
hang_time = 0
else:
hang_time = (stim_time - travel_time)/2
# split up hang time in pre and post such that trajectory always hits centerX,centerY at stim_time/2
x_1 = (0, centerX - np.cos(np.radians(angle)) * distance_to_travel/2)
x_2 = (hang_time, centerX - np.cos(np.radians(angle)) * distance_to_travel/2)
x_3 = (hang_time+travel_time, centerX + np.cos(np.radians(angle)) * distance_to_travel/2)
x_4 = (hang_time+travel_time+hang_time, centerX + np.cos(np.radians(angle)) * distance_to_travel/2)
y_1 = (0, centerY - np.sin(np.radians(angle)) * distance_to_travel/2)
y_2 = (hang_time, centerY - np.sin(np.radians(angle)) * distance_to_travel/2)
y_3 = (hang_time+travel_time, centerY + np.sin(np.radians(angle)) * distance_to_travel/2)
y_4 = (hang_time+travel_time+hang_time, centerY + np.sin(np.radians(angle)) * distance_to_travel/2)
x = [x_1, x_2, x_3, x_4]
y = [y_1, y_2, y_3, y_4]
x_trajectory = {'name': 'tv_pairs',
'tv_pairs': x,
'kind': 'linear'}
y_trajectory = {'name': 'tv_pairs',
'tv_pairs': y,
'kind': 'linear'}
patch_parameters = {'name': 'MovingPatch',
'width': width,
'height': height,
'color': color,
'theta': x_trajectory,
'phi': y_trajectory,
'angle': angle}
return patch_parameters
def getMovingSpotParameters(self, center=None, angle=None, speed=None, radius=None, color=None, distance_to_travel=None):
if center is None: center = self.protocol_parameters['center']
if angle is None: angle = self.protocol_parameters['angle']
if speed is None: speed = self.protocol_parameters['speed']
if radius is None: radius = self.protocol_parameters['radius']
if color is None: color = self.protocol_parameters['color']
center = self.adjustCenter(center)
centerX = center[0]
centerY = center[1]
stim_time = self.run_parameters['stim_time']
if distance_to_travel is None: # distance_to_travel is set by speed and stim_time
distance_to_travel = speed * stim_time
# trajectory just has two points, at time=0 and time=stim_time
startX = (0, centerX - np.cos(np.radians(angle)) * distance_to_travel/2)
endX = (stim_time, centerX + np.cos(np.radians(angle)) * distance_to_travel/2)
startY = (0, centerY - np.sin(np.radians(angle)) * distance_to_travel/2)
endY = (stim_time, centerY + np.sin(np.radians(angle)) * distance_to_travel/2)
x = [startX, endX]
y = [startY, endY]
else: # distance_to_travel is specified, so only go that distance at the defined speed. Hang pre- and post- for any extra stim time
travel_time = distance_to_travel / speed
if travel_time > stim_time:
print('Warning: stim_time is too short to show whole trajectory at this speed!')
hang_time = 0
else:
hang_time = (stim_time - travel_time)/2
# split up hang time in pre and post such that trajectory always hits centerX,centerY at stim_time/2
x_1 = (0, centerX - np.cos(np.radians(angle)) * distance_to_travel/2)
x_2 = (hang_time, centerX - np.cos(np.radians(angle)) * distance_to_travel/2)
x_3 = (hang_time+travel_time, centerX + np.cos(np.radians(angle)) * distance_to_travel/2)
x_4 = (hang_time+travel_time+hang_time, centerX + np.cos(np.radians(angle)) * distance_to_travel/2)
y_1 = (0, centerY - np.sin(np.radians(angle)) * distance_to_travel/2)
y_2 = (hang_time, centerY - np.sin(np.radians(angle)) * distance_to_travel/2)
y_3 = (hang_time+travel_time, centerY + np.sin(np.radians(angle)) * distance_to_travel/2)
y_4 = (hang_time+travel_time+hang_time, centerY + np.sin(np.radians(angle)) * distance_to_travel/2)
x = [x_1, x_2, x_3, x_4]
y = [y_1, y_2, y_3, y_4]
x_trajectory = {'name': 'tv_pairs',
'tv_pairs': x,
'kind': 'linear'}
y_trajectory = {'name': 'tv_pairs',
'tv_pairs': y,
'kind': 'linear'}
spot_parameters = {'name': 'MovingSpot',
'radius': radius,
'color': color,
'theta': x_trajectory,
'phi': y_trajectory}
return spot_parameters
# %%
    def getOcclusionWithPauseParameters(self, center=None, start_theta=None, bar_width=None, bar_height=None, bar_prime_color=None, bar_probe_color=None, bar_speed=None, occluder_height=None, occluder_color=None,
                                        preprime_duration=None, prime_duration=None, occlusion_duration=None, pause_duration=None, probe_duration=None, render_on_cylinder=None, bar_surface_radius=None, occluder_surface_radius=None):
        """Build bar + occluder stimulus dicts for an occlusion-with-pause epoch.

        Timeline: the bar holds at start_theta for preprime_duration, sweeps
        at bar_speed through the prime and occlusion phases (the occluder is
        sized and placed so the bar is fully hidden during occlusion), holds
        in place for pause_duration while its color changes from
        bar_prime_color to bar_probe_color, then completes a probe sweep of
        probe_duration. Any argument left as None is taken from
        self.protocol_parameters.

        Returns (bar_parameters, occluder_parameters, stim_duration).
        """
        if center is None: center = self.adjustCenter(self.protocol_parameters['center'])
        if start_theta is None: start_theta = self.protocol_parameters['start_theta']  # negative value starts from the opposite side of bar direction
        if bar_width is None: bar_width = self.protocol_parameters['bar_width']
        if bar_height is None: bar_height = self.protocol_parameters['bar_height']
        if bar_prime_color is None: bar_prime_color = self.protocol_parameters['bar_prime_color']
        if bar_probe_color is None: bar_probe_color = self.protocol_parameters['bar_probe_color']
        if bar_speed is None: bar_speed = self.protocol_parameters['bar_speed']
        if occluder_height is None: occluder_height = self.protocol_parameters['occluder_height']
        if occluder_color is None: occluder_color = self.protocol_parameters['occluder_color']
        if preprime_duration is None: preprime_duration = self.protocol_parameters['preprime_duration']
        if prime_duration is None: prime_duration = self.protocol_parameters['prime_duration']
        if occlusion_duration is None: occlusion_duration = self.protocol_parameters['occlusion_duration']
        if pause_duration is None: pause_duration = self.protocol_parameters['pause_duration']
        if probe_duration is None: probe_duration = self.protocol_parameters['probe_duration']
        if render_on_cylinder is None: render_on_cylinder = self.protocol_parameters['render_on_cylinder']
        if bar_surface_radius is None: bar_surface_radius = self.protocol_parameters['bar_surface_radius']
        if occluder_surface_radius is None: occluder_surface_radius = self.protocol_parameters['occluder_surface_radius']

        centerX = center[0]

        # Stimulus construction
        stim_duration = preprime_duration + prime_duration + occlusion_duration + probe_duration + pause_duration

        # Flip the start side with the sign of bar_speed so the bar always
        # travels from start_theta toward (and past) the occluder.
        start_theta *= np.sign(bar_speed)

        # Trajectory knots: parallel lists of time, theta and bar color.
        # Pre-prime hold:
        time = [0, preprime_duration]
        x = [start_theta, start_theta]
        bar_color = [bar_prime_color, bar_prime_color]

        # Prime + occlusion: constant-speed sweep in the prime color.
        prime_movement = bar_speed * (prime_duration + occlusion_duration)
        prime_end_theta = start_theta + prime_movement
        prime_end_time = preprime_duration + prime_duration + occlusion_duration
        time.append(prime_end_time)
        x.append(prime_end_theta)
        bar_color.append(bar_prime_color)

        # Pause: bar holds position; color target switches to the probe color.
        pause_end_theta = prime_end_theta
        pause_end_time = prime_end_time + pause_duration
        time.append(pause_end_time)
        x.append(pause_end_theta)
        bar_color.append(bar_probe_color)

        # Probe: final constant-speed sweep in the probe color.
        probe_movement = bar_speed * probe_duration
        probe_end_theta = pause_end_theta + probe_movement
        probe_end_time = pause_end_time + probe_duration
        time.append(probe_end_time)
        x.append(probe_end_theta)
        bar_color.append(bar_probe_color)

        # Compute location and width of the occluder per specification
        occlusion_start_theta = start_theta + bar_speed * prime_duration
        occluder_width = np.abs(bar_speed) * occlusion_duration + bar_width  # the last term ensures that the bar is completely hidden during the occlusion period
        occluder_loc = occlusion_start_theta + np.sign(bar_speed) * (occluder_width/2 - bar_width/2)  # the last two terms account for widths of the bar and the occluder, such that the bar is completely hidden during occlusion period
        occluder_time = [0, stim_duration]
        occluder_x = [occluder_loc, occluder_loc]

        # bar_traj_r = list(zip(time, (centerX - np.array(x)).tolist()))
        # occluder_traj_r = list(zip(occluder_time, (centerX - np.array(occluder_x)).tolist()))
        # bar_traj_l = list(zip(time, (centerX + np.array(x)).tolist()))
        # occluder_traj_l = list(zip(occluder_time, (centerX + np.array(occluder_x)).tolist()))

        # Create flystim trajectory objects.
        # NOTE(review): the bar color trajectory uses kind='linear', so the
        # prime->probe change ramps across the pause; the otherwise parallel
        # getOcclusionFixedParameters uses kind='previous' (a step) — confirm
        # which behavior is intended here.
        bar_theta_traj = {'name': 'tv_pairs', 'tv_pairs': list(zip(time, (centerX + np.array(x)).tolist())), 'kind': 'linear'}
        bar_color_traj = {'name': 'tv_pairs', 'tv_pairs': list(zip(time, bar_color)), 'kind': 'linear'}
        occluder_theta_traj = {'name': 'tv_pairs', 'tv_pairs': list(zip(occluder_time, (centerX + np.array(occluder_x)).tolist())), 'kind': 'linear'}

        if render_on_cylinder:
            bar_parameters = {'name': 'MovingPatchOnCylinder',
                              'width': bar_width,
                              'height': bar_height,
                              'color': bar_color_traj,
                              'theta': bar_theta_traj,
                              'phi': 0,
                              'angle': 0,
                              'cylinder_radius': bar_surface_radius}
            occluder_parameters = {'name': 'MovingPatchOnCylinder',
                                   'width': occluder_width,
                                   'height': occluder_height,
                                   'color': occluder_color,
                                   'theta': occluder_theta_traj,
                                   'phi': 0,
                                   'angle': 0,
                                   'cylinder_radius': occluder_surface_radius}
        else:
            bar_parameters = {'name': 'MovingPatch',
                              'width': bar_width,
                              'height': bar_height,
                              'color': bar_color_traj,
                              'theta': bar_theta_traj,
                              'phi': 0,
                              'angle': 0,
                              'sphere_radius': bar_surface_radius}
            occluder_parameters = {'name': 'MovingPatch',
                                   'width': occluder_width,
                                   'height': occluder_height,
                                   'color': occluder_color,
                                   'theta': occluder_theta_traj,
                                   'phi': 0,
                                   'angle': 0,
                                   'sphere_radius': occluder_surface_radius}

        return bar_parameters, occluder_parameters, stim_duration
# %%
    def getOcclusionFixedParameters(self, center=None, bar_start_theta=None, bar_end_theta=None, bar_width=None, bar_height=None,
                                    bar_prime_color=None, bar_probe_color=None, bar_speed=None,
                                    occluder_theta=None, occluder_width=None, occluder_height=None, occluder_color=None,
                                    preprime_duration=None, pause_duration=None, render_on_cylinder=None,
                                    bar_surface_radius=None, occluder_surface_radius=None):
        """Build bar + occluder stimulus dicts with a fixed occluder position.

        The bar holds at bar_start_theta for preprime_duration, sweeps at
        bar_speed to occluder_theta (prime), holds there for pause_duration
        while switching from bar_prime_color to bar_probe_color, then
        continues to bar_end_theta (probe). The occluder is a static patch
        at occluder_theta. All theta parameters are sign-flipped to match
        the direction of bar_speed. Any argument left as None is taken from
        self.protocol_parameters.

        Returns (bar_parameters, occluder_parameters, stim_duration).
        """
        if center is None: center = self.adjustCenter(self.protocol_parameters['center'])
        if bar_start_theta is None: bar_start_theta = self.protocol_parameters['bar_start_theta']  # negative value starts from the opposite side of bar direction
        if bar_end_theta is None: bar_end_theta = self.protocol_parameters['bar_end_theta']  # negative value starts from the opposite side of bar direction
        if bar_width is None: bar_width = self.protocol_parameters['bar_width']
        if bar_height is None: bar_height = self.protocol_parameters['bar_height']
        if bar_prime_color is None: bar_prime_color = self.protocol_parameters['bar_prime_color']
        if bar_probe_color is None: bar_probe_color = self.protocol_parameters['bar_probe_color']
        if bar_speed is None: bar_speed = self.protocol_parameters['bar_speed']
        if occluder_theta is None: occluder_theta = self.protocol_parameters['occluder_theta']
        if occluder_width is None: occluder_width = self.protocol_parameters['occluder_width']
        if occluder_height is None: occluder_height = self.protocol_parameters['occluder_height']
        if occluder_color is None: occluder_color = self.protocol_parameters['occluder_color']
        if preprime_duration is None: preprime_duration = self.protocol_parameters['preprime_duration']
        if pause_duration is None: pause_duration = self.protocol_parameters['pause_duration']
        if render_on_cylinder is None: render_on_cylinder = self.protocol_parameters['render_on_cylinder']
        if bar_surface_radius is None: bar_surface_radius = self.protocol_parameters['bar_surface_radius']
        if occluder_surface_radius is None: occluder_surface_radius = self.protocol_parameters['occluder_surface_radius']

        centerX = center[0]

        # Stimulus construction
        # Mirror all positions to match the travel direction.
        bar_start_theta *= np.sign(bar_speed)
        bar_end_theta *= np.sign(bar_speed)
        occluder_theta *= np.sign(bar_speed)

        # Bar: derive the phase durations from distances and |bar_speed|.
        theta_distance = np.abs(bar_end_theta - bar_start_theta)
        prime_distance = np.abs(occluder_theta - bar_start_theta)
        prime_duration = prime_distance / np.abs(bar_speed)
        probe_distance = np.abs(bar_end_theta - occluder_theta)
        probe_duration = probe_distance / np.abs(bar_speed)
        bar_duration_wo_pause = theta_distance / np.abs(bar_speed)
        bar_duration_w_pause = bar_duration_wo_pause + pause_duration
        stim_duration = preprime_duration + bar_duration_w_pause

        # Bar trajectory: knots are (pre-prime hold, sweep to occluder,
        # pause at occluder, sweep to end).
        time = [0,
                preprime_duration,
                preprime_duration+prime_duration,
                preprime_duration+prime_duration+pause_duration,
                stim_duration]
        x = [bar_start_theta,
             bar_start_theta,
             occluder_theta,
             occluder_theta,
             bar_end_theta]
        bar_color = [bar_prime_color,
                     bar_prime_color,
                     bar_prime_color,
                     bar_probe_color,
                     bar_probe_color]

        # Occluder trajectory: static for the whole stimulus.
        occluder_time = [0, stim_duration]
        occluder_x = [occluder_theta, occluder_theta]

        # Create flystim trajectory objects.
        # Color uses kind='previous' (step interpolation): each color holds
        # until the next knot, so the prime->probe switch is a step at the
        # end of the pause.
        bar_theta_traj = {'name': 'tv_pairs', 'tv_pairs': list(zip(time, (centerX + np.array(x)).tolist())), 'kind': 'linear'}
        bar_color_traj = {'name': 'tv_pairs', 'tv_pairs': list(zip(time, bar_color)), 'kind': 'previous'}
        occluder_theta_traj = {'name': 'tv_pairs', 'tv_pairs': list(zip(occluder_time, (centerX + np.array(occluder_x)).tolist())), 'kind': 'linear'}

        if render_on_cylinder:
            bar_parameters = {'name': 'MovingPatchOnCylinder',
                              'width': bar_width,
                              'height': bar_height,
                              'color': bar_color_traj,
                              'theta': bar_theta_traj,
                              'phi': 0,
                              'angle': 0,
                              'cylinder_radius': bar_surface_radius}
            occluder_parameters = {'name': 'MovingPatchOnCylinder',
                                   'width': occluder_width,
                                   'height': occluder_height,
                                   'color': occluder_color,
                                   'theta': occluder_theta_traj,
                                   'phi': 0,
                                   'angle': 0,
                                   'cylinder_radius': occluder_surface_radius}
        else:
            bar_parameters = {'name': 'MovingPatch',
                              'width': bar_width,
                              'height': bar_height,
                              'color': bar_color_traj,
                              'theta': bar_theta_traj,
                              'phi': 0,
                              'angle': 0,
                              'sphere_radius': bar_surface_radius}
            occluder_parameters = {'name': 'MovingPatch',
                                   'width': occluder_width,
                                   'height': occluder_height,
                                   'color': occluder_color,
                                   'theta': occluder_theta_traj,
                                   'phi': 0,
                                   'angle': 0,
                                   'sphere_radius': occluder_surface_radius}

        return bar_parameters, occluder_parameters, stim_duration
# %%
"""
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # FLY-CENTERED STIMS # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
class OcclusionWithPause(BaseProtocol):
    """Moving bar that passes behind a static occluder, optionally pausing
    (and switching color) before the probe sweep.

    Per-epoch varied parameters (bar width/colors/speed, occluder color,
    occlusion and pause durations) are drawn via selectParametersFromLists;
    stimulus dicts come from getOcclusionWithPauseParameters.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        # Draw this epoch's combination of the list-valued protocol parameters.
        current_bar_width, current_bar_prime_color, current_bar_probe_color, current_bar_speed, current_occluder_color, current_occlusion_duration, current_pause_duration = self.selectParametersFromLists((self.protocol_parameters['bar_width'], self.protocol_parameters['bar_prime_color'], self.protocol_parameters['bar_probe_color'], self.protocol_parameters['bar_speed'], self.protocol_parameters['occluder_color'], self.protocol_parameters['occlusion_duration'], self.protocol_parameters['pause_duration']), randomize_order=self.protocol_parameters['randomize_order'])

        bar_parameters, occluder_parameters, stim_duration = self.getOcclusionWithPauseParameters(bar_width=current_bar_width, bar_prime_color=current_bar_prime_color, bar_probe_color=current_bar_probe_color, bar_speed=current_bar_speed, occluder_color=current_occluder_color, occlusion_duration=current_occlusion_duration, pause_duration=current_pause_duration)

        self.epoch_parameters = (bar_parameters, occluder_parameters)
        self.convenience_parameters = {'current_bar_width': current_bar_width,
                                       'current_bar_prime_color': current_bar_prime_color,
                                       'current_bar_probe_color': current_bar_probe_color,
                                       'current_bar_speed': current_bar_speed,
                                       'current_occluder_color': current_occluder_color,
                                       'current_occlusion_duration': current_occlusion_duration,
                                       'current_pause_duration': current_pause_duration,
                                       'current_stim_duration': stim_duration}

    def loadStimuli(self, client):
        bar_parameters = self.epoch_parameters[0].copy()
        occluder_parameters = self.epoch_parameters[1].copy()

        # Epoch duration depends on the drawn parameters, so overwrite the
        # run-level stim_time with this epoch's actual duration.
        self.run_parameters['stim_time'] = self.convenience_parameters['current_stim_duration']

        bg = self.run_parameters.get('idle_color')
        multicall = flyrpc.multicall.MyMultiCall(client.manager)
        # Uniform background, then bar and occluder loaded with hold=True.
        multicall.load_stim(name='ConstantBackground', color=[bg,bg,bg,1], side_length=200)
        multicall.load_stim(**bar_parameters, hold=True)
        multicall.load_stim(**occluder_parameters, hold=True)

        multicall()

    def startStimuli(self, client, append_stim_frames=False, print_profile=True):
        sleep(self.run_parameters['pre_time'])
        multicall = flyrpc.multicall.MyMultiCall(client.manager)

        # stim time: start the stimulus and the photodiode corner square together.
        multicall.start_stim(append_stim_frames=append_stim_frames)
        multicall.start_corner_square()
        multicall()
        sleep(self.convenience_parameters['current_stim_duration'])

        # tail time: stop the stimulus and blank the corner square.
        multicall = flyrpc.multicall.MyMultiCall(client.manager)
        multicall.stop_stim(print_profile=print_profile)
        multicall.black_corner_square()
        multicall()

        sleep(self.run_parameters['tail_time'])

    def getParameterDefaults(self):
        self.protocol_parameters = {'center': [0, 0],
                                    'start_theta': -90.0,
                                    'bar_width': 15.0,
                                    'bar_height': 150.0,
                                    'bar_prime_color': [1.0, 0.0],
                                    'bar_probe_color': 1.0,
                                    'bar_speed': [-35.0, -25.0, -15.0, 15.0, 25.0, 35.0],
                                    'occluder_height': 170.0,
                                    'occluder_color': self.run_parameters.get('idle_color'),
                                    'preprime_duration': 0.0,
                                    'prime_duration': 2.0,
                                    'occlusion_duration': [0.5, 2.0],
                                    'pause_duration': [0.0, 1.0],
                                    'probe_duration': 1.5,
                                    'render_on_cylinder': False,
                                    'bar_surface_radius': 3.0,
                                    'occluder_surface_radius': 2.0,
                                    'randomize_order': True,}

    def getRunParameterDefaults(self):
        self.run_parameters = {'protocol_ID': 'OcclusionWithPause',
                               'num_epochs': 240,  # 12 x 20 each
                               'pre_time': 1.0,
                               'tail_time': 1.0,
                               'idle_color': 0.0,
                               'stim_time': 5.5}
# %%
class OcclusionFixed(BaseProtocol):
    """Moving bar that passes behind an occluder at a fixed theta position.

    Per-epoch varied parameters are drawn via selectParametersFromLists;
    stimulus dicts come from getOcclusionFixedParameters. The loadStimuli /
    startStimuli sequencing matches OcclusionWithPause.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        # Draw this epoch's combination of the list-valued protocol parameters.
        current_bar_start_theta, current_bar_width, current_bar_prime_color, current_bar_probe_color, current_bar_speed, current_occluder_color, current_pause_duration = self.selectParametersFromLists((self.protocol_parameters['bar_start_theta'],self.protocol_parameters['bar_width'], self.protocol_parameters['bar_prime_color'], self.protocol_parameters['bar_probe_color'], self.protocol_parameters['bar_speed'], self.protocol_parameters['occluder_color'], self.protocol_parameters['pause_duration']), randomize_order=self.protocol_parameters['randomize_order'])

        bar_parameters, occluder_parameters, stim_duration = self.getOcclusionFixedParameters(bar_start_theta=current_bar_start_theta, bar_width=current_bar_width, bar_prime_color=current_bar_prime_color, bar_probe_color=current_bar_probe_color, bar_speed=current_bar_speed, occluder_color=current_occluder_color, pause_duration=current_pause_duration)

        self.epoch_parameters = (bar_parameters, occluder_parameters)
        self.convenience_parameters = {'current_bar_width': current_bar_width,
                                       'current_bar_prime_color': current_bar_prime_color,
                                       'current_bar_probe_color': current_bar_probe_color,
                                       'current_bar_speed': current_bar_speed,
                                       'current_occluder_color': current_occluder_color,
                                       'current_pause_duration': current_pause_duration,
                                       'current_stim_duration': stim_duration}

    def loadStimuli(self, client):
        bar_parameters = self.epoch_parameters[0].copy()
        occluder_parameters = self.epoch_parameters[1].copy()

        # Epoch duration depends on the drawn parameters, so overwrite the
        # run-level stim_time with this epoch's actual duration.
        self.run_parameters['stim_time'] = self.convenience_parameters['current_stim_duration']

        bg = self.run_parameters.get('idle_color')
        multicall = flyrpc.multicall.MyMultiCall(client.manager)
        # Uniform background, then bar and occluder loaded with hold=True.
        multicall.load_stim(name='ConstantBackground', color=[bg,bg,bg,1], side_length=200)
        multicall.load_stim(**bar_parameters, hold=True)
        multicall.load_stim(**occluder_parameters, hold=True)

        multicall()

    def startStimuli(self, client, append_stim_frames=False, print_profile=True):
        sleep(self.run_parameters['pre_time'])
        multicall = flyrpc.multicall.MyMultiCall(client.manager)

        # stim time: start the stimulus and the photodiode corner square together.
        multicall.start_stim(append_stim_frames=append_stim_frames)
        multicall.start_corner_square()
        multicall()
        sleep(self.convenience_parameters['current_stim_duration'])

        # tail time: stop the stimulus and blank the corner square.
        multicall = flyrpc.multicall.MyMultiCall(client.manager)
        multicall.stop_stim(print_profile=print_profile)
        multicall.black_corner_square()
        multicall()

        sleep(self.run_parameters['tail_time'])

    def getParameterDefaults(self):
        self.protocol_parameters = {'center': [0, 0],
                                    'bar_start_theta': [90.0],
                                    'bar_end_theta': 0.0,
                                    'bar_width': 15.0,
                                    'bar_height': 50.0,
                                    'bar_prime_color': [1.0],
                                    'bar_probe_color': 1.0,
                                    'bar_speed': [15.0, -15.0],
                                    'occluder_theta': 60.0,
                                    'occluder_width': 30.0,
                                    'occluder_height': 170.0,
                                    'occluder_color': [0.0],
                                    'preprime_duration': 0.0,
                                    'pause_duration': [0.0],
                                    'render_on_cylinder': True,
                                    'bar_surface_radius': 3.0,
                                    'occluder_surface_radius': 2.0,
                                    'randomize_order': True,}

    def getRunParameterDefaults(self):
        self.run_parameters = {'protocol_ID': 'OcclusionFixed',
                               'num_epochs': 240,  # 12 x 20 each
                               'pre_time': 1.0,
                               'tail_time': 1.0,
                               'idle_color': 0.0,
                               'stim_time': 5.5}
# %%
class SphericalCheckerboardWhiteNoise(BaseProtocol):
    """Ternary white-noise checkerboard rendered on a spherical patch.

    Each epoch draws a fresh RNG seed (recorded in convenience_parameters)
    so the noise sequence can be reconstructed offline.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        adj_center = self.adjustCenter(self.protocol_parameters['center'])
        # Per-epoch seed, saved so the stimulus can be regenerated later.
        start_seed = int(np.random.choice(range(int(1e6))))

        distribution_data = {
            'name': 'Ternary',
            'args': [],
            'kwargs': {'rand_min': self.protocol_parameters['rand_min'],
                       'rand_max': self.protocol_parameters['rand_max']},
        }

        self.epoch_parameters = {
            'name': 'RandomGridOnSphericalPatch',
            'patch_width': self.protocol_parameters['patch_size'],
            'patch_height': self.protocol_parameters['patch_size'],
            'width': self.protocol_parameters['grid_width'],
            'height': self.protocol_parameters['grid_height'],
            'start_seed': start_seed,
            'update_rate': self.protocol_parameters['update_rate'],
            'distribution_data': distribution_data,
            'theta': adj_center[0],
            'phi': adj_center[1],
        }
        self.convenience_parameters = {'start_seed': start_seed}

    def getParameterDefaults(self):
        self.protocol_parameters = {
            'patch_size': 5.0,
            'update_rate': 20.0,
            'rand_min': 0.0,
            'rand_max': 1.0,
            'grid_width': 60,
            'grid_height': 60,
            'center': [0.0, 0.0],
        }

    def getRunParameterDefaults(self):
        self.run_parameters = {
            'protocol_ID': 'SphericalCheckerboardWhiteNoise',
            'num_epochs': 10,
            'pre_time': 2.0,
            'stim_time': 30.0,
            'tail_time': 2.0,
            'idle_color': 0.5,
        }
# %%
# class ContrastReversingGrating(BaseProtocol):
# def __init__(self, cfg):
# super().__init__(cfg)
#
# self.getRunParameterDefaults()
# self.getParameterDefaults()
#
# def getEpochParameters(self):
# # TODO: center size with aperture (center and center_size): maybe parent class aperture method?
# current_temporal_frequency = self.selectParametersFromLists(self.protocol_parameters['temporal_frequency'], randomize_order=self.protocol_parameters['randomize_order'])
#
# # Make the contrast trajectory
# contrast_traj = {'name': 'Sinusoid',
# 'temporal_frequency': current_temporal_frequency,
# 'amplitude': self.protocol_parameters['contrast'],
# 'offset': 0}
#
# self.epoch_parameters = {'name': 'CylindricalGrating',
# 'period': self.protocol_parameters['spatial_period'],
# 'color': [1, 1, 1, 1],
# 'mean': self.protocol_parameters['mean'],
# 'contrast': contrast_traj,
# 'angle': self.protocol_parameters['angle'],
# 'offset': 0.0,
# 'cylinder_radius': 1.0,
# 'cylinder_height': 10.0,
# 'profile': 'square',
# 'theta': self.screen_center[0]}
#
# self.convenience_parameters = {'current_temporal_frequency': current_temporal_frequency}
#
# self.meta_parameters = {'center_size': self.protocol_parameters['center_size'],
# 'center': self.adjustCenter(self.protocol_parameters['center'])}
#
# def getParameterDefaults(self):
# self.protocol_parameters = {'spatial_period': 20.0,
# 'contrast': 1.0,
# 'mean': 0.5,
# 'temporal_frequency': [0.5, 1.0, 2.0, 4.0, 8.0, 16.0],
# 'center': [0, 0],
# 'center_size': 40.0,
# 'angle': 0.0,
# 'randomize_order': True}
#
# def getRunParameterDefaults(self):
# self.run_parameters = {'protocol_ID': 'ContrastReversingGrating',
# 'num_epochs': 40,
# 'pre_time': 1.0,
# 'stim_time': 4.0,
# 'tail_time': 1.0,
# 'idle_color': 0.5}
# %%
class ExpandingEdges(BaseProtocol):
    """Periodic expanding-edge stimulus.

    Each epoch draws an expansion rate and an expand_dark flag (whether the
    expanding region is dark or light) from the protocol parameter lists.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        # Draw this epoch's rate and dark/light polarity.
        current_rate, current_expand_dark = self.selectParametersFromLists((self.protocol_parameters['rate'], self.protocol_parameters['expand_dark']), randomize_order = self.protocol_parameters['randomize_order'])

        self.epoch_parameters = {'name': 'ExpandingEdges',
                                 'period': self.protocol_parameters['period'],
                                 'rate': current_rate,
                                 # expand_dark truthy -> expanding region is dark,
                                 # opposing region is light; otherwise reversed.
                                 'expander_color': self.protocol_parameters['dark_color'] if current_expand_dark else self.protocol_parameters['light_color'],
                                 'opposite_color': self.protocol_parameters['light_color'] if current_expand_dark else self.protocol_parameters['dark_color'],
                                 'width_0': self.protocol_parameters['width_0'],
                                 'hold_duration': self.protocol_parameters['hold_duration'],
                                 'color': [1, 1, 1, 1],
                                 'n_theta_pixels': self.protocol_parameters['n_theta_pixels'],
                                 'cylinder_radius': 1,
                                 'vert_extent': self.protocol_parameters['vert_extent'],
                                 'theta_offset': self.protocol_parameters['theta_offset'],
                                 'theta': self.screen_center[0]}

        self.convenience_parameters = {'current_rate': current_rate, 'current_expand_dark': current_expand_dark}

        self.meta_parameters = {'center': self.adjustCenter(self.protocol_parameters['center'])}

    def getParameterDefaults(self):
        self.protocol_parameters = {'period': 40.0,
                                    'rate': [-80.0, 80.0],
                                    'vert_extent': 80.0,
                                    'theta_offset': 0.0,
                                    'light_color': 1.0,
                                    'dark_color': 0.0,
                                    'expand_dark': [0,1],
                                    'width_0': 2,
                                    'hold_duration': 0.550,
                                    'n_theta_pixels': 5760,
                                    'center': [0, 0],
                                    'randomize_order': True}

    def getRunParameterDefaults(self):
        self.run_parameters = {'protocol_ID': 'ExpandingEdges',
                               'num_epochs': 400,
                               'pre_time': 1.0,
                               'stim_time': 0.800,  # 0.550 hold then 0.250 rotate
                               'tail_time': 1.0,
                               'idle_color': 0.5}
class DriftingSquareGrating(BaseProtocol):
    """Full-field square-wave grating drifting at a fixed rate.

    The drift direction (angle) is drawn from protocol_parameters['angle']
    each epoch, optionally in randomized order.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        # TODO: center size with aperture (center and center_size)
        params = self.protocol_parameters
        current_angle = self.selectParametersFromLists(params['angle'], randomize_order=params['randomize_order'])

        self.epoch_parameters = {
            'name': 'RotatingGrating',
            'period': params['period'],
            'rate': params['rate'],
            'hold_duration': params['hold_duration'],
            'color': [1, 1, 1, 1],
            'mean': params['mean'],
            'contrast': params['contrast'],
            'angle': current_angle,
            'offset': 0.0,
            'cylinder_radius': 1,
            'cylinder_height': 10,
            'profile': 'square',
            'theta': self.screen_center[0],
        }
        self.convenience_parameters = {'current_angle': current_angle}
        self.meta_parameters = {
            'center_size': params['center_size'],
            'center': self.adjustCenter(params['center']),
        }

    def getParameterDefaults(self):
        self.protocol_parameters = {
            'period': 40.0,
            'rate': 80.0,
            'contrast': 1.0,
            'mean': 0.5,
            'angle': [0.0, 180.0],
            'hold_duration': 0.550,
            'center': [0, 0],
            'center_size': 180.0,
            'randomize_order': True,
        }

    def getRunParameterDefaults(self):
        self.run_parameters = {
            'protocol_ID': 'DriftingSquareGrating',
            'num_epochs': 40,
            'pre_time': 1.0,
            'stim_time': 4.0,
            'tail_time': 1.0,
            'idle_color': 0.5,
        }
# %%
class SplitDriftingSquareGrating(BaseProtocol):
    """Two overlapping square-wave gratings drifting in opposite directions.

    Both gratings share every parameter except 'angle'; the second grating's
    angle is offset by 180 degrees.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def loadStimuli(self, client):
        """Load background plus the two gratings in a single multicall."""
        params_0 = self.epoch_parameters_0.copy()
        params_1 = self.epoch_parameters_1.copy()
        bg = self.run_parameters.get('idle_color')

        multicall = flyrpc.multicall.MyMultiCall(client.manager)
        multicall.load_stim(name='ConstantBackground', color=[bg, bg, bg, 1], side_length=200)
        multicall.load_stim(**params_0, hold=True)
        multicall.load_stim(**params_1, hold=True)
        multicall()

    def getEpochParameters(self):
        # TODO: center size with aperture (center and center_size)
        prot = self.protocol_parameters
        current_angle = self.selectParametersFromLists(prot['angle'],
                                                       randomize_order=prot['randomize_order'])

        base = {'name': 'RotatingGrating',
                'period': prot['period'],
                'rate': prot['rate'],
                'color': [1, 1, 1, 1],
                'mean': prot['mean'],
                'contrast': prot['contrast'],
                'angle': current_angle,
                'offset': 0.0,
                'cylinder_radius': 1,
                'cylinder_location': (prot['cylinder_xshift'], 0, 0),
                'cylinder_height': 10,
                'profile': 'square',
                'theta': self.screen_center[0]}

        self.epoch_parameters_0 = base
        # Second grating is identical except it drifts the opposite way.
        self.epoch_parameters_1 = dict(base, angle=current_angle + 180.0)

        self.convenience_parameters = {'current_angle': current_angle}
        self.meta_parameters = {'center_size': prot['center_size'],
                                'center': self.adjustCenter(prot['center'])}

    def getParameterDefaults(self):
        """Defaults; cylinder_xshift nudges the cylinder slightly off-axis (meters)."""
        self.protocol_parameters = dict(period=20.0,
                                        rate=20.0,
                                        contrast=1.0,
                                        mean=0.5,
                                        angle=[0.0, 180.0],
                                        center=[0, 0],
                                        center_size=180.0,
                                        cylinder_xshift=-0.001,
                                        randomize_order=True)

    def getRunParameterDefaults(self):
        """Run timing defaults for this protocol."""
        self.run_parameters = dict(protocol_ID='SplitDriftingSquareGrating',
                                   num_epochs=40,
                                   pre_time=1.0,
                                   stim_time=4.0,
                                   tail_time=1.0,
                                   idle_color=0.5)
# %%
class ExpandingMovingSpot(BaseProtocol):
    """Moving spot; diameter, intensity, and speed are co-varied across epochs."""

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        prot = self.protocol_parameters
        diameter, intensity, speed = self.selectParametersFromLists(
            (prot['diameter'], prot['intensity'], prot['speed']),
            randomize_order=prot['randomize_order'])

        # The spot helper wants a radius; the protocol parameter is a diameter.
        self.epoch_parameters = self.getMovingSpotParameters(radius=diameter / 2,
                                                             color=intensity,
                                                             speed=speed)

        self.convenience_parameters = {'current_diameter': diameter,
                                       'current_intensity': intensity,
                                       'current_speed': speed}

    def getParameterDefaults(self):
        """Defaults: broad diameter sweep, dark and bright spots, one speed."""
        self.protocol_parameters = dict(diameter=[2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 15.0, 20.0, 25.0, 30.0, 35.0, 40.0, 45.0, 50.0],
                                        intensity=[0.0, 1.0],
                                        center=[0, 0],
                                        speed=[80.0],
                                        angle=0.0,
                                        randomize_order=True)

    def getRunParameterDefaults(self):
        """Run timing defaults for this protocol."""
        self.run_parameters = dict(protocol_ID='ExpandingMovingSpot',
                                   num_epochs=70,
                                   pre_time=0.5,
                                   stim_time=3.0,
                                   tail_time=1.0,
                                   idle_color=0.5)
# %%
class FlickeringVertBars(BaseProtocol):
    """Tall stationary bar whose luminance flickers sinusoidally.

    Flicker frequency and azimuthal bar location are the epoch-varied parameters.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        prot = self.protocol_parameters
        current_temporal_frequency, current_theta_loc = self.selectParametersFromLists(
            (prot['temporal_frequency'], prot['theta_loc']),
            randomize_order=prot['randomize_order'])

        # Sinusoidal luminance modulation around the mean, scaled by contrast.
        color_traj = {'name': 'Sinusoid',
                      'temporal_frequency': current_temporal_frequency,
                      'amplitude': prot['mean'] * prot['contrast'],
                      'offset': prot['mean']}

        # Same patch parameters either way; only the render geometry differs.
        if prot['render_on_cylinder']:
            stim_name, geometry = 'MovingPatchOnCylinder', {'cylinder_radius': 1}
        else:
            stim_name, geometry = 'MovingPatch', {'sphere_radius': 1}

        self.epoch_parameters = {'name': stim_name,
                                 'width': prot['width'],
                                 'height': prot['height'],
                                 **geometry,
                                 'color': color_traj,
                                 'theta': current_theta_loc,
                                 'phi': prot['phi_loc'],
                                 'angle': 0}

        self.convenience_parameters = {'current_temporal_frequency': current_temporal_frequency,
                                       'current_theta_loc': current_theta_loc}

    def getParameterDefaults(self):
        """Defaults: narrow, near-full-height bar at two azimuths, 10 Hz flicker."""
        self.protocol_parameters = dict(height=150.0,
                                        width=10.0,
                                        theta_loc=[0.0, 10.0],
                                        phi_loc=0,
                                        contrast=1.0,
                                        mean=0.5,
                                        temporal_frequency=[10.0],
                                        render_on_cylinder=False,
                                        randomize_order=True)

    def getRunParameterDefaults(self):
        """Run timing defaults for this protocol."""
        self.run_parameters = dict(protocol_ID='FlickeringVertBars',
                                   num_epochs=30,
                                   pre_time=1.0,
                                   stim_time=4.0,
                                   tail_time=1.0,
                                   idle_color=0.5)
class FlickeringPatch(BaseProtocol):
    """Stationary patch flickering sinusoidally; frequency and location vary per epoch."""

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        prot = self.protocol_parameters
        current_temporal_frequency, current_center = self.selectParametersFromLists(
            (prot['temporal_frequency'], prot['center']),
            randomize_order=prot['randomize_order'])
        adj_center = self.adjustCenter(current_center)

        # Sinusoidal luminance modulation around the mean, scaled by contrast.
        color_traj = {'name': 'Sinusoid',
                      'temporal_frequency': current_temporal_frequency,
                      'amplitude': prot['mean'] * prot['contrast'],
                      'offset': prot['mean']}

        # Same patch parameters either way; only the render geometry differs.
        if prot['render_on_cylinder']:
            stim_name, geometry = 'MovingPatchOnCylinder', {'cylinder_radius': 1}
        else:
            stim_name, geometry = 'MovingPatch', {'sphere_radius': 1}

        self.epoch_parameters = {'name': stim_name,
                                 'width': prot['width'],
                                 'height': prot['height'],
                                 **geometry,
                                 'color': color_traj,
                                 'theta': adj_center[0],
                                 'phi': adj_center[1],
                                 'angle': 0}

        self.convenience_parameters = {'current_temporal_frequency': current_temporal_frequency,
                                       'current_center': current_center}

    def getParameterDefaults(self):
        """Defaults: 10x10 deg patch at two candidate centers, 10 Hz flicker."""
        self.protocol_parameters = dict(height=10.0,
                                        width=10.0,
                                        center=[[0, 0], [10, 0]],
                                        contrast=1.0,
                                        mean=0.5,
                                        temporal_frequency=[10.0],
                                        render_on_cylinder=False,
                                        randomize_order=True)

    def getRunParameterDefaults(self):
        """Run timing defaults for this protocol."""
        self.run_parameters = dict(protocol_ID='FlickeringPatch',
                                   num_epochs=30,
                                   pre_time=1.0,
                                   stim_time=4.0,
                                   tail_time=1.0,
                                   idle_color=0.5)
# %%
class LoomingSpot(BaseProtocol):
    """Spot that looms from start_size to end_size following an r/v trajectory.

    The r/v ratio (approach speed parameter) is the epoch-varied quantity.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        prot = self.protocol_parameters
        adj_center = self.adjustCenter(prot['center'])  # re-center on screen center

        # rv_ratio is specified in msec; the stimulus wants seconds.
        current_rv_ratio = self.selectParametersFromLists(
            prot['rv_ratio'], randomize_order=prot['randomize_order'])
        current_rv_ratio = current_rv_ratio / 1e3

        radius_trajectory = {'name': 'Loom',
                             'rv_ratio': current_rv_ratio,
                             'stim_time': self.run_parameters['stim_time'],
                             'start_size': prot['start_size'],
                             'end_size': prot['end_size']}

        self.epoch_parameters = {'name': 'MovingSpot',
                                 'radius': radius_trajectory,
                                 'sphere_radius': 1,
                                 'color': prot['intensity'],
                                 'theta': adj_center[0],
                                 'phi': adj_center[1]}

        self.convenience_parameters = {'current_rv_ratio': current_rv_ratio}

    def getParameterDefaults(self):
        """Defaults: dark spot, 2.5 -> 80 deg, five r/v ratios (msec)."""
        self.protocol_parameters = dict(intensity=0.0,
                                        center=[0, 0],
                                        start_size=2.5,
                                        end_size=80.0,
                                        rv_ratio=[5.0, 10.0, 20.0, 40.0, 80.0],
                                        randomize_order=True)

    def getRunParameterDefaults(self):
        """Run timing defaults for this protocol."""
        self.run_parameters = dict(protocol_ID='LoomingSpot',
                                   num_epochs=75,
                                   pre_time=0.5,
                                   stim_time=1.0,
                                   tail_time=1.0,
                                   idle_color=0.5)
# %%
class MovingSpotOnDriftingGrating(BaseProtocol):
    """Moving spot presented on top of a drifting square-wave grating surround.

    Spot speed and grating drift rate are varied in all combinations across
    epochs.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        prot = self.protocol_parameters
        current_spot_speed, current_grate_rate = self.selectParametersFromLists(
            (prot['spot_speed'], prot['grate_rate']),
            all_combinations=True,
            randomize_order=prot['randomize_order'])

        patch_parameters = self.getMovingSpotParameters(speed=current_spot_speed,
                                                        radius=prot['spot_radius'],
                                                        color=prot['spot_color'],
                                                        distance_to_travel=180)

        grate_parameters = {'name': 'RotatingGrating',
                            'period': prot['grate_period'],
                            'rate': current_grate_rate,
                            'color': [1, 1, 1, 1],
                            'mean': self.run_parameters['idle_color'],
                            'contrast': prot['grate_contrast'],
                            'angle': prot['angle'],
                            'offset': 0.0,
                            # presumably > spot radius so the spot renders in front — confirm
                            'cylinder_radius': 1.1,
                            'cylinder_height': 20,
                            'profile': 'square',
                            'theta': self.screen_center[0]}

        # Grating first, spot second (load order in loadStimuli follows this tuple).
        self.epoch_parameters = (grate_parameters, patch_parameters)
        self.convenience_parameters = {'current_spot_speed': current_spot_speed,
                                       'current_grate_rate': current_grate_rate}

    def loadStimuli(self, client):
        """Load grating then spot in one multicall; both held until epoch start.

        Fix: removed a dead local that fetched 'idle_color' but was never used.
        NOTE(review): unlike SplitDriftingSquareGrating, no ConstantBackground
        stimulus is loaded here — confirm the grating is meant to fill the
        surround on its own.
        """
        grate_parameters = self.epoch_parameters[0].copy()
        patch_parameters = self.epoch_parameters[1].copy()

        multicall = flyrpc.multicall.MyMultiCall(client.manager)
        multicall.load_stim(**grate_parameters, hold=True)
        multicall.load_stim(**patch_parameters, hold=True)
        multicall()

    def getParameterDefaults(self):
        """Defaults: dark 10 deg-radius spot over a 10 deg-period grating."""
        self.protocol_parameters = {'center': [0, 0],
                                    'spot_radius': 10.0,
                                    'spot_color': 0.0,
                                    'spot_speed': [30.0, 60.0, 90.0],
                                    'grate_period': 10.0,
                                    'grate_rate': [-120.0, -90.0, -60.0, -30.0, -15.0, 0.0,
                                                   15.0, 30.0, 60.0, 90.0, 120.0],
                                    'grate_contrast': 0.5,
                                    'angle': 0.0,
                                    'randomize_order': True}

    def getRunParameterDefaults(self):
        """Run timing defaults; 165 epochs = 3 speeds x 11 rates x 5 repeats."""
        self.run_parameters = {'protocol_ID': 'MovingSpotOnDriftingGrating',
                               'num_epochs': 165,
                               'pre_time': 1.0,
                               'stim_time': 6.0,
                               'tail_time': 1.0,
                               'idle_color': 0.5}
# %%
class SurroundGratingTuning(BaseProtocol):
    """Moving spot over a drifting sine grating; tunes surround rate AND period.

    Spot speed, grating rate, and grating period are varied in all combinations
    across epochs.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        prot = self.protocol_parameters
        current_spot_speed, current_grate_rate, current_grate_period = \
            self.selectParametersFromLists(
                (prot['spot_speed'], prot['grate_rate'], prot['grate_period']),
                all_combinations=True,
                randomize_order=prot['randomize_order'])

        patch_parameters = self.getMovingSpotParameters(speed=current_spot_speed,
                                                        radius=prot['spot_radius'],
                                                        color=prot['spot_color'],
                                                        distance_to_travel=180)

        grate_parameters = {'name': 'RotatingGrating',
                            'period': current_grate_period,
                            'rate': current_grate_rate,
                            'color': [1, 1, 1, 1],
                            'mean': self.run_parameters['idle_color'],
                            'contrast': prot['grate_contrast'],
                            'angle': prot['angle'],
                            'offset': 0.0,
                            # presumably > spot radius so the spot renders in front — confirm
                            'cylinder_radius': 1.1,
                            'cylinder_height': 20,
                            'profile': 'sine',  # sine profile (vs square in MovingSpotOnDriftingGrating)
                            'theta': self.screen_center[0]}

        # Grating first, spot second (load order in loadStimuli follows this tuple).
        self.epoch_parameters = (grate_parameters, patch_parameters)
        self.convenience_parameters = {'current_spot_speed': current_spot_speed,
                                       'current_grate_rate': current_grate_rate,
                                       'current_grate_period': current_grate_period}

    def loadStimuli(self, client):
        """Load grating then spot in one multicall; both held until epoch start.

        Fix: removed a dead local that fetched 'idle_color' but was never used.
        NOTE(review): no ConstantBackground stimulus is loaded here — confirm
        the grating is meant to fill the surround on its own.
        """
        grate_parameters = self.epoch_parameters[0].copy()
        patch_parameters = self.epoch_parameters[1].copy()

        multicall = flyrpc.multicall.MyMultiCall(client.manager)
        multicall.load_stim(**grate_parameters, hold=True)
        multicall.load_stim(**patch_parameters, hold=True)
        multicall()

    def getParameterDefaults(self):
        """Defaults: one spot speed, 11 grating rates x 4 grating periods."""
        self.protocol_parameters = {'center': [0, 0],
                                    'spot_radius': 7.5,
                                    'spot_color': 0.0,
                                    'spot_speed': [60.0],
                                    'grate_period': [10.0, 20.0, 30.0, 40.0],
                                    'grate_rate': [-120.0, -90.0, -60.0, -30.0, -15.0, 0.0,
                                                   15.0, 30.0, 60.0, 90.0, 120.0],
                                    'grate_contrast': 0.5,
                                    'angle': 0.0,
                                    'randomize_order': True}

    def getRunParameterDefaults(self):
        self.run_parameters = {'protocol_ID': 'SurroundGratingTuning',
                               'num_epochs': 220,  # 11 x 5 avgs each
                               'pre_time': 1.0,
                               'stim_time': 3.0,
                               'tail_time': 1.0,
                               'idle_color': 0.5}
# %%
class MovingRectangle(BaseProtocol):
    """Rectangle sweeping across the display; intensity and direction vary per epoch."""

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        prot = self.protocol_parameters
        current_intensity, current_angle = self.selectParametersFromLists(
            (prot['intensity'], prot['angle']),
            randomize_order=prot['randomize_order'])

        self.epoch_parameters = self.getMovingPatchParameters(angle=current_angle,
                                                              color=current_intensity)

        self.convenience_parameters = {'current_angle': current_angle,
                                       'current_intensity': current_intensity}

    def getParameterDefaults(self):
        """Defaults: 5x50 deg bar, dark/bright, two sweep directions."""
        self.protocol_parameters = dict(width=5.0,
                                        height=50.0,
                                        intensity=[0.0, 1.0],
                                        center=[0, 0],
                                        speed=80.0,
                                        angle=[0.0, 180.0],
                                        randomize_order=True)

    def getRunParameterDefaults(self):
        """Run timing defaults for this protocol."""
        self.run_parameters = dict(protocol_ID='MovingRectangle',
                                   num_epochs=40,
                                   pre_time=0.5,
                                   stim_time=3.0,
                                   tail_time=1.0,
                                   idle_color=0.5)
# %%
# class MovingSquareMapping(BaseProtocol):
# def __init__(self, cfg):
# super().__init__(cfg)
#
# self.getRunParameterDefaults()
# self.getParameterDefaults()
#
# def getEpochParameters(self):
# # adjust to screen center
# az_loc = [x + self.screen_center[0] for x in self.protocol_parameters['azimuth_locations']]
# el_loc = [x + self.screen_center[1] for x in self.protocol_parameters['elevation_locations']]
#
# if type(az_loc) is not list:
# az_loc = [az_loc]
# if type(el_loc) is not list:
# el_loc = [el_loc]
#
# center_el = np.median(el_loc)
# center_az = np.median(az_loc)
#
# location_list = np.concatenate((az_loc, el_loc))
# movement_axis_list = np.concatenate((np.ones(len(az_loc)), 2*np.ones(len(el_loc))))
#
# current_search_axis_code, current_location = self.selectParametersFromLists((movement_axis_list, location_list),
# all_combinations = False,
# randomize_order = self.protocol_parameters['randomize_order'])
#
# if current_search_axis_code == 1:
# current_search_axis = 'azimuth'
# angle = 90
# center = [current_location, center_el]
# elif current_search_axis_code == 2:
# current_search_axis = 'elevation'
# angle = 0
# center = [center_az, current_location]
#
# self.epoch_parameters = self.getMovingPatchParameters(height=self.protocol_parameters['square_width'],
# width=self.protocol_parameters['square_width'],
# angle=angle,
# center=center,
# color=self.protocol_parameters['intensity'])
#
# self.convenience_parameters = {'current_search_axis': current_search_axis,
# 'current_location': current_location,
# 'current_angle': angle,
# 'current_center': center}
#
# def getParameterDefaults(self):
# self.protocol_parameters = {'square_width': 5.0,
# 'intensity': 0.0,
# 'elevation_locations': [-20, -15, -10, -5, 0, 5, 10, 15, 20],
# 'azimuth_locations': [-20, -15, -10, -5, 0, 5, 10, 15, 20],
# 'speed': 80.0,
# 'randomize_order': True}
#
# def getRunParameterDefaults(self):
# self.run_parameters = {'protocol_ID': 'MovingSquareMapping',
# 'num_epochs': 100,
# 'pre_time': 0.5,
# 'stim_time': 2.0,
# 'tail_time': 1.0,
# 'idle_color': 0.5}
# %%
# class PeriodicVelocityNoise(BaseProtocol):
# def __init__(self, cfg):
# super().__init__(cfg)
#
# self.getRunParameterDefaults()
# self.getParameterDefaults()
#
# def getEpochParameters(self):
# if self.protocol_parameters['start_seed'] == -1:
# current_seed = np.random.randint(0, 10000)
# else:
# current_seed = self.protocol_parameters['start_seed'] + self.num_epochs_completed
#
# np.random.seed(int(current_seed))
# n_updates = int(np.ceil(self.run_parameters['stim_time'] * self.protocol_parameters['velocity_update_rate']))
# velocity = np.random.normal(size=n_updates, scale=self.protocol_parameters['velocity_std']) / self.protocol_parameters['velocity_update_rate'] # deg/sec -> deg/update
#
# time_steps = np.linspace(0, self.run_parameters['stim_time'], n_updates) # time steps of update trajectory
#
# position = np.cumsum(velocity) # position at each update time point, according to new velocity value
#
# theta_traj = {'name': 'tv_pairs',
# 'tv_pairs': list(zip(time_steps, position)),
# 'kind': 'linear'}
#
# distribution_data = {'name': 'Binary',
# 'args': [],
# 'kwargs': {'rand_min': self.protocol_parameters['intensity'],
# 'rand_max': self.protocol_parameters['intensity']}}
#
# self.epoch_parameters = {'name': 'RandomBars',
# 'distribution_data': distribution_data,
# 'period': self.protocol_parameters['period'],
# 'width': self.protocol_parameters['width'],
# 'vert_extent': self.protocol_parameters['height'],
# 'background': 0.5,
# 'color': [1, 1, 1, 1],
# 'theta': theta_traj,
# 'cylinder_location': (0, 0, self.protocol_parameters['z_offset'])}
#
# self.convenience_parameters = {'current_seed': current_seed,
# 'time_steps': time_steps,
# 'velocity': velocity,
# 'position': position}
#
# def getParameterDefaults(self):
# self.protocol_parameters = {'height': 120.0,
# 'width': 5.0,
# 'period': 40.0, # deg spacing between bars
# 'z_offset': 0.0, #meters, offset of cylinder
# 'velocity_std': 80.0, # deg/sec
# 'velocity_update_rate': 8, # Hz
# 'start_seed': -1,
# 'intensity': 0.0}
#
# def getRunParameterDefaults(self):
# self.run_parameters = {'protocol_ID': 'PeriodicVelocityNoise',
# 'num_epochs': 40,
# 'pre_time': 1.0,
# 'stim_time': 30.0,
# 'tail_time': 1.0,
# 'idle_color': 0.5}
# %%
# class VelocityNoise(BaseProtocol):
# def __init__(self, cfg):
# super().__init__(cfg)
#
# self.getRunParameterDefaults()
# self.getParameterDefaults()
#
# def getEpochParameters(self):
# adj_center = self.adjustCenter(self.protocol_parameters['center'])
#
# if self.protocol_parameters['start_seed'] == -1:
# current_seed = np.random.randint(0, 10000)
# else:
# current_seed = self.protocol_parameters['start_seed'] + self.num_epochs_completed
#
# # partition velocity trace up into splits, and follow each split with a reversed version of itself:
# # ensures that position keeps coming back to center
# np.random.seed(int(current_seed))
# n_updates = int(np.ceil(self.run_parameters['stim_time'] * self.protocol_parameters['velocity_update_rate'])/2)
# out = np.random.normal(size=n_updates, scale=self.protocol_parameters['velocity_std']) / self.protocol_parameters['velocity_update_rate'] # deg/sec -> deg/update
# back = -out
#
# split_size = 6 #sec
# splits = int(self.run_parameters['stim_time'] / split_size)
#
# out = np.reshape(out, [splits, -1])
# back = np.reshape(back, [splits, -1])
# v_comb = np.concatenate([out, back], axis=1)
# velocity = np.ravel(v_comb)
#
# time_steps = np.linspace(0, self.run_parameters['stim_time'], len(velocity)) # time steps of update trajectory
#
# position = adj_center[0] + np.cumsum(velocity) #position at each update time point, according to new velocity value
#
# theta_traj = {'name': 'tv_pairs',
# 'tv_pairs': list(zip(time_steps, position)),
# 'kind': 'linear'}
#
# self.epoch_parameters = {'name': 'MovingPatch',
# 'width': self.protocol_parameters['width'],
# 'height': self.protocol_parameters['height'],
# 'sphere_radius': 1,
# 'color': self.protocol_parameters['intensity'],
# 'theta': theta_traj,
# 'phi': adj_center[1],
# 'angle': 0}
# self.convenience_parameters = {'current_seed': current_seed,
# 'time_steps': time_steps,
# 'velocity': velocity,
# 'position': position}
#
# def getParameterDefaults(self):
# self.protocol_parameters = {'height': 10.0,
# 'width': 5.0,
# 'center': [0, 0],
# 'velocity_std': 80, # deg/sec
# 'velocity_update_rate': 8, # Hz
# 'start_seed': -1,
# 'intensity': 0.0}
#
# def getRunParameterDefaults(self):
# self.run_parameters = {'protocol_ID': 'VelocityNoise',
# 'num_epochs': 20,
# 'pre_time': 1.0,
# 'stim_time': 36.0,
# 'tail_time': 1.0,
# 'idle_color': 0.5}
# %%
class UniformFlash(BaseProtocol):
    """Large uniform patch flashed at one of several intensities."""

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        prot = self.protocol_parameters
        adj_center = self.adjustCenter(prot['center'])
        current_intensity = self.selectParametersFromLists(
            prot['intensity'], randomize_order=prot['randomize_order'])

        self.epoch_parameters = dict(name='MovingPatch',
                                     width=prot['width'],
                                     height=prot['height'],
                                     sphere_radius=1,
                                     color=current_intensity,
                                     theta=adj_center[0],
                                     phi=adj_center[1],
                                     angle=0)

        self.convenience_parameters = {'current_intensity': current_intensity}

    def getParameterDefaults(self):
        """Defaults: 120x120 deg patch, bright and dark flashes."""
        self.protocol_parameters = dict(height=120.0,
                                        width=120.0,
                                        center=[0, 0],
                                        intensity=[1.0, 0.0],
                                        randomize_order=True)

    def getRunParameterDefaults(self):
        """Run timing defaults: brief 0.5 s flash per epoch."""
        self.run_parameters = dict(protocol_ID='UniformFlash',
                                   num_epochs=10,
                                   pre_time=1.0,
                                   stim_time=0.5,
                                   tail_time=1.0,
                                   idle_color=0.5)
# %%
# class SpotPair(BaseProtocol):
# def __init__(self, cfg):
# super().__init__(cfg)
#
# self.getRunParameterDefaults()
# self.getParameterDefaults()
#
# def getEpochParameters(self):
# current_speed_2 = self.selectParametersFromLists(self.protocol_parameters['speed_2'], randomize_order=self.protocol_parameters['randomize_order'])
#
# center = self.protocol_parameters['center']
# center_1 = [center[0], center[1] + self.protocol_parameters['y_separation']/2]
# spot_1_parameters = self.getMovingSpotParameters(color=self.protocol_parameters['intensity'][0],
# radius=self.protocol_parameters['diameter'][0]/2,
# center=center_1,
# speed=self.protocol_parameters['speed_1'],
# angle=0)
# center_2 = [center[0], center[1] - self.protocol_parameters['y_separation']/2]
# spot_2_parameters = self.getMovingSpotParameters(color=self.protocol_parameters['intensity'][1],
# radius=self.protocol_parameters['diameter'][1]/2,
# center=center_2,
# speed=current_speed_2,
# angle=0)
#
#
# self.epoch_parameters = (spot_1_parameters, spot_2_parameters)
#
# self.convenience_parameters = {'current_speed_2': current_speed_2}
#
# def loadStimuli(self, client):
# spot_1_parameters = self.epoch_parameters[0].copy()
# spot_2_parameters = self.epoch_parameters[1].copy()
#
# multicall = flyrpc.multicall.MyMultiCall(client.manager)
# bg = self.run_parameters.get('idle_color')
# multicall.load_stim('ConstantBackground', color=[bg, bg, bg, 1.0])
# multicall.load_stim(**spot_1_parameters, hold=True)
# multicall.load_stim(**spot_2_parameters, hold=True)
#
# multicall()
#
# def getParameterDefaults(self):
# self.protocol_parameters = {'diameter': [5.0, 5.0],
# 'intensity': [0.0, 0.0],
# 'center': [0, 0],
# 'y_separation': 7.0,
# 'speed_1': 80.0,
# 'speed_2': [-80.0, -40.0, 0.0, 40.0, 80.0],
# 'randomize_order': True}
#
# def getRunParameterDefaults(self):
# self.run_parameters = {'protocol_ID': 'SpotPair',
# 'num_epochs': 40,
# 'pre_time': 0.5,
# 'stim_time': 4.0,
# 'tail_time': 1.0,
# 'idle_color': 0.5}
# %%
"""
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # VR WORLD STIMS # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
class RealWalkThroughFakeForest(BaseProtocol):
    """Replay a recorded fly walking trajectory through a virtual forest.

    Each epoch selects one pre-recorded walking snippet (by index from
    'trajectory_range') and replays its x/y/heading while rendering a textured
    ground and a forest of randomly placed tree cylinders.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        # Which pre-recorded walking snippet to replay this epoch.
        current_trajectory_index = int(self.selectParametersFromLists(self.protocol_parameters['trajectory_range'], randomize_order=True))

        # load walk trajectory
        # Resolves .../visprotocol/resources/<user>/walking_trajectories relative to the installed package.
        trajectory_dir = os.path.join(inspect.getfile(visprotocol).split('visprotocol')[0], 'visprotocol', 'resources', self.user_name, 'walking_trajectories')
        file_name = 'walking_traj_20200728.npy'
        snippets = np.load(os.path.join(trajectory_dir, file_name), allow_pickle=True)
        snippet = snippets[current_trajectory_index]
        t = snippet['t']
        x = snippet['x']
        y = snippet['y']
        heading = snippet['a']-90  # angle in degrees. Rotate by -90 to align with heading 0 being down +y axis

        # Piecewise-linear time/value trajectories for the fly's pose.
        fly_x_trajectory = {'name': 'tv_pairs',
                            'tv_pairs': list(zip(t, x)),
                            'kind': 'linear'}
        fly_y_trajectory = {'name': 'tv_pairs',
                            'tv_pairs': list(zip(t, y)),
                            'kind': 'linear'}
        fly_theta_trajectory = {'name': 'tv_pairs',
                                'tv_pairs': list(zip(t, heading)),
                                'kind': 'linear'}

        z_level = -0.20  # ground plane height; presumably meters — confirm
        tree_locations = []
        # Fixed seed -> identical tree layout for every epoch with the same rand_seed.
        np.random.seed(int(self.protocol_parameters['rand_seed']))
        for tree in range(int(self.protocol_parameters['n_trees'])):
            tree_locations.append([np.random.uniform(-0.5, 0.5), np.random.uniform(-0.5, 0.5), z_level+self.protocol_parameters['tree_height']/2])

        self.epoch_parameters = {'name': 'Composite',
                                 'tree_height': self.protocol_parameters['tree_height'],
                                 'floor_color': self.protocol_parameters['floor_color'],
                                 'sky_color': self.protocol_parameters['sky_color'],
                                 'tree_color': self.protocol_parameters['tree_color'],
                                 'fly_x_trajectory': fly_x_trajectory,
                                 'fly_y_trajectory': fly_y_trajectory,
                                 'fly_theta_trajectory': fly_theta_trajectory,
                                 'tree_locations': tree_locations,
                                 'z_level': z_level}

        self.convenience_parameters = {'current_trajectory_index': current_trajectory_index,
                                       'current_trajectory_library': file_name}

    def loadStimuli(self, client):
        # Compose the scene: fly trajectory + uniform sky + textured ground + forest.
        passedParameters = self.epoch_parameters.copy()
        multicall = flyrpc.multicall.MyMultiCall(client.manager)

        multicall.set_fly_trajectory(passedParameters['fly_x_trajectory'],
                                     passedParameters['fly_y_trajectory'],
                                     passedParameters['fly_theta_trajectory'])

        sc = passedParameters['sky_color']
        multicall.load_stim(name='ConstantBackground',
                            color=[sc, sc, sc, 1.0])

        # base_dir = r'C:\Users\mhturner\Documents\GitHub\visprotocol\resources\mht\images\VH_NatImages'
        # fn = 'imk00125.iml'
        # multicall.load_stim(name='HorizonCylinder',
        #                     image_path=os.path.join(base_dir, fn))

        fc = passedParameters['floor_color']
        multicall.load_stim(name='TexturedGround',
                            color=[fc, fc, fc, 1.0],
                            z_level=passedParameters['z_level'],
                            hold=True)

        multicall.load_stim(name='Forest',
                            color=passedParameters['tree_color'],
                            cylinder_height=passedParameters['tree_height'],
                            cylinder_radius=0.01,
                            cylinder_locations=passedParameters['tree_locations'],
                            n_faces=4,
                            hold=True)

        multicall()

    def getParameterDefaults(self):
        self.protocol_parameters = {'n_trees': 40,
                                    'tree_height': 1.0,
                                    'floor_color': 0.40,
                                    'sky_color': 0.5,
                                    'tree_color': 0.0,
                                    'rand_seed': 1,
                                    'trajectory_range': [0, 1, 2, 3, 4]}

    def getRunParameterDefaults(self):
        # NOTE(review): protocol_ID 'ForestRandomWalk' does not match the class name
        # RealWalkThroughFakeForest — confirm which ID downstream analysis expects
        # before changing either.
        self.run_parameters = {'protocol_ID': 'ForestRandomWalk',
                               'num_epochs': 25,
                               'pre_time': 2.0,
                               'stim_time': 20.0,
                               'tail_time': 2.0,
                               'idle_color': 0.5}
# %%
class ApproachTuning(BaseProtocol):
    """Virtual-approach stimulus: the fly is moved along the azimuth line toward
    a fixed tower by a random velocity-noise trajectory.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        # Seed: -1 draws a fresh random seed each epoch; otherwise a reproducible
        # per-epoch seed (start_seed + epoch number).
        if self.protocol_parameters['start_seed'] == -1:
            current_seed = np.random.randint(0, 10000)
        else:
            current_seed = self.protocol_parameters['start_seed'] + self.num_epochs_completed

        np.random.seed(int(current_seed))
        # NOTE(review): n_updates is halved relative to stim_time * update_rate
        # (the /2 mirrors the out/back scheme in the commented-out VelocityNoise
        # protocol) — confirm this is intended here.
        n_updates = int(np.ceil(self.run_parameters['stim_time'] * self.protocol_parameters['velocity_update_rate'])/2)
        velocity = np.random.normal(size=n_updates, scale=self.protocol_parameters['velocity_std']) / self.protocol_parameters['velocity_update_rate'] # m/sec -> m/update
        time_steps = np.linspace(0, self.run_parameters['stim_time'], len(velocity)) # time steps of update trajectory

        # distance away from fly
        distance = np.cumsum(velocity) # position at each update time point, according to new velocity value. Centered around 0

        # Project the 1-D distance onto the tower's azimuth direction.
        position_x = np.sin(np.deg2rad(self.protocol_parameters['tower_azimuth'])) * distance
        position_y = np.cos(np.deg2rad(self.protocol_parameters['tower_azimuth'])) * distance

        fly_x_trajectory = {'name': 'tv_pairs',
                            'tv_pairs': list(zip(time_steps, position_x)),
                            'kind': 'linear'}
        fly_y_trajectory = {'name': 'tv_pairs',
                            'tv_pairs': list(zip(time_steps, position_y)),
                            'kind': 'linear'}

        # tower location: along azimuth line
        tower_location = [np.sin(np.deg2rad(self.protocol_parameters['tower_azimuth'])) * self.protocol_parameters['tower_distance'],
                          np.cos(np.deg2rad(self.protocol_parameters['tower_azimuth'])) * self.protocol_parameters['tower_distance'],
                          0]

        self.epoch_parameters = {'name': 'Composite',
                                 'tower_height': self.protocol_parameters['tower_height'],
                                 'tower_diameter': self.protocol_parameters['tower_diameter'],
                                 'tower_color': self.protocol_parameters['tower_color'],
                                 'tower_location': tower_location,
                                 'fly_x_trajectory': fly_x_trajectory,
                                 'fly_y_trajectory': fly_y_trajectory}

        self.convenience_parameters = {'current_seed': current_seed}

    def loadStimuli(self, client):
        # Scene: constant background + one tower; heading held fixed at 0.
        passedParameters = self.epoch_parameters.copy()
        multicall = flyrpc.multicall.MyMultiCall(client.manager)

        multicall.set_fly_trajectory(passedParameters['fly_x_trajectory'], passedParameters['fly_y_trajectory'], 0)

        bg = self.run_parameters.get('idle_color')
        multicall.load_stim(name='ConstantBackground',
                            color=[bg, bg, bg, 1.0],
                            hold=True)

        multicall.load_stim(name='Tower',
                            color=passedParameters['tower_color'],
                            cylinder_height=passedParameters['tower_height'],
                            cylinder_radius=passedParameters['tower_diameter']/2,
                            cylinder_location=passedParameters['tower_location'],
                            n_faces=4,
                            hold=True)

        multicall()

    def getParameterDefaults(self):
        # tower_azimuth in degrees; velocity_std in m/sec (see conversion above);
        # tower_height/diameter/distance presumably meters — confirm.
        self.protocol_parameters = {'start_seed': -1,
                                    'velocity_update_rate': 10,
                                    'velocity_std': 0.03,
                                    'tower_color': 0,
                                    'tower_height': 1.0,
                                    'tower_diameter': 0.01,
                                    'tower_distance': 0.05,
                                    'tower_azimuth': 45}

    def getRunParameterDefaults(self):
        self.run_parameters = {'protocol_ID': 'ApproachTuning',
                               'num_epochs': 25,
                               'pre_time': 2.0,
                               'stim_time': 20.0,
                               'tail_time': 2.0,
                               'idle_color': 0.5}
# %%
class TowerDistanceWalk(BaseProtocol):
    """Simulated straight walk past a row of towers.

    Forward velocity, tower diameter, and tower lateral offset are varied
    across epochs to probe distance/size interactions.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        prot = self.protocol_parameters
        (current_forward_velocity,
         current_tower_diameter,
         current_tower_xoffset) = self.selectParametersFromLists(
            (prot['forward_velocity'], prot['tower_diameter'], prot['tower_xoffset']),
            randomize_order=True)

        # Straight walk down +y at the chosen speed; no lateral motion, heading fixed.
        t = np.arange(0, self.run_parameters.get('stim_time'), 0.01)  # sec
        x = 0.0 * t
        y = current_forward_velocity * t
        heading = 0 * t

        def _tv_traj(values):
            # Piecewise-linear trajectory spec consumed by the stim server.
            return {'name': 'tv_pairs',
                    'tv_pairs': list(zip(t, values)),
                    'kind': 'linear'}

        fly_x_trajectory = _tv_traj(x)
        fly_y_trajectory = _tv_traj(y)
        fly_theta_trajectory = _tv_traj(heading)

        z_level = -0.05
        # Towers spaced evenly along +y, all at the same lateral (x) offset.
        tower_locations = [[current_tower_xoffset,
                            (k + 1) * prot['tower_spacing'],
                            z_level + prot['tower_height'] / 2]
                           for k in range(int(prot['n_towers']))]

        self.epoch_parameters = {'name': 'Composite',
                                 'tower_height': prot['tower_height'],
                                 'tower_diameter': current_tower_diameter,
                                 'floor_color': prot['floor_color'],
                                 'sky_color': prot['sky_color'],
                                 'tower_color': prot['tower_color'],
                                 'fly_x_trajectory': fly_x_trajectory,
                                 'fly_y_trajectory': fly_y_trajectory,
                                 'fly_theta_trajectory': fly_theta_trajectory,
                                 'tower_locations': tower_locations,
                                 'z_level': z_level}

        self.convenience_parameters = {'current_forward_velocity': current_forward_velocity,
                                       'current_tower_diameter': current_tower_diameter,
                                       'current_tower_xoffset': current_tower_xoffset}

    def loadStimuli(self, client):
        """Compose sky + textured ground + tower row in one multicall."""
        passedParameters = self.epoch_parameters.copy()
        multicall = flyrpc.multicall.MyMultiCall(client.manager)

        multicall.set_fly_trajectory(passedParameters['fly_x_trajectory'],
                                     passedParameters['fly_y_trajectory'],
                                     passedParameters['fly_theta_trajectory'])

        sc = passedParameters['sky_color']
        multicall.load_stim(name='ConstantBackground',
                            color=[sc, sc, sc, 1.0])

        fc = passedParameters['floor_color']
        multicall.load_stim(name='TexturedGround',
                            color=[fc, fc, fc, 1.0],
                            z_level=passedParameters['z_level'],
                            hold=True)

        multicall.load_stim(name='Forest',
                            color=passedParameters['tower_color'],
                            cylinder_height=passedParameters['tower_height'],
                            cylinder_radius=passedParameters['tower_diameter']/2,
                            cylinder_locations=passedParameters['tower_locations'],
                            n_faces=8,
                            hold=True)

        multicall()

    def getParameterDefaults(self):
        """Defaults; velocities and tower geometry presumably in meters/m-s — confirm."""
        self.protocol_parameters = dict(forward_velocity=[0.02],
                                        n_towers=5,
                                        tower_height=1.0,
                                        tower_diameter=[0.01, 0.02, 0.03],
                                        tower_spacing=0.08,
                                        tower_xoffset=[-0.01, -0.02, -0.04, -0.06],
                                        tower_color=0.0,
                                        floor_color=0.40,
                                        sky_color=0.75)

    def getRunParameterDefaults(self):
        """Run timing defaults for this protocol."""
        self.run_parameters = dict(protocol_ID='TowerDistanceWalk',
                                   num_epochs=40,
                                   pre_time=2.0,
                                   stim_time=20.0,
                                   tail_time=2.0,
                                   idle_color=0.5)
# %%
class MovingSpotOnVR(BaseProtocol):
    """Replay of a recorded walking trajectory with a moving spot overlaid.

    Each epoch replays one pre-recorded walking snippet (loaded from the
    user's resources directory) as the fly trajectory through a VR scene of
    sky, textured ground, and a seeded random forest, while a small spot
    oscillates sinusoidally in theta on top of the scene.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def getEpochParameters(self):
        adj_center = self.adjustCenter(self.protocol_parameters['center'])
        current_trajectory_index = int(self.selectParametersFromLists(
            self.protocol_parameters['trajectory_range'], randomize_order=True))

        # Load the pre-recorded walking snippet library and pick this epoch's snippet.
        trajectory_dir = os.path.join(inspect.getfile(visprotocol).split('visprotocol')[0],
                                      'visprotocol', 'resources', self.user_name,
                                      'walking_trajectories')
        file_name = 'walking_traj_20200728.npy'
        snippets = np.load(os.path.join(trajectory_dir, file_name), allow_pickle=True)
        snippet = snippets[current_trajectory_index]
        t = snippet['t']
        x = snippet['x']
        y = snippet['y']
        # Angle in degrees. Rotate by -90 to align heading 0 with the +y axis.
        heading = snippet['a'] - 90

        def as_tv_trajectory(values):
            # Package a time series as a linearly interpolated tv_pairs trajectory.
            return {'name': 'tv_pairs',
                    'tv_pairs': list(zip(t, values)),
                    'kind': 'linear'}

        fly_x_trajectory = as_tv_trajectory(x)
        fly_y_trajectory = as_tv_trajectory(y)
        fly_theta_trajectory = as_tv_trajectory(heading)

        z_level = -0.20
        # Scatter trees uniformly over [-0.5, 0.5] in x and y; seeding makes the
        # forest layout reproducible across epochs and runs.
        np.random.seed(int(self.protocol_parameters['rand_seed']))
        tree_locations = [[np.random.uniform(-0.5, 0.5),
                           np.random.uniform(-0.5, 0.5),
                           z_level + self.protocol_parameters['tree_height'] / 2]
                          for _ in range(int(self.protocol_parameters['n_trees']))]

        vr_parameters = {'name': 'Composite',
                         'tree_height': self.protocol_parameters['tree_height'],
                         'floor_color': self.protocol_parameters['floor_color'],
                         'sky_color': self.protocol_parameters['sky_color'],
                         'tree_color': self.protocol_parameters['tree_color'],
                         'fly_x_trajectory': fly_x_trajectory,
                         'fly_y_trajectory': fly_y_trajectory,
                         'fly_theta_trajectory': fly_theta_trajectory,
                         'tree_locations': tree_locations,
                         'z_level': z_level}

        # Spot theta follows a sinusoid centered on the adjusted stimulus center.
        position_traj = {'name': 'Sinusoid',
                         'temporal_frequency': self.protocol_parameters['spot_traj_frequency'],
                         'amplitude': self.protocol_parameters['spot_traj_amplitude'],
                         'offset': adj_center[0]}
        patch_parameters = {'name': 'MovingSpot',
                            'radius': self.protocol_parameters['spot_radius'],
                            'color': self.protocol_parameters['spot_color'],
                            'theta': position_traj,
                            'phi': adj_center[1],
                            'sphere_radius': 0.05}

        self.epoch_parameters = (vr_parameters, patch_parameters)
        self.convenience_parameters = {'current_trajectory_index': current_trajectory_index,
                                       'current_trajectory_library': file_name}

    def loadStimuli(self, client):
        # Batch all stim calls into a single multicall so the scene loads atomically.
        vr_parameters = self.epoch_parameters[0].copy()
        patch_parameters = self.epoch_parameters[1].copy()
        multicall = flyrpc.multicall.MyMultiCall(client.manager)
        multicall.set_fly_trajectory(vr_parameters['fly_x_trajectory'],
                                     vr_parameters['fly_y_trajectory'],
                                     vr_parameters['fly_theta_trajectory'])
        # Scene layers: uniform sky, textured ground, forest, then the spot.
        sky = vr_parameters['sky_color']
        multicall.load_stim(name='ConstantBackground',
                            color=[sky, sky, sky, 1.0])
        ground = vr_parameters['floor_color']
        multicall.load_stim(name='TexturedGround',
                            color=[ground, ground, ground, 1.0],
                            z_level=vr_parameters['z_level'],
                            hold=True)
        multicall.load_stim(name='Forest',
                            color=[0.1, 0.1, 0.1, 1],
                            cylinder_height=vr_parameters['tree_height'],
                            cylinder_radius=0.01,
                            cylinder_locations=vr_parameters['tree_locations'],
                            n_faces=4,
                            hold=True)
        multicall.load_stim(**patch_parameters, hold=True)
        multicall()

    def getParameterDefaults(self):
        self.protocol_parameters = {
            'n_trees': 40,
            'tree_height': 1.0,
            'floor_color': 0.40,
            'sky_color': 0.5,
            'tree_color': 0.0,
            'rand_seed': 1,
            'trajectory_range': [0, 1, 2, 3, 4],
            'spot_radius': 7.5,
            'spot_color': 0.0,
            'spot_traj_frequency': 0.5,
            # Amplitude of sinusoid (1/2 of peak-to-trough total distance).
            'spot_traj_amplitude': 60.0,
            'center': [0.0, 0.0],
        }

    def getRunParameterDefaults(self):
        self.run_parameters = {
            'protocol_ID': 'MovingSpotOnVR',
            'num_epochs': 25,
            'pre_time': 2.0,
            'stim_time': 20.0,
            'tail_time': 2.0,
            'idle_color': 0.5,
        }
# %%
"""
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # MULTI-COMPONENT STIMS # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
"""
class PanGlomSuite(BaseProtocol):
    """Interleaved suite of component stimulus protocols.

    Builds a shuffled epoch order in which each component stimulus appears in
    proportion to its number of trial types (32 total), so every trial type
    is presented roughly equally often. Each epoch delegates parameter
    generation and stimulus loading to the corresponding component protocol.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.cfg = cfg
        self.stim_list = ['FlickeringPatch', 'DriftingSquareGrating', 'LoomingSpot',
                          'ExpandingMovingSpot', 'MovingSpotOnDriftingGrating',
                          'MovingRectangle', 'UniformFlash']
        # Weight each stim draw by how many trial types it has. Total = 32.
        n = [3, 2, 3, 12, 6, 4, 2]
        avg_per_stim = int(self.run_parameters['num_epochs'] / np.sum(n))
        all_stims = [[stim_name] * trial_types * avg_per_stim
                     for stim_name, trial_types in zip(self.stim_list, n)]
        self.stim_order = np.random.permutation(np.hstack(all_stims))
        # Initialize each component class.
        self.initComponentClasses()
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def initComponentClasses(self):
        # Pre-populate dict of component classes. Each keeps its own
        # num_epochs_completed counter etc.
        # Component class and its protocol-parameter overrides, per stim type.
        component_configs = {
            'LoomingSpot': (LoomingSpot,
                            {'intensity': 0.0,
                             'center': [0, 0],
                             'start_size': 2.5,
                             'end_size': 80.0,
                             'rv_ratio': [5.0, 20.0, 100.0],
                             'randomize_order': True}),
            'DriftingSquareGrating': (DriftingSquareGrating,
                                      {'period': 20.0,
                                       'rate': 20.0,
                                       'contrast': 1.0,
                                       'mean': 0.5,
                                       'angle': [0.0, 180.0],
                                       'center': [0, 0],
                                       'center_size': 180.0,
                                       'randomize_order': True}),
            'ExpandingMovingSpot': (ExpandingMovingSpot,
                                    {'diameter': [5.0, 15.0, 50.0],
                                     'intensity': [0.0, 1.0],
                                     'center': [0, 0],
                                     'speed': [-80.0, 80.0],
                                     'angle': 0.0,
                                     'randomize_order': True}),
            'UniformFlash': (UniformFlash,
                             {'height': 240.0,
                              'width': 240.0,
                              'center': [0, 0],
                              'intensity': [1.0, 0.0],
                              'randomize_order': True}),
            'FlickeringPatch': (FlickeringPatch,
                                {'height': 30.0,
                                 'width': 30.0,
                                 'center': [0, 0],
                                 'contrast': 1.0,
                                 'mean': 0.5,
                                 'temporal_frequency': [1.0, 2.0, 8.0],
                                 'randomize_order': True}),
            'MovingSpotOnDriftingGrating': (MovingSpotOnDriftingGrating,
                                            {'center': [0, 0],
                                             'spot_radius': 7.5,
                                             'spot_color': 0.0,
                                             'spot_speed': 60.0,
                                             'grate_period': 20.0,
                                             'grate_rate': [-120.0, -90.0, -30.0, 30.0, 90.0, 120.0],
                                             'grate_contrast': 0.5,
                                             'angle': 0.0,
                                             'randomize_order': True}),
            'MovingRectangle': (MovingRectangle,
                                {'width': 10.0,
                                 'height': 120.0,
                                 'intensity': [0.0, 1.0],
                                 'center': [0, 0],
                                 'speed': 80.0,
                                 'angle': [0.0, 180.0],
                                 'randomize_order': True}),
        }

        self.component_classes = {}
        for stim_type in self.stim_list:
            component_cls, protocol_overrides = component_configs[stim_type]
            component = component_cls(self.cfg)
            component.protocol_parameters = protocol_overrides
            # Lock component stim timing run params to suite run params.
            for timing_key in ('pre_time', 'stim_time', 'tail_time', 'idle_color'):
                component.run_parameters[timing_key] = self.run_parameters[timing_key]
            self.component_classes[stim_type] = component

    def getEpochParameters(self):
        # Note: this num_epochs_completed is for the whole suite, not the
        # component stim!
        stim_type = str(self.stim_order[self.num_epochs_completed])
        self.convenience_parameters = {'component_stim_type': stim_type}
        self.component_class = self.component_classes[stim_type]
        self.component_class.getEpochParameters()
        self.convenience_parameters.update(self.component_class.convenience_parameters)
        self.epoch_parameters = self.component_class.epoch_parameters

    def loadStimuli(self, client):
        self.component_class.loadStimuli(client)
        # Up the component class epoch counter.
        self.component_class.advanceEpochCounter()

    def getParameterDefaults(self):
        # The suite itself has no protocol parameters; components carry their own.
        self.protocol_parameters = {}

    def getRunParameterDefaults(self):
        self.run_parameters = {
            'protocol_ID': 'PanGlomSuite',
            'num_epochs': 160,  # 160 = 32 * 5 averages each
            'pre_time': 1.5,
            'stim_time': 3.0,
            'tail_time': 1.5,
            'idle_color': 0.5,
        }
# %%
class TuningSuite(BaseProtocol):
    """Small interleaved tuning suite of two component stimulus protocols.

    Builds a shuffled epoch order in which each component stimulus appears in
    proportion to its number of trial types, then delegates each epoch's
    parameter generation and stimulus loading to that component protocol.
    """

    def __init__(self, cfg):
        super().__init__(cfg)
        self.cfg = cfg
        self.stim_list = ['ExpandingMovingSpot', 'MovingRectangle']
        # Weight each stim draw by how many trial types it has. Total = 20.
        n = [12, 4]
        avg_per_stim = int(self.run_parameters['num_epochs'] / np.sum(n))
        all_stims = [[stim_name] * trial_types * avg_per_stim
                     for stim_name, trial_types in zip(self.stim_list, n)]
        self.stim_order = np.random.permutation(np.hstack(all_stims))
        # Initialize each component class.
        self.initComponentClasses()
        self.getRunParameterDefaults()
        self.getParameterDefaults()

    def initComponentClasses(self):
        # Pre-populate dict of component classes. Each keeps its own
        # num_epochs_completed counter etc.
        # Component class and its protocol-parameter overrides, per stim type.
        component_configs = {
            'ExpandingMovingSpot': (ExpandingMovingSpot,
                                    {'diameter': [5.0, 15.0, 50.0],
                                     'intensity': [0.0, 1.0],
                                     'center': [0, 0],
                                     'speed': [-80.0, 80.0],
                                     'angle': 0.0,
                                     'randomize_order': True}),
            'MovingRectangle': (MovingRectangle,
                                {'width': 10.0,
                                 'height': 120.0,
                                 'intensity': [0.0, 1.0],
                                 'center': [0, 0],
                                 'speed': 80.0,
                                 'angle': [0.0, 180.0],
                                 'randomize_order': True}),
        }

        self.component_classes = {}
        for stim_type in self.stim_list:
            component_cls, protocol_overrides = component_configs[stim_type]
            component = component_cls(self.cfg)
            component.protocol_parameters = protocol_overrides
            # Lock component stim timing run params to suite run params.
            for timing_key in ('pre_time', 'stim_time', 'tail_time', 'idle_color'):
                component.run_parameters[timing_key] = self.run_parameters[timing_key]
            self.component_classes[stim_type] = component

    def getEpochParameters(self):
        # Note: this num_epochs_completed is for the whole suite, not the
        # component stim!
        stim_type = str(self.stim_order[self.num_epochs_completed])
        self.convenience_parameters = {'component_stim_type': stim_type}
        self.component_class = self.component_classes[stim_type]
        self.component_class.getEpochParameters()
        self.convenience_parameters.update(self.component_class.convenience_parameters)
        self.epoch_parameters = self.component_class.epoch_parameters

    def loadStimuli(self, client):
        self.component_class.loadStimuli(client)
        # Up the component class epoch counter.
        self.component_class.advanceEpochCounter()

    def getParameterDefaults(self):
        # The suite itself has no protocol parameters; components carry their own.
        self.protocol_parameters = {}

    def getRunParameterDefaults(self):
        self.run_parameters = {
            'protocol_ID': 'TuningSuite',
            'num_epochs': 80,  # 80 = 16 * 5 averages each
            'pre_time': 1.5,
            'stim_time': 3.0,
            'tail_time': 1.5,
            'idle_color': 0.5,
        }
| 51.289746 | 570 | 0.508391 | 10,016 | 109,042 | 5.241613 | 0.051518 | 0.098743 | 0.116914 | 0.017486 | 0.836476 | 0.79141 | 0.769905 | 0.740686 | 0.715848 | 0.696743 | 0 | 0.024862 | 0.387309 | 109,042 | 2,125 | 571 | 51.313882 | 0.76096 | 0.172732 | 0 | 0.718973 | 0 | 0 | 0.13267 | 0.012009 | 0 | 0 | 0 | 0.000471 | 0 | 1 | 0.071327 | false | 0.022111 | 0.005706 | 0 | 0.094864 | 0.00428 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c00877f1a613cf96fba7b5d89a1b9e00beb8db4d | 13,260 | gyp | Python | chrome/browser/resources/chromeos/compiled_resources2.gyp | metux/chromium-deb | 3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | chrome/browser/resources/chromeos/compiled_resources2.gyp | metux/chromium-deb | 3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | chrome/browser/resources/chromeos/compiled_resources2.gyp | metux/chromium-deb | 3c08e9b89a1b6f95f103a61ff4f528dbcd57fc42 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'bluetooth_pair_device',
'variables': {
'extra_inputs': [
'<!@(python <(CLOSURE_DIR)/build/get_includes.py ../options/options_bundle.js)',
],
'script_args': ['--custom_sources', '--custom_includes'],
'source_files': [
'<(DEPTH)/third_party/jstemplate/util.js',
'<(DEPTH)/third_party/jstemplate/jsevalcontext.js',
'<(DEPTH)/third_party/jstemplate/jstemplate.js',
'<(DEPTH)/ui/webui/resources/cr_elements/chromeos/network/cr_onc_types.js',
'<(DEPTH)/ui/webui/resources/js/action_link.js',
'<(DEPTH)/ui/webui/resources/js/cr.js',
'<(DEPTH)/ui/webui/resources/js/cr/event_target.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/array_data_model.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/autocomplete_list.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/bubble.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/bubble_button.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/command.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/controlled_indicator.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/focus_manager.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/focus_outline_manager.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/focus_without_ink.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/list.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/list_item.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/list_selection_controller.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/list_selection_model.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/list_single_selection_model.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/grid.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/menu.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/menu_item.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/overlay.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/position_util.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/node_utils.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/page_manager/page.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/page_manager/page_manager.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/repeating_button.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/touch_handler.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/tree.js',
'<(DEPTH)/ui/webui/resources/js/event_tracker.js',
'<(DEPTH)/ui/webui/resources/js/icon.js',
'<(DEPTH)/ui/webui/resources/js/load_time_data.js',
'<(DEPTH)/ui/webui/resources/js/parse_html_subset.js',
'<(DEPTH)/ui/webui/resources/js/promise_resolver.js',
'<(DEPTH)/ui/webui/resources/js/util.js',
'../chromeos/keyboard/keyboard_utils.js',
'<(DEPTH)/ui/webui/resources/js/i18n_behavior.js',
'<(DEPTH)/ui/webui/resources/js/web_ui_listener_behavior.js',
'../settings/page_visibility.js',
'../settings/route.js',
'../settings/people_page/easy_unlock_browser_proxy.js',
'../settings/people_page/fingerprint_browser_proxy.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-a11y-keys-behavior/iron-a11y-keys-behavior-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-selector/iron-selection-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-selector/iron-selectable-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-selector/iron-multi-selectable-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-menu-behavior/iron-menu-behavior-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-menu-behavior/iron-menubar-behavior-extracted.js',
'<(DEPTH)/ui/webui/resources/cr_elements/cr_profile_avatar_selector/cr_profile_avatar_selector_grid.js',
'<(DEPTH)/ui/webui/resources/cr_elements/cr_profile_avatar_selector/cr_profile_avatar_selector.js',
'../settings/people_page/lock_screen_constants.js',
'<(DEPTH)/third_party/closure_compiler/externs/quick_unlock_private.js',
'../settings/people_page/lock_state_behavior.js',
'../settings/people_page/password_prompt_dialog.js',
'<(DEPTH)/ui/webui/resources/js/assert.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-meta/iron-meta-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-icon/iron-icon-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-behaviors/iron-control-state-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-behaviors/iron-button-state-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/paper-ripple/paper-ripple-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/paper-behaviors/paper-ripple-behavior-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/paper-behaviors/paper-inky-focus-behavior-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/paper-icon-button/paper-icon-button-extracted.js',
'<(DEPTH)/ui/webui/resources/cr_elements/cr_dialog/cr_dialog.js',
'../settings/people_page/lock_screen.js',
'<(DEPTH)/third_party/closure_compiler/externs/bluetooth.js',
'<(DEPTH)/third_party/closure_compiler/externs/bluetooth_private.js',
'<(DEPTH)/third_party/closure_compiler/externs/management.js',
'<(DEPTH)/third_party/closure_compiler/externs/metrics_private.js',
'<(DEPTH)/third_party/closure_compiler/externs/networking_private.js',
'<(DEPTH)/third_party/closure_compiler/externs/chrome_send.js',
'<(DEPTH)/third_party/closure_compiler/externs/web_animations.js',
'<(DEPTH)/ui/webui/resources/cr_elements/chromeos/network/cr_network_icon_externs.js',
'../options/options_bundle.js',
# Note: ^ this is just a copy of
# ../options/compiled_resources2.gyp:options_bundle#source_files. Most
# of this code is deprecated, but please keep in sync if you really
# need to change.
'bluetooth_pair_device.js',
],
},
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
{
'target_name': 'certificate_manager_dialog',
'variables': {
'extra_inputs': [
'<!@(python <(CLOSURE_DIR)/build/get_includes.py ../options/options_bundle.js)',
],
'script_args': ['--custom_sources', '--custom_includes'],
'source_files': [
'<(DEPTH)/third_party/jstemplate/util.js',
'<(DEPTH)/third_party/jstemplate/jsevalcontext.js',
'<(DEPTH)/third_party/jstemplate/jstemplate.js',
'<(DEPTH)/ui/webui/resources/cr_elements/chromeos/network/cr_onc_types.js',
'<(DEPTH)/ui/webui/resources/js/action_link.js',
'<(DEPTH)/ui/webui/resources/js/cr.js',
'<(DEPTH)/ui/webui/resources/js/cr/event_target.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/array_data_model.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/autocomplete_list.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/bubble.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/bubble_button.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/command.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/controlled_indicator.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/focus_manager.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/focus_outline_manager.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/focus_without_ink.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/list.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/list_item.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/list_selection_controller.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/list_selection_model.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/list_single_selection_model.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/grid.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/menu.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/menu_item.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/overlay.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/position_util.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/node_utils.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/page_manager/page.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/page_manager/page_manager.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/repeating_button.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/touch_handler.js',
'<(DEPTH)/ui/webui/resources/js/cr/ui/tree.js',
'<(DEPTH)/ui/webui/resources/js/event_tracker.js',
'<(DEPTH)/ui/webui/resources/js/icon.js',
'<(DEPTH)/ui/webui/resources/js/load_time_data.js',
'<(DEPTH)/ui/webui/resources/js/parse_html_subset.js',
'<(DEPTH)/ui/webui/resources/js/promise_resolver.js',
'<(DEPTH)/ui/webui/resources/js/util.js',
'../chromeos/keyboard/keyboard_utils.js',
'<(DEPTH)/ui/webui/resources/js/i18n_behavior.js',
'<(DEPTH)/ui/webui/resources/js/web_ui_listener_behavior.js',
'../settings/page_visibility.js',
'../settings/route.js',
'../settings/people_page/easy_unlock_browser_proxy.js',
'../settings/people_page/fingerprint_browser_proxy.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-a11y-keys-behavior/iron-a11y-keys-behavior-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-selector/iron-selection-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-selector/iron-selectable-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-selector/iron-multi-selectable-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-menu-behavior/iron-menu-behavior-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-menu-behavior/iron-menubar-behavior-extracted.js',
'<(DEPTH)/ui/webui/resources/cr_elements/cr_profile_avatar_selector/cr_profile_avatar_selector_grid.js',
'<(DEPTH)/ui/webui/resources/cr_elements/cr_profile_avatar_selector/cr_profile_avatar_selector.js',
'../settings/people_page/lock_screen_constants.js',
'<(DEPTH)/third_party/closure_compiler/externs/quick_unlock_private.js',
'../settings/people_page/lock_state_behavior.js',
'../settings/people_page/password_prompt_dialog.js',
'<(DEPTH)/ui/webui/resources/js/assert.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-meta/iron-meta-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-icon/iron-icon-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-behaviors/iron-control-state-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/iron-behaviors/iron-button-state-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/paper-ripple/paper-ripple-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/paper-behaviors/paper-ripple-behavior-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/paper-behaviors/paper-inky-focus-behavior-extracted.js',
'<(DEPTH)/third_party/polymer/v1_0/components-chromium/paper-icon-button/paper-icon-button-extracted.js',
'<(DEPTH)/ui/webui/resources/cr_elements/cr_dialog/cr_dialog.js',
'../settings/people_page/lock_screen.js',
'<(DEPTH)/third_party/closure_compiler/externs/bluetooth.js',
'<(DEPTH)/third_party/closure_compiler/externs/bluetooth_private.js',
'<(DEPTH)/third_party/closure_compiler/externs/management.js',
'<(DEPTH)/third_party/closure_compiler/externs/metrics_private.js',
'<(DEPTH)/third_party/closure_compiler/externs/networking_private.js',
'<(DEPTH)/third_party/closure_compiler/externs/chrome_send.js',
'<(DEPTH)/third_party/closure_compiler/externs/web_animations.js',
'<(DEPTH)/ui/webui/resources/cr_elements/chromeos/network/cr_network_icon_externs.js',
'../options/options_bundle.js',
# Note: ^ this is just a copy of
# ../options/compiled_resources2.gyp:options_bundle#source_files. Most
# of this code is deprecated, but please keep in sync if you really
# need to change.
'certificate_manager_dialog.js',
],
},
'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
},
],
}
| 66.969697 | 127 | 0.666817 | 1,739 | 13,260 | 4.903968 | 0.104083 | 0.109991 | 0.09076 | 0.141182 | 0.972092 | 0.972092 | 0.972092 | 0.972092 | 0.972092 | 0.9606 | 0 | 0.006824 | 0.160106 | 13,260 | 197 | 128 | 67.309645 | 0.758912 | 0.038989 | 0 | 0.930108 | 0 | 0.150538 | 0.794171 | 0.773509 | 0 | 0 | 0 | 0 | 0.010753 | 1 | 0 | true | 0.010753 | 0 | 0 | 0 | 0.010753 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
c00fe4b754d67e83bf05da034a3092f0382ea21e | 231 | py | Python | streusle_tagger/__init__.py | mkranzlein/streusle-tagger | 01974d963f89f09e5424ebafe6a72088c2b4f6fc | [
"MIT"
] | 7 | 2019-08-16T13:12:38.000Z | 2020-04-17T23:58:33.000Z | streusle_tagger/__init__.py | mkranzlein/streusle-tagger | 01974d963f89f09e5424ebafe6a72088c2b4f6fc | [
"MIT"
] | 3 | 2019-08-16T00:04:47.000Z | 2019-12-04T19:34:48.000Z | streusle_tagger/__init__.py | mkranzlein/streusle-tagger | 01974d963f89f09e5424ebafe6a72088c2b4f6fc | [
"MIT"
] | 2 | 2019-08-16T07:30:48.000Z | 2019-11-22T02:57:14.000Z | # pylint: disable=wildcard-import
from streusle_tagger.data import *
from streusle_tagger.dataset_readers import *
from streusle_tagger.metrics import *
from streusle_tagger.models import *
from streusle_tagger.predictors import *
| 33 | 45 | 0.839827 | 30 | 231 | 6.266667 | 0.433333 | 0.265957 | 0.478723 | 0.638298 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.099567 | 231 | 6 | 46 | 38.5 | 0.903846 | 0.134199 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c05f33cfc645cb9a2defa63231f425f7211c612b | 155 | py | Python | functsql/dictsql/__init__.py | bpeterso2000/functsql | a02058a2354fc28344ead21c526a34b2ce72bd17 | [
"Apache-2.0"
] | null | null | null | functsql/dictsql/__init__.py | bpeterso2000/functsql | a02058a2354fc28344ead21c526a34b2ce72bd17 | [
"Apache-2.0"
] | null | null | null | functsql/dictsql/__init__.py | bpeterso2000/functsql | a02058a2354fc28344ead21c526a34b2ce72bd17 | [
"Apache-2.0"
] | null | null | null | from .delete import *
from .group_by import *
from .join import *
from .order_by import *
from .select import *
from .update import *
from .where import *
| 19.375 | 23 | 0.729032 | 23 | 155 | 4.826087 | 0.434783 | 0.540541 | 0.216216 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.180645 | 155 | 7 | 24 | 22.142857 | 0.874016 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
223597ecae661b0d6a73417096c14e04adbc76ff | 10,553 | py | Python | nova/tests/unit/scheduler/filters/test_ram_filters.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/scheduler/filters/test_ram_filters.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/scheduler/filters/test_ram_filters.py | bopopescu/nova-token | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | [
"Apache-2.0"
] | 2 | 2017-07-20T17:31:34.000Z | 2020-07-24T02:42:19.000Z | begin_unit
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'mock'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'objects'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'scheduler'
op|'.'
name|'filters'
name|'import'
name|'ram_filter'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'test'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'tests'
op|'.'
name|'unit'
op|'.'
name|'scheduler'
name|'import'
name|'fakes'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|TestRamFilter
name|'class'
name|'TestRamFilter'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'TestRamFilter'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'filt_cls'
op|'='
name|'ram_filter'
op|'.'
name|'RamFilter'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_ram_filter_fails_on_memory
dedent|''
name|'def'
name|'test_ram_filter_fails_on_memory'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
nl|'\n'
name|'flavor'
op|'='
name|'objects'
op|'.'
name|'Flavor'
op|'('
name|'memory_mb'
op|'='
number|'1024'
op|')'
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'free_ram_mb'"
op|':'
number|'1023'
op|','
string|"'total_usable_ram_mb'"
op|':'
number|'1024'
op|','
nl|'\n'
string|"'ram_allocation_ratio'"
op|':'
number|'1.0'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_ram_filter_passes
dedent|''
name|'def'
name|'test_ram_filter_passes'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
nl|'\n'
name|'flavor'
op|'='
name|'objects'
op|'.'
name|'Flavor'
op|'('
name|'memory_mb'
op|'='
number|'1024'
op|')'
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'free_ram_mb'"
op|':'
number|'1024'
op|','
string|"'total_usable_ram_mb'"
op|':'
number|'1024'
op|','
nl|'\n'
string|"'ram_allocation_ratio'"
op|':'
number|'1.0'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_ram_filter_oversubscribe
dedent|''
name|'def'
name|'test_ram_filter_oversubscribe'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
nl|'\n'
name|'flavor'
op|'='
name|'objects'
op|'.'
name|'Flavor'
op|'('
name|'memory_mb'
op|'='
number|'1024'
op|')'
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'free_ram_mb'"
op|':'
op|'-'
number|'1024'
op|','
string|"'total_usable_ram_mb'"
op|':'
number|'2048'
op|','
nl|'\n'
string|"'ram_allocation_ratio'"
op|':'
number|'2.0'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'2048'
op|'*'
number|'2.0'
op|','
name|'host'
op|'.'
name|'limits'
op|'['
string|"'memory_mb'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_ram_filter_oversubscribe_singe_instance_fails
dedent|''
name|'def'
name|'test_ram_filter_oversubscribe_singe_instance_fails'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
nl|'\n'
name|'flavor'
op|'='
name|'objects'
op|'.'
name|'Flavor'
op|'('
name|'memory_mb'
op|'='
number|'1024'
op|')'
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'free_ram_mb'"
op|':'
number|'512'
op|','
string|"'total_usable_ram_mb'"
op|':'
number|'512'
op|','
nl|'\n'
string|"'ram_allocation_ratio'"
op|':'
number|'2.0'
op|'}'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
dedent|''
dedent|''
op|'@'
name|'mock'
op|'.'
name|'patch'
op|'('
string|"'nova.scheduler.filters.utils.aggregate_values_from_key'"
op|')'
newline|'\n'
DECL|class|TestAggregateRamFilter
name|'class'
name|'TestAggregateRamFilter'
op|'('
name|'test'
op|'.'
name|'NoDBTestCase'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|setUp
indent|' '
name|'def'
name|'setUp'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'TestAggregateRamFilter'
op|','
name|'self'
op|')'
op|'.'
name|'setUp'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'filt_cls'
op|'='
name|'ram_filter'
op|'.'
name|'AggregateRamFilter'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_aggregate_ram_filter_value_error
dedent|''
name|'def'
name|'test_aggregate_ram_filter_value_error'
op|'('
name|'self'
op|','
name|'agg_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
nl|'\n'
name|'context'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'ctx'
op|','
nl|'\n'
name|'flavor'
op|'='
name|'objects'
op|'.'
name|'Flavor'
op|'('
name|'memory_mb'
op|'='
number|'1024'
op|')'
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'free_ram_mb'"
op|':'
number|'1024'
op|','
string|"'total_usable_ram_mb'"
op|':'
number|'1024'
op|','
nl|'\n'
string|"'ram_allocation_ratio'"
op|':'
number|'1.0'
op|'}'
op|')'
newline|'\n'
name|'agg_mock'
op|'.'
name|'return_value'
op|'='
name|'set'
op|'('
op|'['
string|"'XXX'"
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1024'
op|'*'
number|'1.0'
op|','
name|'host'
op|'.'
name|'limits'
op|'['
string|"'memory_mb'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_aggregate_ram_filter_default_value
dedent|''
name|'def'
name|'test_aggregate_ram_filter_default_value'
op|'('
name|'self'
op|','
name|'agg_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
nl|'\n'
name|'context'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'ctx'
op|','
nl|'\n'
name|'flavor'
op|'='
name|'objects'
op|'.'
name|'Flavor'
op|'('
name|'memory_mb'
op|'='
number|'1024'
op|')'
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'free_ram_mb'"
op|':'
number|'1023'
op|','
string|"'total_usable_ram_mb'"
op|':'
number|'1024'
op|','
nl|'\n'
string|"'ram_allocation_ratio'"
op|':'
number|'1.0'
op|'}'
op|')'
newline|'\n'
comment|'# False: fallback to default flag w/o aggregates'
nl|'\n'
name|'agg_mock'
op|'.'
name|'return_value'
op|'='
name|'set'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertFalse'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
name|'agg_mock'
op|'.'
name|'return_value'
op|'='
name|'set'
op|'('
op|'['
string|"'2.0'"
op|']'
op|')'
newline|'\n'
comment|'# True: use ratio from aggregates'
nl|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1024'
op|'*'
number|'2.0'
op|','
name|'host'
op|'.'
name|'limits'
op|'['
string|"'memory_mb'"
op|']'
op|')'
newline|'\n'
nl|'\n'
DECL|member|test_aggregate_ram_filter_conflict_values
dedent|''
name|'def'
name|'test_aggregate_ram_filter_conflict_values'
op|'('
name|'self'
op|','
name|'agg_mock'
op|')'
op|':'
newline|'\n'
indent|' '
name|'spec_obj'
op|'='
name|'objects'
op|'.'
name|'RequestSpec'
op|'('
nl|'\n'
name|'context'
op|'='
name|'mock'
op|'.'
name|'sentinel'
op|'.'
name|'ctx'
op|','
nl|'\n'
name|'flavor'
op|'='
name|'objects'
op|'.'
name|'Flavor'
op|'('
name|'memory_mb'
op|'='
number|'1024'
op|')'
op|')'
newline|'\n'
name|'host'
op|'='
name|'fakes'
op|'.'
name|'FakeHostState'
op|'('
string|"'host1'"
op|','
string|"'node1'"
op|','
nl|'\n'
op|'{'
string|"'free_ram_mb'"
op|':'
number|'1023'
op|','
string|"'total_usable_ram_mb'"
op|':'
number|'1024'
op|','
nl|'\n'
string|"'ram_allocation_ratio'"
op|':'
number|'1.0'
op|'}'
op|')'
newline|'\n'
name|'agg_mock'
op|'.'
name|'return_value'
op|'='
name|'set'
op|'('
op|'['
string|"'1.5'"
op|','
string|"'2.0'"
op|']'
op|')'
newline|'\n'
comment|'# use the minimum ratio from aggregates'
nl|'\n'
name|'self'
op|'.'
name|'assertTrue'
op|'('
name|'self'
op|'.'
name|'filt_cls'
op|'.'
name|'host_passes'
op|'('
name|'host'
op|','
name|'spec_obj'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'assertEqual'
op|'('
number|'1024'
op|'*'
number|'1.5'
op|','
name|'host'
op|'.'
name|'limits'
op|'['
string|"'memory_mb'"
op|']'
op|')'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| 12.638323 | 88 | 0.608358 | 1,584 | 10,553 | 3.941919 | 0.096591 | 0.154709 | 0.073671 | 0.086483 | 0.839366 | 0.836803 | 0.808776 | 0.793402 | 0.736227 | 0.728219 | 0 | 0.01514 | 0.09874 | 10,553 | 834 | 89 | 12.653477 | 0.641363 | 0 | 0 | 0.948441 | 0 | 0 | 0.373543 | 0.061594 | 0 | 0 | 0 | 0 | 0.014388 | 0 | null | null | 0.01199 | 0.005995 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
224f8d50f2f65a343b0c5e5f7157e61b5bc2543f | 7,539 | py | Python | n2j/models/gnn.py | jiwoncpark/ex-con | 6775d11ec1c3e7005890e58d16dd07b711861cdf | [
"MIT"
] | 1 | 2021-10-08T20:21:33.000Z | 2021-10-08T20:21:33.000Z | n2j/models/gnn.py | jiwoncpark/node-to-joy | 980dd6ad11971782221490c6e3267b43f242fff6 | [
"MIT"
] | 13 | 2021-03-10T11:46:42.000Z | 2021-08-23T19:36:08.000Z | n2j/models/gnn.py | jiwoncpark/exconvnet | 6775d11ec1c3e7005890e58d16dd07b711861cdf | [
"MIT"
] | 1 | 2020-02-27T20:39:02.000Z | 2020-02-27T20:39:02.000Z | """Various GNN models
"""
import torch
from torch_geometric.nn import GCNConv, GATConv, SAGEConv, GravNetConv
import torch.nn as nn
import torch.nn.functional as F
__all__ = ['GCNNet', 'GATNet', 'SageNet', 'GravNet']
def get_zero_nodes(batch_idx):
"""Get indices of the zeroth nodes in the batch
"""
batch_idx = torch.cat([torch.zeros(1, device=batch_idx.device), batch_idx])
diff = batch_idx[1:] - batch_idx[:-1]
diff[0] = 1
return diff.bool()
class GCNNet(nn.Module):
def __init__(self, in_channels, out_channels,
hidden_channels=256, n_layers=3, dropout=0.0,
kwargs={}):
super(GCNNet, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.hidden_channels = hidden_channels
self.n_layers = n_layers
self.dropout = dropout
self.kwargs = kwargs
self.convs = nn.ModuleList()
for i in range(self.n_layers-1):
n_in = self.in_channels if i == 0 else self.hidden_channels
self.convs.append(GCNConv(n_in,
self.hidden_channels,
aggr='add',
add_self_loops=False,
**self.kwargs))
# Last layer
self.convs.append(GCNConv(self.hidden_channels,
self.out_channels,
aggr='add',
add_self_loops=False,
**self.kwargs))
# self.fc = nn.Linear(self.hidden_channels, out_channels)
def forward(self, data):
x, edge_index, batch = data.x, data.edge_index, data.batch
for i in range(self.n_layers-1):
x = self.convs[i](x, edge_index)
x = F.leaky_relu(x)
x = F.dropout(x, p=self.dropout, training=True)
x = self.convs[-1](x, edge_index)
zero_idx_mask = get_zero_nodes(batch)
x = x[zero_idx_mask, :]
return x
class GATNet(nn.Module):
def __init__(self, in_channels, out_channels,
hidden_channels=256,
kwargs={}, n_layers=3, dropout=0.0):
super(GATNet, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.hidden_channels = hidden_channels
self.n_layers = n_layers
self.dropout = dropout
self.kwargs = kwargs
self.convs = nn.ModuleList()
for i in range(self.n_layers-1):
n_in = self.in_channels if i == 0 else self.hidden_channels
self.convs.append(GATConv(n_in,
self.hidden_channels,
aggr='add',
dropout=self.dropout,
add_self_loops=False,
**self.kwargs))
# Last layer
self.convs.append(GATConv(self.hidden_channels,
self.out_channels,
aggr='add',
dropout=self.dropout,
add_self_loops=False,
**self.kwargs))
# self.fc = nn.Linear(self.hidden_channels, out_channels)
def forward(self, data):
x, edge_index, batch = data.x, data.edge_index, data.batch
zero_idx_mask = get_zero_nodes(batch)
for i in range(self.n_layers-1):
x = self.convs[i](x, edge_index)
x = F.leaky_relu(x)
x = F.dropout(x, p=self.dropout, training=True)
if self.training:
x = self.convs[-1](x, edge_index)
x = x[zero_idx_mask, :]
return x
else:
x, (edge_index, w) = self.convs[-1](x, edge_index,
return_attention_weights=True)
x = x[zero_idx_mask, :]
return x, (edge_index, w)
class SageNet(nn.Module):
def __init__(self, in_channels, out_channels,
hidden_channels=256, n_layers=3, dropout=0.0,
kwargs={}):
super(SageNet, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.hidden_channels = hidden_channels
self.n_layers = n_layers
self.dropout = dropout
self.kwargs = kwargs
self.convs = nn.ModuleList()
for i in range(self.n_layers-1):
n_in = self.in_channels if i == 0 else self.hidden_channels
self.convs.append(SAGEConv(n_in,
self.hidden_channels,
aggr='add',
normalize=True, # otherwise explode
root_weight=False,
**self.kwargs))
# Last layer
self.convs.append(SAGEConv(self.hidden_channels,
self.out_channels,
aggr='add',
normalize=True, # otherwise explode
root_weight=False,
**self.kwargs))
def forward(self, data):
x, edge_index, batch = data.x, data.edge_index, data.batch
for i in range(self.n_layers-1):
x = self.convs[i](x, edge_index)
x = F.leaky_relu(x)
x = F.dropout(x, p=self.dropout, training=True)
x = self.convs[-1](x, edge_index)
zero_idx_mask = get_zero_nodes(batch)
x = x[zero_idx_mask, :]
return x
class GravNet(nn.Module):
def __init__(self, in_channels, out_channels,
hidden_channels=256, n_layers=3, dropout=0.0,
kwargs={}):
super(GravNet, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.hidden_channels = hidden_channels
self.n_layers = n_layers
self.dropout = dropout
self.kwargs = kwargs
self.convs = nn.ModuleList()
for i in range(self.n_layers-1):
n_in = self.in_channels if i == 0 else self.hidden_channels
self.convs.append(GravNetConv(n_in,
self.hidden_channels,
aggr='add',
space_dimensions=3,
propagate_dimensions=2,
k=20,
**self.kwargs))
# Last layer
self.convs.append(GravNetConv(self.hidden_channels,
self.out_channels,
aggr='add',
space_dimensions=3,
propagate_dimensions=2,
k=20,
**self.kwargs))
def forward(self, data):
x, batch = data.x, data.batch # edge information not used
for i in range(self.n_layers-1):
x = self.convs[i](x)
x = F.leaky_relu(x)
x = F.dropout(x, p=self.dropout, training=True)
x = self.convs[-1](x)
zero_idx_mask = get_zero_nodes(batch)
x = x[zero_idx_mask, :]
return x
| 39.888889 | 79 | 0.494628 | 840 | 7,539 | 4.208333 | 0.115476 | 0.10297 | 0.091655 | 0.040736 | 0.841867 | 0.841867 | 0.8314 | 0.780764 | 0.745686 | 0.737199 | 0 | 0.012198 | 0.412787 | 7,539 | 188 | 80 | 40.101064 | 0.786311 | 0.038201 | 0 | 0.775 | 0 | 0 | 0.006916 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05625 | false | 0 | 0.025 | 0 | 0.14375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
225e4495fa60f2a9bb662b836515d9302e1d4a78 | 6,300 | py | Python | adj_mat_func.py | LcRss/DeepLabV2-Pascal-Part-Argmax | 3911ba41f387684c42414682fd8abcc8d028a28d | [
"Apache-2.0"
] | null | null | null | adj_mat_func.py | LcRss/DeepLabV2-Pascal-Part-Argmax | 3911ba41f387684c42414682fd8abcc8d028a28d | [
"Apache-2.0"
] | null | null | null | adj_mat_func.py | LcRss/DeepLabV2-Pascal-Part-Argmax | 3911ba41f387684c42414682fd8abcc8d028a28d | [
"Apache-2.0"
] | null | null | null | import sys
import cv2
import numpy as np
import tensorflow as tf
class adj_mat_func(object):
def __init__(self, batch_size):
super().__init__()
self.batch_size = batch_size
def adj_mat(self, y_true, y_pred):
# Wraps np_mean_iou method and uses it as a TensorFlow op.
# Takes numpy arrays as its arguments and returns numpy arrays as
# its outputs.
return tf.py_func(self.np_adj_func, [y_true, y_pred], tf.float32)
def np_adj_func(self, y_true, y_pred):
adj_mat = np.zeros(shape=(108, 108))
for o in range(self.batch_size):
img = y_true[o]
classes = np.unique(img)
classes = classes[1:]
if 255 in classes:
classes = classes[:-1]
mat_contour = []
for i in range(len(classes)):
value = classes[i]
mask = cv2.inRange(img, int(value), int(value))
_, per, _ = cv2.findContours(image=mask, mode=cv2.RETR_TREE, method=cv2.CHAIN_APPROX_SIMPLE)
mat_total = np.zeros(shape=(1, 2))
for q in range(len(per)):
tmp = per[q]
mat = np.zeros(shape=(len(tmp), 2))
for j in range(len(tmp)):
point = tmp[j]
x = point[0][0]
y = point[0][1]
mat[j][0] = x
mat[j][1] = y
mat_total = np.concatenate((mat_total, mat), axis=0)
mat_contour.append(mat_total[1:])
for i in range(len(classes)):
tmp = mat_contour[i]
for j in range(i + 1, len(classes)):
min_v = sys.maxsize
second_mat = mat_contour[j]
for p in range(len(tmp)):
first_mat = tmp[p]
dif = first_mat - second_mat
dif = dif * dif
sum_mat = np.sum(dif, 1)
sqrt = np.sqrt(sum_mat)
min_tmp = np.min(sqrt)
if min_tmp < min_v:
min_v = min_tmp
if min_v <= 1:
adj_mat[classes[i]][classes[j]] = 1 + adj_mat[classes[i]][classes[j]]
return adj_mat.astype(np.float32)
def np_adj_func_2in1(self, y_true, y_pred):
adj_mat_true = np.zeros(shape=(108, 108))
for o in range(self.batch_size):
img = y_true[o]
classes = np.unique(img)
classes = classes[1:]
if 255 in classes:
classes = classes[:-1]
mat_contour = []
for i in range(len(classes)):
value = classes[i]
mask = cv2.inRange(img, int(value), int(value))
_, per, _ = cv2.findContours(image=mask, mode=cv2.RETR_TREE, method=cv2.CHAIN_APPROX_SIMPLE)
mat_total = np.zeros(shape=(1, 2))
for q in range(len(per)):
tmp = per[q]
mat = np.zeros(shape=(len(tmp), 2))
for j in range(len(tmp)):
point = tmp[j]
x = point[0][0]
y = point[0][1]
mat[j][0] = x
mat[j][1] = y
mat_total = np.concatenate((mat_total, mat), axis=0)
mat_contour.append(mat_total[1:])
for i in range(len(classes)):
tmp = mat_contour[i]
for j in range(i + 1, len(classes)):
min_v = sys.maxsize
second_mat = mat_contour[j]
for p in range(len(tmp)):
first_mat = tmp[p]
dif = first_mat - second_mat
dif = dif * dif
sum_mat = np.sum(dif, 1)
sqrt = np.sqrt(sum_mat)
min_tmp = np.min(sqrt)
if min_tmp < min_v:
min_v = min_tmp
if min_v <= 1:
adj_mat_true = [classes[i]][classes[j]] = 1 + adj_mat_true[classes[i]][classes[j]]
adj_mat_pred = np.zeros(shape=(108, 108))
for o in range(self.batch_size):
img = y_pred[o]
classes = np.unique(img)
classes = classes[1:]
if 255 in classes:
classes = classes[:-1]
mat_contour = []
for i in range(len(classes)):
value = classes[i]
mask = cv2.inRange(img, int(value), int(value))
_, per, _ = cv2.findContours(image=mask, mode=cv2.RETR_TREE, method=cv2.CHAIN_APPROX_SIMPLE)
mat_total = np.zeros(shape=(1, 2))
for q in range(len(per)):
tmp = per[q]
mat = np.zeros(shape=(len(tmp), 2))
for j in range(len(tmp)):
point = tmp[j]
x = point[0][0]
y = point[0][1]
mat[j][0] = x
mat[j][1] = y
mat_total = np.concatenate((mat_total, mat), axis=0)
mat_contour.append(mat_total[1:])
for i in range(len(classes)):
tmp = mat_contour[i]
for j in range(i + 1, len(classes)):
min_v = sys.maxsize
second_mat = mat_contour[j]
for p in range(len(tmp)):
first_mat = tmp[p]
dif = first_mat - second_mat
dif = dif * dif
sum_mat = np.sum(dif, 1)
sqrt = np.sqrt(sum_mat)
min_tmp = np.min(sqrt)
if min_tmp < min_v:
min_v = min_tmp
if min_v <= 1:
adj_mat_pred = [classes[i]][classes[j]] = 1 + adj_mat_pred[classes[i]][classes[j]]
return adj_mat_true.astype(np.float32), adj_mat_pred.astype(np.float32)
| 33.157895 | 108 | 0.437143 | 765 | 6,300 | 3.422222 | 0.118954 | 0.05615 | 0.057296 | 0.02521 | 0.864782 | 0.84492 | 0.84492 | 0.805577 | 0.769672 | 0.769672 | 0 | 0.029567 | 0.457778 | 6,300 | 189 | 109 | 33.333333 | 0.736827 | 0.021111 | 0 | 0.843284 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.029851 | false | 0 | 0.029851 | 0.007463 | 0.089552 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3f39519f190f3f9f25cdbbc1b6c7edb036314604 | 26,748 | py | Python | tests/adspygoogle/dfp/placement_service_unittest.py | hockeyprincess/google-api-dfp-python | efa82a8d85cbdc90f030db9d168790c55bd8b12a | [
"Apache-2.0"
] | null | null | null | tests/adspygoogle/dfp/placement_service_unittest.py | hockeyprincess/google-api-dfp-python | efa82a8d85cbdc90f030db9d168790c55bd8b12a | [
"Apache-2.0"
] | null | null | null | tests/adspygoogle/dfp/placement_service_unittest.py | hockeyprincess/google-api-dfp-python | efa82a8d85cbdc90f030db9d168790c55bd8b12a | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests to cover PlacementService."""
__author__ = 'api.sgrinberg@gmail.com (Stan Grinberg)'
import os
import sys
sys.path.append(os.path.join('..', '..', '..'))
import unittest
from adspygoogle.common import Utils
from tests.adspygoogle.dfp import HTTP_PROXY
from tests.adspygoogle.dfp import SERVER_V201004
from tests.adspygoogle.dfp import SERVER_V201010
from tests.adspygoogle.dfp import SERVER_V201101
from tests.adspygoogle.dfp import SERVER_V201103
from tests.adspygoogle.dfp import VERSION_V201004
from tests.adspygoogle.dfp import VERSION_V201010
from tests.adspygoogle.dfp import VERSION_V201101
from tests.adspygoogle.dfp import VERSION_V201103
from tests.adspygoogle.dfp import client
class PlacementServiceTestV201004(unittest.TestCase):
"""Unittest suite for PlacementService using v201004."""
SERVER = SERVER_V201004
VERSION = VERSION_V201004
client.debug = False
service = None
ad_unit_id1 = '0'
ad_unit_id2 = '0'
ad_unit_id3 = '0'
ad_unit_id4 = '0'
placement1 = None
placement2 = None
def setUp(self):
"""Prepare unittest."""
print self.id()
if not self.__class__.service:
self.__class__.service = client.GetPlacementService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
if self.__class__.ad_unit_id1 == '0' or self.__class__.ad_unit_id2 == '0':
inventory_service = client.GetInventoryService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
network_service = client.GetNetworkService(
self.__class__.SERVER, self.__class__.VERSION,
HTTP_PROXY)
root_ad_unit_id = \
network_service.GetCurrentNetwork()[0]['effectiveRootAdUnitId']
ad_units = [
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
}
]
ad_units = inventory_service.CreateAdUnits(ad_units)
self.__class__.ad_unit_id1 = ad_units[0]['id']
self.__class__.ad_unit_id2 = ad_units[1]['id']
self.__class__.ad_unit_id3 = ad_units[2]['id']
self.__class__.ad_unit_id4 = ad_units[3]['id']
def testCreatePlacement(self):
"""Test whether we can create a placement."""
placement = {
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
}
self.assert_(isinstance(
self.__class__.service.CreatePlacement(placement), tuple))
def testCreatePlacements(self):
"""Test whether we can create a list of placements items."""
placements = [
{
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
},
{
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
}
]
placements = self.__class__.service.CreatePlacements(placements)
self.assert_(isinstance(placements, tuple))
self.__class__.placement1 = placements[0]
self.__class__.placement2 = placements[1]
def testGetPlacement(self):
"""Test whether we can fetch an existing placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
self.assert_(isinstance(self.__class__.service.GetPlacement(
self.__class__.placement1['id']), tuple))
def testGetPlacementsByStatement(self):
"""Test whether we can fetch a list of existing placements that match given
statement."""
if not self.__class__.placement1:
self.testCreatePlacements()
filter_statement = {'query': 'WHERE id = \'%s\' ORDER BY name LIMIT 1'
% self.__class__.placement1['id']}
self.assert_(isinstance(
self.__class__.service.GetPlacementsByStatement(filter_statement),
tuple))
def testPerformPlacementAction(self):
"""Test whether we can deactivate a placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
action = {'type': 'DeactivatePlacements'}
filter_statement = {'query': 'WHERE status = \'ACTIVE\''}
self.assert_(isinstance(
self.__class__.service.PerformPlacementAction(action, filter_statement),
tuple))
def testUpdatePlacement(self):
"""Test whether we can update a placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
self.__class__.placement1['description'] += ' More description.'
placement = self.__class__.service.UpdatePlacement(
self.__class__.placement1)
self.assert_(isinstance(placement, tuple))
self.assertEqual(placement[0]['description'],
self.__class__.placement1['description'])
self.__class__.placement1['targetedAdUnitIds'].append(
self.__class__.ad_unit_id3)
placement = self.__class__.service.UpdatePlacement(
self.__class__.placement1)
self.assert_(isinstance(placement, tuple))
def testUpdatePlacements(self):
"""Test whether we can update a list of placements."""
if not self.__class__.placement1 or not self.__class__.placement2:
self.testCreatePlacements()
self.__class__.placement1['description'] += ' Even more description.'
self.__class__.placement2['description'] += ' Even more description.'
placements = self.__class__.service.UpdatePlacements([
self.__class__.placement1, self.__class__.placement2])
self.assert_(isinstance(placements, tuple))
self.__class__.placement1['targetedAdUnitIds'].append(
self.__class__.ad_unit_id4)
self.__class__.placement2['targetedAdUnitIds'].append(
self.__class__.ad_unit_id4)
placements = self.__class__.service.UpdatePlacements([
self.__class__.placement1, self.__class__.placement2])
self.assert_(isinstance(placements, tuple))
class PlacementServiceTestV201010(unittest.TestCase):
"""Unittest suite for PlacementService using v201010."""
SERVER = SERVER_V201010
VERSION = VERSION_V201010
client.debug = False
service = None
ad_unit_id1 = '0'
ad_unit_id2 = '0'
ad_unit_id3 = '0'
ad_unit_id4 = '0'
placement1 = None
placement2 = None
def setUp(self):
"""Prepare unittest."""
print self.id()
if not self.__class__.service:
self.__class__.service = client.GetPlacementService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
if self.__class__.ad_unit_id1 == '0' or self.__class__.ad_unit_id2 == '0':
inventory_service = client.GetInventoryService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
network_service = client.GetNetworkService(
self.__class__.SERVER, self.__class__.VERSION,
HTTP_PROXY)
root_ad_unit_id = \
network_service.GetCurrentNetwork()[0]['effectiveRootAdUnitId']
ad_units = [
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
}
]
ad_units = inventory_service.CreateAdUnits(ad_units)
self.__class__.ad_unit_id1 = ad_units[0]['id']
self.__class__.ad_unit_id2 = ad_units[1]['id']
self.__class__.ad_unit_id3 = ad_units[2]['id']
self.__class__.ad_unit_id4 = ad_units[3]['id']
def testCreatePlacement(self):
"""Test whether we can create a placement."""
placement = {
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
}
self.assert_(isinstance(
self.__class__.service.CreatePlacement(placement), tuple))
def testCreatePlacements(self):
"""Test whether we can create a list of placements items."""
placements = [
{
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
},
{
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
}
]
placements = self.__class__.service.CreatePlacements(placements)
self.assert_(isinstance(placements, tuple))
self.__class__.placement1 = placements[0]
self.__class__.placement2 = placements[1]
def testGetPlacement(self):
"""Test whether we can fetch an existing placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
self.assert_(isinstance(self.__class__.service.GetPlacement(
self.__class__.placement1['id']), tuple))
def testGetPlacementsByStatement(self):
"""Test whether we can fetch a list of existing placements that match given
statement."""
if not self.__class__.placement1:
self.testCreatePlacements()
filter_statement = {'query': 'WHERE id = \'%s\' ORDER BY name LIMIT 1'
% self.__class__.placement1['id']}
self.assert_(isinstance(
self.__class__.service.GetPlacementsByStatement(filter_statement),
tuple))
def testPerformPlacementAction(self):
"""Test whether we can deactivate a placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
action = {'type': 'DeactivatePlacements'}
filter_statement = {'query': 'WHERE status = \'ACTIVE\''}
self.assert_(isinstance(
self.__class__.service.PerformPlacementAction(action, filter_statement),
tuple))
def testUpdatePlacement(self):
"""Test whether we can update a placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
self.__class__.placement1['description'] += ' More description.'
placement = self.__class__.service.UpdatePlacement(
self.__class__.placement1)
self.assert_(isinstance(placement, tuple))
self.assertEqual(placement[0]['description'],
self.__class__.placement1['description'])
self.__class__.placement1['targetedAdUnitIds'].append(
self.__class__.ad_unit_id3)
placement = self.__class__.service.UpdatePlacement(
self.__class__.placement1)
self.assert_(isinstance(placement, tuple))
def testUpdatePlacements(self):
"""Test whether we can update a list of placements."""
if not self.__class__.placement1 or not self.__class__.placement2:
self.testCreatePlacements()
self.__class__.placement1['description'] += ' Even more description.'
self.__class__.placement2['description'] += ' Even more description.'
placements = self.__class__.service.UpdatePlacements([
self.__class__.placement1, self.__class__.placement2])
self.assert_(isinstance(placements, tuple))
self.__class__.placement1['targetedAdUnitIds'].append(
self.__class__.ad_unit_id4)
self.__class__.placement2['targetedAdUnitIds'].append(
self.__class__.ad_unit_id4)
placements = self.__class__.service.UpdatePlacements([
self.__class__.placement1, self.__class__.placement2])
self.assert_(isinstance(placements, tuple))
class PlacementServiceTestV201101(unittest.TestCase):
"""Unittest suite for PlacementService using v201101."""
SERVER = SERVER_V201101
VERSION = VERSION_V201101
client.debug = False
# Fixtures are stored on the class so they are created once and shared by
# every test in this suite; '0' / None mean "not yet created" and setUp
# fills them in lazily on first use.
service = None
ad_unit_id1 = '0'
ad_unit_id2 = '0'
ad_unit_id3 = '0'
ad_unit_id4 = '0'
placement1 = None
placement2 = None
def setUp(self):
"""Prepare unittest."""
print self.id()
# Lazily create the PlacementService stub shared by all tests.
if not self.__class__.service:
self.__class__.service = client.GetPlacementService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
# Lazily create four ad units under the network root; the tests use their
# ids as placement targets.
if self.__class__.ad_unit_id1 == '0' or self.__class__.ad_unit_id2 == '0':
inventory_service = client.GetInventoryService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
network_service = client.GetNetworkService(
self.__class__.SERVER, self.__class__.VERSION,
HTTP_PROXY)
root_ad_unit_id = \
network_service.GetCurrentNetwork()[0]['effectiveRootAdUnitId']
ad_units = [
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
}
]
ad_units = inventory_service.CreateAdUnits(ad_units)
self.__class__.ad_unit_id1 = ad_units[0]['id']
self.__class__.ad_unit_id2 = ad_units[1]['id']
self.__class__.ad_unit_id3 = ad_units[2]['id']
self.__class__.ad_unit_id4 = ad_units[3]['id']
def testCreatePlacement(self):
"""Test whether we can create a placement."""
placement = {
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
}
self.assert_(isinstance(
self.__class__.service.CreatePlacement(placement), tuple))
def testCreatePlacements(self):
"""Test whether we can create a list of placements items."""
placements = [
{
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
},
{
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
}
]
placements = self.__class__.service.CreatePlacements(placements)
self.assert_(isinstance(placements, tuple))
# Cache the created placements so the fetch/update tests below can reuse
# them without creating new ones.
self.__class__.placement1 = placements[0]
self.__class__.placement2 = placements[1]
def testGetPlacement(self):
"""Test whether we can fetch an existing placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
self.assert_(isinstance(self.__class__.service.GetPlacement(
self.__class__.placement1['id']), tuple))
def testGetPlacementsByStatement(self):
"""Test whether we can fetch a list of existing placements that match given
statement."""
if not self.__class__.placement1:
self.testCreatePlacements()
filter_statement = {'query': 'WHERE id = \'%s\' ORDER BY name LIMIT 1'
% self.__class__.placement1['id']}
self.assert_(isinstance(
self.__class__.service.GetPlacementsByStatement(filter_statement),
tuple))
def testPerformPlacementAction(self):
"""Test whether we can deactivate a placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
action = {'type': 'DeactivatePlacements'}
filter_statement = {'query': 'WHERE status = \'ACTIVE\''}
self.assert_(isinstance(
self.__class__.service.PerformPlacementAction(action, filter_statement),
tuple))
def testUpdatePlacement(self):
"""Test whether we can update a placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
self.__class__.placement1['description'] += ' More description.'
placement = self.__class__.service.UpdatePlacement(
self.__class__.placement1)
self.assert_(isinstance(placement, tuple))
self.assertEqual(placement[0]['description'],
self.__class__.placement1['description'])
# Second update: grow the targeted ad unit list and push it again.
self.__class__.placement1['targetedAdUnitIds'].append(
self.__class__.ad_unit_id3)
placement = self.__class__.service.UpdatePlacement(
self.__class__.placement1)
self.assert_(isinstance(placement, tuple))
def testUpdatePlacements(self):
"""Test whether we can update a list of placements."""
if not self.__class__.placement1 or not self.__class__.placement2:
self.testCreatePlacements()
self.__class__.placement1['description'] += ' Even more description.'
self.__class__.placement2['description'] += ' Even more description.'
placements = self.__class__.service.UpdatePlacements([
self.__class__.placement1, self.__class__.placement2])
self.assert_(isinstance(placements, tuple))
# Second update: grow both targeted ad unit lists and push them again.
self.__class__.placement1['targetedAdUnitIds'].append(
self.__class__.ad_unit_id4)
self.__class__.placement2['targetedAdUnitIds'].append(
self.__class__.ad_unit_id4)
placements = self.__class__.service.UpdatePlacements([
self.__class__.placement1, self.__class__.placement2])
self.assert_(isinstance(placements, tuple))
class PlacementServiceTestV201103(unittest.TestCase):
"""Unittest suite for PlacementService using v201103."""
SERVER = SERVER_V201103
VERSION = VERSION_V201103
client.debug = False
# Fixtures are stored on the class so they are created once and shared by
# every test in this suite; '0' / None mean "not yet created" and setUp
# fills them in lazily on first use.
service = None
ad_unit_id1 = '0'
ad_unit_id2 = '0'
ad_unit_id3 = '0'
ad_unit_id4 = '0'
placement1 = None
placement2 = None
def setUp(self):
"""Prepare unittest."""
print self.id()
# Lazily create the PlacementService stub shared by all tests.
if not self.__class__.service:
self.__class__.service = client.GetPlacementService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
# Lazily create four ad units under the network root; the tests use their
# ids as placement targets.
if self.__class__.ad_unit_id1 == '0' or self.__class__.ad_unit_id2 == '0':
inventory_service = client.GetInventoryService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
network_service = client.GetNetworkService(
self.__class__.SERVER, self.__class__.VERSION,
HTTP_PROXY)
root_ad_unit_id = \
network_service.GetCurrentNetwork()[0]['effectiveRootAdUnitId']
ad_units = [
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
},
{
'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
'parentId': root_ad_unit_id,
'sizes': [{'width': '300', 'height': '250'}]
}
]
ad_units = inventory_service.CreateAdUnits(ad_units)
self.__class__.ad_unit_id1 = ad_units[0]['id']
self.__class__.ad_unit_id2 = ad_units[1]['id']
self.__class__.ad_unit_id3 = ad_units[2]['id']
self.__class__.ad_unit_id4 = ad_units[3]['id']
def testCreatePlacement(self):
"""Test whether we can create a placement."""
placement = {
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
}
self.assert_(isinstance(
self.__class__.service.CreatePlacement(placement), tuple))
def testCreatePlacements(self):
"""Test whether we can create a list of placements items."""
placements = [
{
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
},
{
'name': 'Placement #%s' % Utils.GetUniqueName(),
'description': 'Description.',
'targetedAdUnitIds': [self.__class__.ad_unit_id1,
self.__class__.ad_unit_id2]
}
]
placements = self.__class__.service.CreatePlacements(placements)
self.assert_(isinstance(placements, tuple))
# Cache the created placements so the fetch/update tests below can reuse
# them without creating new ones.
self.__class__.placement1 = placements[0]
self.__class__.placement2 = placements[1]
def testGetPlacement(self):
"""Test whether we can fetch an existing placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
self.assert_(isinstance(self.__class__.service.GetPlacement(
self.__class__.placement1['id']), tuple))
def testGetPlacementsByStatement(self):
"""Test whether we can fetch a list of existing placements that match given
statement."""
if not self.__class__.placement1:
self.testCreatePlacements()
filter_statement = {'query': 'WHERE id = \'%s\' ORDER BY name LIMIT 1'
% self.__class__.placement1['id']}
self.assert_(isinstance(
self.__class__.service.GetPlacementsByStatement(filter_statement),
tuple))
def testPerformPlacementAction(self):
"""Test whether we can deactivate a placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
action = {'type': 'DeactivatePlacements'}
filter_statement = {'query': 'WHERE status = \'ACTIVE\''}
self.assert_(isinstance(
self.__class__.service.PerformPlacementAction(action, filter_statement),
tuple))
def testUpdatePlacement(self):
"""Test whether we can update a placement."""
if not self.__class__.placement1:
self.testCreatePlacements()
self.__class__.placement1['description'] += ' More description.'
placement = self.__class__.service.UpdatePlacement(
self.__class__.placement1)
self.assert_(isinstance(placement, tuple))
self.assertEqual(placement[0]['description'],
self.__class__.placement1['description'])
# Second update: grow the targeted ad unit list and push it again.
self.__class__.placement1['targetedAdUnitIds'].append(
self.__class__.ad_unit_id3)
placement = self.__class__.service.UpdatePlacement(
self.__class__.placement1)
self.assert_(isinstance(placement, tuple))
def testUpdatePlacements(self):
"""Test whether we can update a list of placements."""
if not self.__class__.placement1 or not self.__class__.placement2:
self.testCreatePlacements()
self.__class__.placement1['description'] += ' Even more description.'
self.__class__.placement2['description'] += ' Even more description.'
placements = self.__class__.service.UpdatePlacements([
self.__class__.placement1, self.__class__.placement2])
self.assert_(isinstance(placements, tuple))
# Second update: grow both targeted ad unit lists and push them again.
self.__class__.placement1['targetedAdUnitIds'].append(
self.__class__.ad_unit_id4)
self.__class__.placement2['targetedAdUnitIds'].append(
self.__class__.ad_unit_id4)
placements = self.__class__.service.UpdatePlacements([
self.__class__.placement1, self.__class__.placement2])
self.assert_(isinstance(placements, tuple))
def makeTestSuiteV201004():
    """Set up test suite using v201004.

    Returns:
      TestSuite test suite using v201004.
    """
    test_suite = unittest.TestSuite()
    test_suite.addTests(unittest.makeSuite(PlacementServiceTestV201004))
    return test_suite
def makeTestSuiteV201010():
    """Set up test suite using v201010.

    Returns:
      TestSuite test suite using v201010.
    """
    test_suite = unittest.TestSuite()
    test_suite.addTests(unittest.makeSuite(PlacementServiceTestV201010))
    return test_suite
def makeTestSuiteV201101():
    """Set up test suite using v201101.

    Returns:
      TestSuite test suite using v201101.
    """
    test_suite = unittest.TestSuite()
    test_suite.addTests(unittest.makeSuite(PlacementServiceTestV201101))
    return test_suite
def makeTestSuiteV201103():
    """Set up test suite using v201103.

    Returns:
      TestSuite test suite using v201103.
    """
    test_suite = unittest.TestSuite()
    test_suite.addTests(unittest.makeSuite(PlacementServiceTestV201103))
    return test_suite
if __name__ == '__main__':
    # 'alltests' must remain a module-level name: unittest.main looks it up
    # on __main__ via defaultTest='alltests'.
    alltests = unittest.TestSuite([factory() for factory in (
        makeTestSuiteV201004, makeTestSuiteV201010,
        makeTestSuiteV201101, makeTestSuiteV201103)])
    unittest.main(defaultTest='alltests')
| 37.15 | 80 | 0.654142 | 2,747 | 26,748 | 5.907535 | 0.077175 | 0.122011 | 0.079615 | 0.05546 | 0.908122 | 0.894257 | 0.871518 | 0.845637 | 0.845637 | 0.845637 | 0 | 0.031496 | 0.224877 | 26,748 | 719 | 81 | 37.201669 | 0.751218 | 0.022768 | 0 | 0.81295 | 0 | 0 | 0.110163 | 0.004497 | 0 | 0 | 0 | 0 | 0.071942 | 0 | null | null | 0 | 0.02518 | null | null | 0.007194 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
450ee332da84ac0554b6084290903570e44310f7 | 7,516 | py | Python | stonesoup/updater/tests/test_pointprocess.py | Isaac-JenkinsRA/Stone-Soup | 54c9c7dca8162dadaa58e85933cf10a0f86ce1e1 | [
"MIT"
] | 1 | 2020-07-21T15:20:20.000Z | 2020-07-21T15:20:20.000Z | stonesoup/updater/tests/test_pointprocess.py | Isaac-JenkinsRA/Stone-Soup | 54c9c7dca8162dadaa58e85933cf10a0f86ce1e1 | [
"MIT"
] | null | null | null | stonesoup/updater/tests/test_pointprocess.py | Isaac-JenkinsRA/Stone-Soup | 54c9c7dca8162dadaa58e85933cf10a0f86ce1e1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Test for updater.gaussianmixture module"""
import pytest
import numpy as np
from scipy.stats import multivariate_normal
from stonesoup.types.hypothesis import SingleHypothesis
from stonesoup.types.multihypothesis import MultipleHypothesis
from stonesoup.types.prediction import GaussianMeasurementPrediction
from stonesoup.types.state import GaussianState
from stonesoup.updater.kalman import (
KalmanUpdater, ExtendedKalmanUpdater, UnscentedKalmanUpdater)
from stonesoup.updater.pointprocess import PHDUpdater, LCCUpdater
@pytest.mark.parametrize(
    "UpdaterClass",
    [
        # Standard Kalman
        KalmanUpdater,
        # Extended Kalman
        ExtendedKalmanUpdater,
        # Unscented Kalman
        UnscentedKalmanUpdater,
    ],
    ids=["standard", "extended", "unscented"]
)
def test_phd_init(UpdaterClass, measurement_model, prediction, measurement):
    """PHDUpdater stores and exposes the wrapped Kalman-family updater."""
    wrapped_updater = UpdaterClass(measurement_model=measurement_model)
    phd_updater = PHDUpdater(updater=wrapped_updater)
    assert isinstance(phd_updater.updater, UpdaterClass)
@pytest.mark.parametrize(
"UpdaterClass",
[
# Standard Kalman
KalmanUpdater,
# Extended Kalman
ExtendedKalmanUpdater,
# Unscented Kalman
UnscentedKalmanUpdater,
],
ids=["standard", "extended", "unscented"]
)
def test_phd_single_component_update(UpdaterClass, measurement_model,
prediction, measurement):
"""PHD update of one component: one detection and one missed-detection
hypothesis must yield the hand-computed Kalman posterior and weights."""
# Hand-compute the expected measurement prediction and Kalman posterior
# directly from the linear measurement model.
eval_measurement_prediction = GaussianMeasurementPrediction(
measurement_model.matrix()@prediction.mean,
measurement_model.matrix()@prediction.covar@measurement_model.matrix().T
+ measurement_model.covar(),
cross_covar=prediction.covar@measurement_model.matrix().T)
kalman_gain = eval_measurement_prediction.cross_covar@np.linalg.inv(
eval_measurement_prediction.covar)
eval_posterior = GaussianState(
prediction.mean
+ kalman_gain@(measurement.state_vector
- eval_measurement_prediction.mean),
prediction.covar
- kalman_gain@eval_measurement_prediction.covar@kalman_gain.T)
underlying_updater = UpdaterClass(measurement_model=measurement_model)
measurement_prediction = underlying_updater.predict_measurement(prediction)
phd_updater = PHDUpdater(updater=underlying_updater, prob_detection=0.9)
# One hypothesis pairing the prediction with the measurement, and one
# missed-detection hypothesis (measurement=None).
hypotheses = [MultipleHypothesis([SingleHypothesis(
prediction=prediction,
measurement=measurement)]),
MultipleHypothesis([SingleHypothesis(
prediction=prediction,
measurement=None)])]
# print(hypotheses)
updated_mixture = phd_updater.update(hypotheses)
# One for updated component, one for missed detection
assert len(updated_mixture) == 2
# Check updated component
updated_component = updated_mixture[0]
assert(np.allclose(updated_component.mean, eval_posterior.mean, 0,
atol=1.e-14))
assert(np.allclose(updated_component.covar, eval_posterior.covar, 0,
atol=1.e-14))
assert(updated_component.timestamp == measurement.timestamp)
prob_detection = 0.9
prob_survival = 1
# Measurement likelihood q evaluated under the predicted measurement.
q = multivariate_normal.pdf(
measurement.state_vector.flatten(),
mean=measurement_prediction.mean.flatten(),
cov=measurement_prediction.covar
)
clutter_density = 1e-26
# Expected weight: Pd*w*q*Ps / (Pd*w*q*Ps + clutter).
new_weight = (prob_detection*prediction.weight*q*prob_survival) / \
((prob_detection*prediction.weight*q*prob_survival)+clutter_density)
assert(updated_component.weight == new_weight)
# Check miss detected component
miss_detected_component = updated_mixture[1]
assert(np.allclose(miss_detected_component.mean, prediction.mean, 0,
atol=1.e-14))
assert(np.allclose(miss_detected_component.covar, prediction.covar, 0,
atol=1.e-14))
assert(miss_detected_component.timestamp == prediction.timestamp)
l1 = 1
# Missed-detection weight: w * (1 - Pd) * l1.
assert(miss_detected_component.weight == prediction.weight *
(1-prob_detection)*l1)
@pytest.mark.parametrize(
"UpdaterClass",
[
# Standard Kalman
KalmanUpdater,
# Extended Kalman
ExtendedKalmanUpdater,
# Unscented Kalman
UnscentedKalmanUpdater,
],
ids=["standard", "extended", "unscented"]
)
def test_lcc_single_component_update(UpdaterClass, measurement_model,
prediction, measurement):
"""LCC update of one component: mirrors the PHD test above but exercises
LCCUpdater; same hand-computed posterior and weights are expected."""
# Hand-compute the expected measurement prediction and Kalman posterior
# directly from the linear measurement model.
eval_measurement_prediction = GaussianMeasurementPrediction(
measurement_model.matrix()@prediction.mean,
measurement_model.matrix()@prediction.covar@measurement_model.matrix().T
+ measurement_model.covar(),
cross_covar=prediction.covar@measurement_model.matrix().T)
kalman_gain = eval_measurement_prediction.cross_covar@np.linalg.inv(
eval_measurement_prediction.covar)
eval_posterior = GaussianState(
prediction.mean
+ kalman_gain@(measurement.state_vector
- eval_measurement_prediction.mean),
prediction.covar
- kalman_gain@eval_measurement_prediction.covar@kalman_gain.T)
underlying_updater = UpdaterClass(measurement_model=measurement_model)
measurement_prediction = underlying_updater.predict_measurement(prediction)
# NOTE: local is named phd_updater but holds an LCCUpdater.
phd_updater = LCCUpdater(updater=underlying_updater, prob_detection=0.9)
# One hypothesis pairing the prediction with the measurement, and one
# missed-detection hypothesis (measurement=None).
hypotheses = [MultipleHypothesis([SingleHypothesis(
prediction=prediction,
measurement=measurement)]),
MultipleHypothesis([SingleHypothesis(
prediction=prediction,
measurement=None)])]
# print(hypotheses)
updated_mixture = phd_updater.update(hypotheses)
# One for updated component, one for missed detection
assert len(updated_mixture) == 2
# Check updated component
updated_component = updated_mixture[0]
assert(np.allclose(updated_component.mean, eval_posterior.mean, 0,
atol=1.e-14))
assert(np.allclose(updated_component.covar, eval_posterior.covar, 0,
atol=1.e-14))
assert(updated_component.timestamp == measurement.timestamp)
prob_detection = 0.9
prob_survival = 1
# Measurement likelihood q evaluated under the predicted measurement.
q = multivariate_normal.pdf(
measurement.state_vector.flatten(),
mean=measurement_prediction.mean.flatten(),
cov=measurement_prediction.covar
)
clutter_density = 1e-26
# Expected weight: Pd*w*q*Ps / (Pd*w*q*Ps + clutter).
new_weight = (prob_detection*prediction.weight*q*prob_survival) / \
((prob_detection*prediction.weight*q*prob_survival)+clutter_density)
assert(updated_component.weight == new_weight)
# Check miss detected component
miss_detected_component = updated_mixture[1]
assert(np.allclose(miss_detected_component.mean, prediction.mean, 0,
atol=1.e-14))
assert(np.allclose(miss_detected_component.covar, prediction.covar, 0,
atol=1.e-14))
assert(miss_detected_component.timestamp == prediction.timestamp)
l1 = 1
# Missed-detection weight: w * (1 - Pd) * l1.
assert(miss_detected_component.weight ==
prediction.weight*(1-prob_detection)*l1)
| 42.224719 | 80 | 0.67496 | 720 | 7,516 | 6.826389 | 0.141667 | 0.061851 | 0.051272 | 0.011394 | 0.887284 | 0.877314 | 0.866328 | 0.853917 | 0.853917 | 0.853917 | 0 | 0.011064 | 0.242416 | 7,516 | 177 | 81 | 42.463277 | 0.852125 | 0.064529 | 0 | 0.8 | 0 | 0 | 0.015841 | 0 | 0 | 0 | 0 | 0 | 0.131034 | 1 | 0.02069 | false | 0 | 0.062069 | 0 | 0.082759 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
18b7bcdab450c73107afa70325ce25fac7387af5 | 174 | py | Python | pages/views.py | nitinraturi/django-react-webpack | 4e20eab507f637f2503e098a9c014108b23ebbaf | [
"MIT"
] | 1 | 2020-11-01T18:14:55.000Z | 2020-11-01T18:14:55.000Z | pages/views.py | raturitechmedia/django-react-webpack | 4e20eab507f637f2503e098a9c014108b23ebbaf | [
"MIT"
] | 4 | 2021-06-04T23:36:21.000Z | 2022-02-10T14:36:24.000Z | pages/views.py | raturitechmedia/django-react-webpack | 4e20eab507f637f2503e098a9c014108b23ebbaf | [
"MIT"
] | 1 | 2020-12-11T20:45:17.000Z | 2020-12-11T20:45:17.000Z | from django.shortcuts import render
def index1(request):
    """Render the first example page with an empty template context."""
    context = {}
    return render(request, 'index1.html', context)
def index2(request):
    """Render the second example page with an empty template context."""
    context = {}
    return render(request, 'index2.html', context)
| 17.4 | 45 | 0.695402 | 21 | 174 | 5.761905 | 0.52381 | 0.214876 | 0.31405 | 0.429752 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027397 | 0.16092 | 174 | 9 | 46 | 19.333333 | 0.80137 | 0 | 0 | 0 | 0 | 0 | 0.126437 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | false | 0 | 0.2 | 0.4 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
18dab552f1c0634702300d3827308b3d6e84635f | 147 | py | Python | rogc/tests/test_estimator.py | vcoutasso/ROGC | f7b99897097ea3a3f1740be8990dc4316505d680 | [
"MIT"
] | 1 | 2021-09-23T20:21:55.000Z | 2021-09-23T20:21:55.000Z | rogc/tests/test_estimator.py | vcoutasso/ROGC | f7b99897097ea3a3f1740be8990dc4316505d680 | [
"MIT"
] | null | null | null | rogc/tests/test_estimator.py | vcoutasso/ROGC | f7b99897097ea3a3f1740be8990dc4316505d680 | [
"MIT"
] | null | null | null | from sklearn.utils.estimator_checks import check_estimator
from rogc import ROGC
def test_check_estimator():
    """Run scikit-learn's estimator API compliance checks against ROGC."""
    estimator = ROGC()
    return check_estimator(estimator)
| 18.375 | 58 | 0.809524 | 20 | 147 | 5.7 | 0.55 | 0.368421 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129252 | 147 | 7 | 59 | 21 | 0.890625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0.25 | 1 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 7 |
7a0f0d3a1a1cebede490913b57a1fcbe54b5006c | 7,571 | py | Python | test/test_gather_build_data.py | emilybache/buildstats | 89b1b602aefcb47acafc4ecbbf715ec2092cbb9d | [
"MIT"
] | 2 | 2021-09-16T15:23:36.000Z | 2021-09-28T15:24:55.000Z | test/test_gather_build_data.py | emilybache/buildstats | 89b1b602aefcb47acafc4ecbbf715ec2092cbb9d | [
"MIT"
] | null | null | null | test/test_gather_build_data.py | emilybache/buildstats | 89b1b602aefcb47acafc4ecbbf715ec2092cbb9d | [
"MIT"
] | 1 | 2021-07-22T20:13:46.000Z | 2021-07-22T20:13:46.000Z | import datetime
from io import StringIO
import gather_build_data
from gather_build_data import filter_gradle_builds, Build, Sync, output_filename
def test_filter_gradle_builds_single_build():
    """A lone build-end log line (no preceding task line) yields one Build
    with an empty tasks field.

    Renamed from ``test_filter_gradle_builds``: a second function with that
    exact name is defined later in this module, which silently shadowed this
    one so pytest never collected or ran it.
    """
    text = """\
2021-07-14 15:08:52,831 [ 678061] INFO - g.FileBasedIndexProjectHandler - Reindexing refreshed files: 0 to update, calculated in 0ms
2021-07-14 15:20:21,542 [1366772] INFO - ild.invoker.GradleBuildInvoker - Gradle build finished in 16 m 5 s 163 ms
at com.intellij.openapi.application.impl.ApplicationImpl.runIntendedWriteActionOnCurrentThread(ApplicationImpl.java:808)
"""
    builds = filter_gradle_builds(StringIO(text))
    assert list(builds) == [
        Build(when="2021-07-14 15:20:21,542", time_taken="16 m 5 s 163 ms ",
              outcome="finished", tasks="")]
def test_build_matcher():
    """GRADLE_BUILD_END extracts timestamp, outcome and duration from a
    build-end log line."""
    log_line = "2021-07-14 15:20:21,542 [1366772] INFO - ild.invoker.GradleBuildInvoker - Gradle build finished in 16 m 5 s 163 ms\n"
    end_match = gather_build_data.GRADLE_BUILD_END.match(log_line)
    assert end_match.group(1, 2, 3) == (
        "2021-07-14 15:20:21,542",
        "finished",
        "16 m 5 s 163 ms",
    )
def test_task_matcher():
    """GRADLE_BUILD_START captures the task list from a build-start log line."""
    log_line = "2021-07-14 16:50:59,433 [6804663] INFO - ild.invoker.GradleBuildInvoker - About to execute Gradle tasks: [clean]\n"
    start_match = gather_build_data.GRADLE_BUILD_START.match(log_line)
    assert start_match.group(1) == "clean"
def test_task_matcher_multiple_tasks():
    """GRADLE_BUILD_START captures a comma-separated list when several tasks run."""
    log_line = "2021-07-14 16:50:59,433 [6804663] INFO - ild.invoker.GradleBuildInvoker - About to execute Gradle tasks: [:assemble-xyz, :testClasses]\n"
    start_match = gather_build_data.GRADLE_BUILD_START.match(log_line)
    assert start_match.group(1) == ":assemble-xyz, :testClasses"
def test_next_match():
    """next_match yields (name, match) pairs in log order, regardless of the
    order the regexes were supplied in."""
    text = """\
2021-07-14 16:50:59,433 [6804663] INFO - ild.invoker.GradleBuildInvoker - About to execute Gradle tasks: [clean]
2021-07-14 16:50:59,667 [6804897] INFO - ild.invoker.GradleBuildInvoker - Gradle build finished in 214 ms
"""
    named_regexes = [
        gather_build_data.NamedRegex(gather_build_data.GRADLE_BUILD_END, "build"),
        gather_build_data.NamedRegex(gather_build_data.GRADLE_BUILD_START, "tasks"),
    ]
    matches = gather_build_data.next_match(StringIO(text), named_regexes)
    assert next(matches)[0] == "tasks"
    assert next(matches)[0] == "build"
def test_filter_gradle_builds():
"""Only the final build-end line produces a Build; the intermediate
Instructing/Passing/options lines and the first duplicate task line are
ignored, and the last seen task list ('clean') is attached."""
text = """\
2021-07-14 16:50:59,433 [6804663] INFO - ild.invoker.GradleBuildInvoker - About to execute Gradle tasks: [clean]
2021-07-14 16:50:59,439 [6804669] INFO - ild.invoker.GradleBuildInvoker - About to execute Gradle tasks: [clean]
2021-07-14 16:50:59,441 [6804671] INFO - s.plugins.gradle.GradleManager - Instructing gradle to use java from /Users/emilybache/Library/Application Support/JetBrains/Toolbox/apps/AndroidStudio/ch-0/202.7486908/Android Studio.app/Contents/jre/jdk/Contents/Home
2021-07-14 16:50:59,451 [6804681] INFO - ild.invoker.GradleBuildInvoker - Build command line options: [-Pandroid.injected.invoked.from.ide=true, -Pandroid.injected.studio.version=202.7660.26.42.7486908, -Pandroid.injected.attribution.file.location=/Users/emilybache/workspace/app/buildSrc/.gradle]
2021-07-14 16:50:59,452 [6804682] INFO - xecution.GradleExecutionHelper - Passing command-line args to Gradle Tooling API: -Pandroid.injected.invoked.from.ide=true -Pandroid.injected.studio.version=202.7660.26.42.7486908 -Pandroid.injected.attribution.file.location=/Users/emilybache/workspace/app/buildSrc/.gradle
2021-07-14 16:50:59,646 [6804876] INFO - s.plugins.gradle.GradleManager - Instructing gradle to use java from /Users/emilybache/Library/Application Support/JetBrains/Toolbox/apps/AndroidStudio/ch-0/202.7486908/Android Studio.app/Contents/jre/jdk/Contents/Home
2021-07-14 16:50:59,650 [6804880] INFO - ild.invoker.GradleBuildInvoker - Build command line options: [-Pandroid.injected.invoked.from.ide=true, -Pandroid.injected.studio.version=202.7660.26.42.7486908, -Pandroid.injected.attribution.file.location=/Users/emilybache/workspace/app/.gradle]
2021-07-14 16:50:59,651 [6804881] INFO - xecution.GradleExecutionHelper - Passing command-line args to Gradle Tooling API: -Pandroid.injected.invoked.from.ide=true -Pandroid.injected.studio.version=202.7660.26.42.7486908 -Pandroid.injected.attribution.file.location=/Users/emilybache/workspace/app/.gradle
2021-07-14 16:50:59,667 [6804897] INFO - ild.invoker.GradleBuildInvoker - Gradle build finished in 214 ms
"""
builds = filter_gradle_builds(StringIO(text))
assert list(builds) == [
Build(when="2021-07-14 16:50:59,667", time_taken="214 ms ", outcome="finished", tasks="clean")]
def test_parse():
    """A Build's repr-style literal round-trips through eval to an equal Build."""
    expected = Build(when="2021-07-14 16:50:59,667", time_taken="214 ms ", outcome="finished", tasks="clean")
    literal = """Build(when="2021-07-14 16:50:59,667", time_taken="214 ms ", outcome="finished", tasks="clean")"""
    assert expected == eval(literal)
def test_filename():
    """output_filename joins the ISO date and user name into '<date>-<user>.log'."""
    some_day = datetime.date(2021, 7, 15)
    assert output_filename(user="emily", date=some_day) == "2021-07-15-emily.log"
def test_output_to_file():
    """parse_builds writes one repr-formatted line per detected build to the
    output stream."""
    text = """\
2021-07-14 15:08:52,831 [ 678061] INFO - g.FileBasedIndexProjectHandler - Reindexing refreshed files: 0 to update, calculated in 0ms
2021-07-14 15:20:21,542 [1366772] INFO - ild.invoker.GradleBuildInvoker - Gradle build finished in 16 m 5 s 163 ms
at com.intellij.openapi.application.impl.ApplicationImpl.runIntendedWriteActionOnCurrentThread(ApplicationImpl.java:808)
"""
    buffer = StringIO()
    gather_build_data.parse_builds(StringIO(text), buffer)
    expected_build = Build(when="2021-07-14 15:20:21,542", time_taken="16 m 5 s 163 ms ", outcome="finished", tasks="")
    assert buffer.getvalue() == "%s\n" % expected_build
def test_to_csv():
    """to_csv renders when, record type, seconds, outcome and tasks as CSV."""
    finished_build = Build(when='2021-07-15 16:37:12,979', time_taken='3 m 28 s 451 ms ', outcome='finished', tasks='clean')
    expected = """2021-07-15 16:37:12,979, Build, 208.451, finished, clean"""
    assert str(finished_build.to_csv()) == expected
def test_parse_to_secs():
    """parse_to_secs converts Gradle '<m> m <s> s <ms> ms' durations to float seconds."""
    expected_seconds = {
        '252 ms ': 0.252,
        '17 s 252 ms ': 17.252,
        '2 m 17 s 252 ms ': 137.252,
    }
    for duration, seconds in expected_seconds.items():
        assert gather_build_data.parse_to_secs(duration) == seconds
def test_filter_sync_events():
"""A sync-start/sync-end pair yields one Sync carrying the project name;
the unrelated 'Requesting Gradle sync' line is ignored."""
# NOTE(review): this calls filter_gradle_builds yet asserts Sync results —
# presumably that function detects both builds and syncs; confirm against
# gather_build_data, or a dedicated sync filter was intended here.
text = """\
2021-07-13 13:46:35,105 [14005541] INFO - e.project.sync.GradleSyncState - Started single-variant sync with Gradle for project 'acme-project'.
2021-07-14 09:02:55,180 [ 32416] INFO - idGradleProjectStartupActivity - Requesting Gradle sync (Cannot find any of:
2021-07-13 13:51:14,719 [14285155] INFO - e.project.sync.GradleSyncState - Gradle sync finished in 4 m 39 s 614 ms
"""
syncs = filter_gradle_builds(StringIO(text))
assert list(syncs) == [
Sync(when="2021-07-13 13:51:14,719", time_taken="4 m 39 s 614 ms ", outcome="finished", project="acme-project")]
def test_sync_matcher():
    """GRADLE_SYNC_END extracts timestamp, outcome and duration from a
    sync-end log line."""
    log_line = "2021-07-13 13:51:14,719 [14285155] INFO - e.project.sync.GradleSyncState - Gradle sync finished in 4 m 39 s 614 ms \n"
    end_match = gather_build_data.GRADLE_SYNC_END.match(log_line)
    assert end_match.group(1, 2, 3) == (
        "2021-07-13 13:51:14,719",
        "finished",
        "4 m 39 s 614 ms ",
    )
def test_sync_project_matcher():
    """GRADLE_SYNC_START captures the project name from a sync-start log line."""
    log_line = "2021-07-13 13:46:35,105 [14005541] INFO - e.project.sync.GradleSyncState - Started single-variant sync with Gradle for project 'acme-project'.\n"
    start_match = gather_build_data.GRADLE_SYNC_START.match(log_line)
    assert start_match.group(1) == 'acme-project'
| 59.148438 | 317 | 0.719456 | 1,124 | 7,571 | 4.744662 | 0.185053 | 0.038252 | 0.037502 | 0.030002 | 0.829552 | 0.812488 | 0.801238 | 0.749109 | 0.735984 | 0.676355 | 0 | 0.149915 | 0.14714 | 7,571 | 127 | 318 | 59.614173 | 0.676011 | 0 | 0 | 0.315789 | 0 | 0.284211 | 0.632713 | 0.265962 | 0 | 0 | 0 | 0 | 0.221053 | 1 | 0.147368 | false | 0.021053 | 0.042105 | 0 | 0.189474 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e1b1965f68869bae4283c4a07c5b1ddbeb86a33a | 140 | py | Python | autogoal/ml/__init__.py | yacth/autogoal | a55c1534161e850587e2ca3533aa2fd5ae28569e | [
"MIT"
] | null | null | null | autogoal/ml/__init__.py | yacth/autogoal | a55c1534161e850587e2ca3533aa2fd5ae28569e | [
"MIT"
] | null | null | null | autogoal/ml/__init__.py | yacth/autogoal | a55c1534161e850587e2ca3533aa2fd5ae28569e | [
"MIT"
] | null | null | null | from autogoal.ml._automl import AutoML
# from autogoal.ml._metalearning import DatasetFeatureExtractor, DatasetFeatureLogger, LearnerMedia
| 35 | 99 | 0.864286 | 14 | 140 | 8.5 | 0.642857 | 0.201681 | 0.235294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085714 | 140 | 3 | 100 | 46.666667 | 0.929688 | 0.692857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e1c847d68b60ce9cfaa9b89c7654103c6bcb7078 | 191 | py | Python | AnswerManagement/admin.py | Anmol3199/Ms-Engage2021 | 1855da7d0ac83cd3601cc62a93c089b79c9fa677 | [
"MIT"
] | 7 | 2020-10-19T03:37:10.000Z | 2022-03-24T19:33:30.000Z | AnswerManagement/admin.py | AnkushCh/Finalproject | 880d29390043a506c8c4f570b8005b9f4660454b | [
"MIT"
] | 1 | 2020-12-02T14:53:43.000Z | 2020-12-02T15:11:55.000Z | AnswerManagement/admin.py | ssoad/OnlineExamSystem | ae57e9acb32f1804e86ca1f9e0718ef1280ac289 | [
"MIT"
] | 4 | 2020-10-19T00:59:53.000Z | 2021-11-28T12:19:40.000Z | from django.contrib import admin
from .models import Answer, ExamineeAnswer, ExamineeMCQAnswer
# Register your models here.
# Register each answer model with the default admin site so all three can be
# managed through Django's admin interface.
for answer_model in (Answer, ExamineeAnswer, ExamineeMCQAnswer):
    admin.site.register(answer_model)
| 27.285714 | 64 | 0.82199 | 21 | 191 | 7.47619 | 0.619048 | 0.254777 | 0.471338 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.104712 | 191 | 6 | 65 | 31.833333 | 0.918129 | 0.136126 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
e1cdc3a38a9daa49ff4cff0df4282460bd08289d | 867 | py | Python | stage0/make.py | silentxnl/pwn2own2018 | 96c16ca43f0d9ec0a64f3da30f2d0798ed70582c | [
"MIT"
] | 742 | 2018-11-08T00:40:01.000Z | 2022-03-13T16:43:13.000Z | stage0/make.py | timwr/pwn2own2018 | 96c16ca43f0d9ec0a64f3da30f2d0798ed70582c | [
"MIT"
] | null | null | null | stage0/make.py | timwr/pwn2own2018 | 96c16ca43f0d9ec0a64f3da30f2d0798ed70582c | [
"MIT"
] | 151 | 2018-11-08T01:04:54.000Z | 2022-03-19T09:36:09.000Z | #!/usr/bin/env python3
# Content types shared by every exported resource.
_HTML = 'text/html; charset=UTF-8'
_JS = 'text/javascript; charset=UTF-8'

# Each entry maps a served file to its MIME type; 'aliases' lists extra URL
# paths ('' makes index.html the site root).
EXPORTS = [
    {'path': 'test.html', 'content_type': _HTML},
    {'path': 'index.html', 'content_type': _HTML, 'aliases': ['']},
    {'path': 'pwn.html', 'content_type': _HTML},
    {'path': 'ready.js', 'content_type': _JS},
    {'path': 'logging.js', 'content_type': _JS},
    {'path': 'shell.js', 'content_type': _JS},
    {'path': 'offsets.js', 'content_type': _JS},
    {'path': 'utils.js', 'content_type': _JS},
    {'path': 'int64.js', 'content_type': _JS},
    {'path': 'pwn.js', 'content_type': _JS},
]
| 57.8 | 93 | 0.573241 | 106 | 867 | 4.59434 | 0.235849 | 0.225873 | 0.308008 | 0.246407 | 0.821355 | 0.821355 | 0.821355 | 0.821355 | 0.673511 | 0 | 0 | 0.018492 | 0.189158 | 867 | 14 | 94 | 61.928571 | 0.674253 | 0.024221 | 0 | 0 | 0 | 0 | 0.631953 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
bedecdfb42df2369ac8f2aa00bb9f5c5fac2faa7 | 258 | py | Python | example_preprocess_module.py | freewillfx-azenqos/azm_db_merge | 1a221e101bfa70d2ced19d165ce25607b69e5078 | [
"Apache-2.0"
] | 5 | 2017-01-10T00:53:08.000Z | 2020-09-16T06:28:14.000Z | example_preprocess_module.py | freewillfx-azenqos/azm_db_merge | 1a221e101bfa70d2ced19d165ce25607b69e5078 | [
"Apache-2.0"
] | 3 | 2017-01-10T03:48:56.000Z | 2018-11-27T05:46:30.000Z | example_preprocess_module.py | freewillfx-azenqos/azm_db_merge | 1a221e101bfa70d2ced19d165ce25607b69e5078 | [
"Apache-2.0"
] | 7 | 2017-08-22T15:30:11.000Z | 2020-08-05T13:42:37.000Z |
def preprocess(dir_processing_azm, azm_file_path):
    """Example pre-merge hook: log entry to and exit from the preprocess step.

    Args:
        dir_processing_azm: directory where the .azm archive is being processed.
        azm_file_path: path of the source .azm file (unused by this example).

    Returns:
        None. This example module only prints trace messages.
    """
    entry_message = "hello from example_preprocess_module.preprocess(dir_processing_azm) - dir_processing_azm: "
    # Two-argument print keeps the exact separator spacing of the original trace.
    print(entry_message, dir_processing_azm)
    print("goodbye from example_preprocess_module.preprocess(dir_processing_azm)")
| 51.6 | 122 | 0.837209 | 34 | 258 | 5.882353 | 0.382353 | 0.325 | 0.4 | 0.39 | 0.69 | 0.69 | 0.53 | 0.53 | 0 | 0 | 0 | 0 | 0.081395 | 258 | 4 | 123 | 64.5 | 0.843882 | 0 | 0 | 0 | 0 | 0 | 0.618677 | 0.435798 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0 | 0.333333 | 0.666667 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 8 |
83159d013acc0192a112eed1b17bc3fcd973367c | 5,664 | py | Python | tests/test_class_oelint_vars_bbvars.py | vermaete/oelint-adv | 6a2cadf2fd076d6e531d4bb0abcc4ad89d1fdbee | [
"BSD-2-Clause"
] | null | null | null | tests/test_class_oelint_vars_bbvars.py | vermaete/oelint-adv | 6a2cadf2fd076d6e531d4bb0abcc4ad89d1fdbee | [
"BSD-2-Clause"
] | null | null | null | tests/test_class_oelint_vars_bbvars.py | vermaete/oelint-adv | 6a2cadf2fd076d6e531d4bb0abcc4ad89d1fdbee | [
"BSD-2-Clause"
] | null | null | null | import os
import sys
import pytest
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from base import TestBaseClass
class TestClassOelintVarsBBVars(TestBaseClass):
    """Tests for the oelint.vars.bbvars rule.

    Hard assignments (=, :=, .=, =., +=, =+) to bitbake-internal variables
    must be flagged once; weak assignments (?=, ??=) must not be flagged.
    """

    # Bitbake-internal variables that a recipe must not hard-assign.
    # Fix: this list used to be duplicated verbatim in both test methods
    # (and had already drifted apart); it is now maintained in one place.
    _BB_VARS = [
        "BB_CONSOLELOG",
        "BB_CURRENTTASK",
        "BB_DANGLINGAPPENDS_WARNONLY",
        "BB_DEFAULT_TASK",
        "BB_DISKMON_DIRS",
        "BB_DISKMON_WARNINTERVAL",
        "BB_ENV_EXTRAWHITE",
        "BB_ENV_WHITELIST",
        "BB_FETCH_PREMIRRORONLY",
        "BB_FILENAME",
        "BB_GENERATE_MIRROR_TARBALLS",
        "BB_HASHBASE_WHITELIST",
        "BB_HASHCHECK_FUNCTION",
        "BB_HASHCONFIG_WHITELIST",
        "BB_INVALIDCONF",
        "BB_LOGFMT",
        "BB_NICE_LEVEL",
        "BB_NO_NETWORK",
        "BB_NUMBER_PARSE_THREADS",
        "BB_NUMBER_THREADS",
        "BB_ORIGENV",
        "BB_PRESERVE_ENV",
        "BB_RUNFMT",
        "BB_RUNTASK",
        "BB_SCHEDULER",
        "BB_SCHEDULERS",
        "BB_SETSCENE_DEPVALID",
        "BB_SETSCENE_VERIFY_FUNCTION",
        "BB_SIGNATURE_EXCLUDE_FLAGS",
        "BB_SIGNATURE_HANDLER",
        "BB_SRCREV_POLICY",
        "BB_STAMP_POLICY",
        "BB_STAMP_WHITELIST",
        "BB_STRICT_CHECKSUM",
        "BB_TASK_NICE_LEVEL",
        "BB_TASKHASH",
        "BB_VERBOSE_LOGS",
        "BB_WORKERCONTEXT",
        "BBDEBUG",
        "BBFILE_COLLECTIONS",
        "BBFILE_PATTERN",
        "BBFILE_PRIORITY",
        "BBFILES",
        "BBINCLUDED",
        "BBINCLUDELOGS",
        "BBINCLUDELOGS_LINES",
        "BBLAYERS",
        "BBMASK",
        "BBPATH",
        "BBSERVER",
        "BBVERSIONS",
        "BITBAKE_UI",
        "BUILDNAME",
        "CACHE",
        "DL_DIR",
        "FILE",
        "FILESDIR",
        "FILESPATH",
        "INHERIT",
        "LAYERDEPENDS",
        "LAYERDIR",
        "LAYERVERSION",
        "MIRRORS",
        "MULTI_PROVIDER_WHITELIST",
        "PERSISTENT_DIR",
        "PREFERRED_PROVIDER",
        "PREFERRED_PROVIDERS",
        "PREFERRED_VERSION",
        "PREMIRRORS",
        "PRSERV_HOST",
        "STAMP",
        "TOPDIR"
    ]

    # Variables checked with weak assignments. The original second list
    # additionally contained DEFAULT_PREFERENCE (between CACHE and DL_DIR);
    # NOTE(review): that asymmetry is preserved here as-is — confirm whether
    # test_bad should also cover DEFAULT_PREFERENCE.
    _BB_VARS_WEAK = list(_BB_VARS)
    _BB_VARS_WEAK.insert(_BB_VARS_WEAK.index("DL_DIR"), "DEFAULT_PREFERENCE")

    def __generate_sample_code(self, var, operation):
        # Minimal recipe snippet assigning "foo" to *var* via *operation*.
        return '''
{var} {operation} "foo"
'''.format(var=var, operation=operation)

    @pytest.mark.parametrize('id', ['oelint.vars.bbvars'])
    @pytest.mark.parametrize('occurrence', [1])
    @pytest.mark.parametrize('var', _BB_VARS)
    @pytest.mark.parametrize('operation', ['=', ':=', '.=', '=.', '+=', '=+'])
    def test_bad(self, id, occurrence, var, operation):
        """A hard assignment to a bitbake variable raises exactly one finding."""
        # 'recipes' instead of 'input' to avoid shadowing the builtin.
        recipes = {
            'oelint_adv_test.bb': self.__generate_sample_code(var, operation)
        }
        # The rule emits per-variable ids: oelint.vars.bbvars.<VAR>.
        id += '.{}'.format(var)
        self.check_for_id(self._create_args(recipes), id, occurrence)

    @pytest.mark.parametrize('id', ['oelint.vars.bbvars'])
    @pytest.mark.parametrize('occurrence', [0])
    @pytest.mark.parametrize('var', _BB_VARS_WEAK)
    @pytest.mark.parametrize('operation', ['?=', '??='])
    def test_good_weak(self, id, occurrence, var, operation):
        """A weak assignment to a bitbake variable raises no finding."""
        recipes = {
            'oelint_adv_test.bb': self.__generate_sample_code(var, operation)
        }
        id += '.{}'.format(var)
        self.check_for_id(self._create_args(recipes), id, occurrence)
| 30.28877 | 78 | 0.501589 | 455 | 5,664 | 5.813187 | 0.294505 | 0.030246 | 0.063516 | 0.017391 | 0.901323 | 0.901323 | 0.901323 | 0.901323 | 0.901323 | 0.901323 | 0 | 0.000854 | 0.38012 | 5,664 | 186 | 79 | 30.451613 | 0.752492 | 0 | 0 | 0.892655 | 0 | 0 | 0.391773 | 0.09322 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016949 | false | 0 | 0.022599 | 0.00565 | 0.050847 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.